Monorepo for Aesthetic.Computer — aesthetic.computer
4
fork

Configure Feed

Select the types of activity you want to include in your feed.

Add configurable AT frontend auto-sync for PDS

+345
+8
at/pds/MAINTENANCE.md
··· 16 16 0 2 * * * /root/backup.sh >> /var/log/pds-backup.log 2>&1 17 17 ``` 18 18 19 + ### AT Frontend Sync (Every minute) 20 + ```bash 21 + * * * * * /root/auto-sync-frontend.sh >> /var/log/at-frontend-sync.log 2>&1 22 + ``` 23 + 19 24 ## Weekly Tasks 20 25 21 26 ### Review Logs ··· 31 36 32 37 # Check backup logs 33 38 tail -50 /var/log/pds-backup.log 39 + 40 + # Check frontend sync logs 41 + tail -100 /var/log/at-frontend-sync.log 34 42 ``` 35 43 36 44 ### Check Storage Usage
+50
at/pds/scripts/README.md
··· 28 28 fish generate-pds-env.fish [output-file] 29 29 ``` 30 30 31 + #### `scripts/auto-sync-frontend.sh` 32 + Polls GitHub and auto-deploys configured frontend files into the PDS Caddy container. 33 + 34 + ```bash 35 + # Local smoke test (from repo root) 36 + AC_FORCE=1 at/pds/scripts/auto-sync-frontend.sh 37 + ``` 38 + 39 + **Default file map:** 40 + - `at/index.html -> /data/www/index.html` 41 + - `at/user-page.html -> /data/www/user.html` 42 + 43 + **Configurable via env:** 44 + - `AC_FILE_MAP="at/index.html:index.html;at/user-page.html:user.html;at/landing-page.html:landing-page.html"` 45 + 46 + **Typical server install (cron every minute):** 47 + ```bash 48 + # 1) SSH to server 49 + ssh -i ~/.ssh/aesthetic_pds root@<SERVER_IP> 50 + 51 + # 2) Install dependencies (jq is required for commit diff checks) 52 + apt update && apt install -y curl jq git 53 + 54 + # 3) Copy script to server 55 + scp -i ~/.ssh/aesthetic_pds /workspaces/aesthetic-computer/at/pds/scripts/auto-sync-frontend.sh root@<SERVER_IP>:/root/auto-sync-frontend.sh 56 + ssh -i ~/.ssh/aesthetic_pds root@<SERVER_IP> 'chmod +x /root/auto-sync-frontend.sh' 57 + 58 + # 4) Add cron job 59 + ssh -i ~/.ssh/aesthetic_pds root@<SERVER_IP> '(crontab -l 2>/dev/null; echo "* * * * * /root/auto-sync-frontend.sh >> /var/log/at-frontend-sync.log 2>&1") | crontab -' 60 + ``` 61 + 62 + **How it behaves:** 63 + - Tracks latest deployed commit in `/var/lib/at-frontend-sync/last_deployed_sha` 64 + - Compares changed files between last deployed SHA and latest main SHA 65 + - Skips deploy if none of the mapped frontend files changed 66 + - Uses `docker cp` into `caddy:/data/www/*` 67 + - Runs health check against `https://at.aesthetic.computer/xrpc/_health` 68 + 69 + #### `at/scripts/deploy-at-frontend.sh` 70 + Manual one-shot deploy helper from local machine to PDS host. 
71 + 72 + ```bash 73 + # From repo root (uses defaults + ~/.ssh/aesthetic_pds) 74 + at/scripts/deploy-at-frontend.sh 75 + 76 + # Optional custom map 77 + AT_FRONTEND_FILE_MAP="at/index.html:index.html;at/user-page.html:user.html;at/landing-page.html:landing-page.html" \ 78 + at/scripts/deploy-at-frontend.sh 79 + ``` 80 + 31 81 ### Monitoring 32 82 33 83 #### `scripts/health-check.sh`
+178
at/pds/scripts/auto-sync-frontend.sh
··· 1 + #!/usr/bin/env bash 2 + set -euo pipefail 3 + 4 + # Auto-sync AT frontend pages from GitHub to the PDS Caddy container. 5 + # Intended for cron/systemd on the PDS server. 6 + # 7 + # Env overrides: 8 + # AC_REPO="whistlegraph/aesthetic-computer" 9 + # AC_BRANCH="main" 10 + # AC_CONTAINER="caddy" 11 + # AC_CONTAINER_WEBROOT="/data/www" 12 + # AC_STATE_DIR="/var/lib/at-frontend-sync" 13 + # AC_HEALTH_URL="https://at.aesthetic.computer/xrpc/_health" 14 + # AC_FILE_MAP="at/index.html:index.html;at/user-page.html:user.html" 15 + # AC_FORCE="1" # force deploy even if SHA unchanged 16 + # 17 + # AC_FILE_MAP format: 18 + # "repo/source/path:container/target/path;repo/source2:path2" 19 + 20 + AC_REPO="${AC_REPO:-whistlegraph/aesthetic-computer}" 21 + AC_BRANCH="${AC_BRANCH:-main}" 22 + AC_CONTAINER="${AC_CONTAINER:-caddy}" 23 + AC_CONTAINER_WEBROOT="${AC_CONTAINER_WEBROOT:-/data/www}" 24 + AC_STATE_DIR="${AC_STATE_DIR:-/var/lib/at-frontend-sync}" 25 + AC_HEALTH_URL="${AC_HEALTH_URL:-https://at.aesthetic.computer/xrpc/_health}" 26 + AC_FILE_MAP="${AC_FILE_MAP:-at/index.html:index.html;at/user-page.html:user.html}" 27 + AC_FORCE="${AC_FORCE:-0}" 28 + 29 + RAW_BASE="https://raw.githubusercontent.com/${AC_REPO}" 30 + REPO_URL="https://github.com/${AC_REPO}.git" 31 + STATE_FILE="${AC_STATE_DIR}/last_deployed_sha" 32 + LOCK_FILE="${AC_STATE_DIR}/sync.lock" 33 + TMP_DIR="$(mktemp -d /tmp/at-frontend-sync.XXXXXX)" 34 + COMPARE_JSON="${TMP_DIR}/compare.json" 35 + 36 + trim() { 37 + local value="$1" 38 + value="${value#"${value%%[![:space:]]*}"}" 39 + value="${value%"${value##*[![:space:]]}"}" 40 + printf "%s" "${value}" 41 + } 42 + 43 + declare -a SOURCE_FILES=() 44 + declare -a TARGET_FILES=() 45 + IFS=';' read -r -a FILE_MAP_ENTRIES <<< "${AC_FILE_MAP}" 46 + for raw_entry in "${FILE_MAP_ENTRIES[@]}"; do 47 + entry="$(trim "${raw_entry}")" 48 + [[ -z "${entry}" ]] && continue 49 + 50 + if [[ "${entry}" != *:* ]]; then 51 + echo "Invalid AC_FILE_MAP entry: '${entry}' 
(expected source:target)" >&2 52 + exit 1 53 + fi 54 + 55 + source_path="$(trim "${entry%%:*}")" 56 + target_path="$(trim "${entry#*:}")" 57 + 58 + if [[ -z "${source_path}" || -z "${target_path}" ]]; then 59 + echo "Invalid AC_FILE_MAP entry: '${entry}' (empty source or target)" >&2 60 + exit 1 61 + fi 62 + 63 + SOURCE_FILES+=("${source_path}") 64 + TARGET_FILES+=("${target_path}") 65 + done 66 + 67 + if [[ "${#SOURCE_FILES[@]}" -eq 0 ]]; then 68 + echo "No frontend files configured. Set AC_FILE_MAP." >&2 69 + exit 1 70 + fi 71 + 72 + cleanup() { 73 + rm -rf "${TMP_DIR}" || true 74 + } 75 + trap cleanup EXIT 76 + 77 + mkdir -p "${AC_STATE_DIR}" 78 + 79 + exec 9>"${LOCK_FILE}" 80 + if ! flock -n 9; then 81 + echo "Auto-sync already running; exiting." 82 + exit 0 83 + fi 84 + 85 + if ! command -v docker >/dev/null 2>&1; then 86 + echo "docker is required but not found." >&2 87 + exit 1 88 + fi 89 + 90 + if ! command -v curl >/dev/null 2>&1; then 91 + echo "curl is required but not found." >&2 92 + exit 1 93 + fi 94 + 95 + if ! command -v jq >/dev/null 2>&1; then 96 + echo "jq is required but not found." >&2 97 + exit 1 98 + fi 99 + 100 + if ! docker ps --format '{{.Names}}' | grep -Fxq "${AC_CONTAINER}"; then 101 + echo "Container '${AC_CONTAINER}' is not running." >&2 102 + exit 1 103 + fi 104 + 105 + echo "Checking latest commit for ${AC_REPO}@${AC_BRANCH}..." 106 + LATEST_SHA="$(git ls-remote "${REPO_URL}" "refs/heads/${AC_BRANCH}" | awk '{print $1}')" 107 + if [[ -z "${LATEST_SHA}" ]]; then 108 + echo "Could not resolve latest SHA for ${AC_REPO}@${AC_BRANCH}" >&2 109 + exit 1 110 + fi 111 + 112 + LAST_SHA="" 113 + if [[ -f "${STATE_FILE}" ]]; then 114 + LAST_SHA="$(cat "${STATE_FILE}")" 115 + fi 116 + 117 + if [[ "${AC_FORCE}" != "1" && "${LATEST_SHA}" == "${LAST_SHA}" ]]; then 118 + echo "No new commit (${LATEST_SHA:0:12}); frontend is up to date." 
119 + exit 0 120 + fi 121 + 122 + if [[ "${AC_FORCE}" != "1" && -n "${LAST_SHA}" ]]; then 123 + COMPARE_URL="https://api.github.com/repos/${AC_REPO}/compare/${LAST_SHA}...${LATEST_SHA}" 124 + if curl -fsSL "${COMPARE_URL}" -o "${COMPARE_JSON}"; then 125 + SHOULD_DEPLOY=0 126 + for source_path in "${SOURCE_FILES[@]}"; do 127 + if jq -e --arg source_path "${source_path}" '.files[]? | select(.filename == $source_path)' "${COMPARE_JSON}" >/dev/null; then 128 + SHOULD_DEPLOY=1 129 + break 130 + fi 131 + done 132 + 133 + if [[ "${SHOULD_DEPLOY}" == "0" ]]; then 134 + echo "No tracked frontend files changed; skipping deploy for ${LATEST_SHA:0:12}." 135 + echo "${LATEST_SHA}" > "${STATE_FILE}" 136 + exit 0 137 + fi 138 + else 139 + echo "Could not compare commit diff. Continuing with deploy." 140 + fi 141 + fi 142 + 143 + echo "Deploying frontend for commit ${LATEST_SHA:0:12}..." 144 + 145 + declare -a TMP_FILES=() 146 + for i in "${!SOURCE_FILES[@]}"; do 147 + source_path="${SOURCE_FILES[$i]}" 148 + target_path="${TARGET_FILES[$i]}" 149 + source_url="${RAW_BASE}/${LATEST_SHA}/${source_path}" 150 + tmp_file="${TMP_DIR}/${i}-$(basename "${target_path}")" 151 + 152 + curl -fsSL "${source_url}" -o "${tmp_file}" 153 + if [[ "${source_path}" == *.html ]] && ! grep -qi "<!doctype html>" "${tmp_file}"; then 154 + echo "Downloaded ${source_path} does not look like HTML." >&2 155 + exit 1 156 + fi 157 + 158 + TMP_FILES+=("${tmp_file}") 159 + done 160 + 161 + for i in "${!TARGET_FILES[@]}"; do 162 + target_path="${TARGET_FILES[$i]}" 163 + tmp_file="${TMP_FILES[$i]}" 164 + target_dir="$(dirname "${target_path}")" 165 + if [[ "${target_dir}" != "." ]]; then 166 + docker exec "${AC_CONTAINER}" mkdir -p "${AC_CONTAINER_WEBROOT}/${target_dir}" 167 + fi 168 + docker cp "${tmp_file}" "${AC_CONTAINER}:${AC_CONTAINER_WEBROOT}/${target_path}" 169 + done 170 + 171 + echo "${LATEST_SHA}" > "${STATE_FILE}" 172 + 173 + if [[ -n "${AC_HEALTH_URL}" ]]; then 174 + echo "Running health check..." 
175 + curl -fsSL "${AC_HEALTH_URL}" >/dev/null 176 + fi 177 + 178 + echo "AT frontend sync complete (${LATEST_SHA:0:12})."
+109
at/scripts/deploy-at-frontend.sh
#!/usr/bin/env bash
set -euo pipefail

# Deploy AT frontend pages to the PDS caddy container.
# Intended for both local use and CI (GitHub Actions).
#
# Required env (or defaults):
#   AT_PDS_HOST               (default: 165.227.120.137)
#   AT_PDS_USER               (default: root)
#   AT_PDS_SSH_KEY_PATH       (default: ~/.ssh/aesthetic_pds)
#   AT_PDS_CONTAINER          (default: caddy)
#   AT_PDS_CONTAINER_WEBROOT  (default: /data/www)
#   AT_FRONTEND_FILE_MAP      (default: at/index.html:index.html;at/user-page.html:user.html)
#
# AT_FRONTEND_FILE_MAP format:
#   "repo/source/path:container/target/path;repo/source2:path2"

AT_PDS_HOST="${AT_PDS_HOST:-165.227.120.137}"
AT_PDS_USER="${AT_PDS_USER:-root}"
AT_PDS_SSH_KEY_PATH="${AT_PDS_SSH_KEY_PATH:-$HOME/.ssh/aesthetic_pds}"
AT_PDS_CONTAINER="${AT_PDS_CONTAINER:-caddy}"
AT_PDS_CONTAINER_WEBROOT="${AT_PDS_CONTAINER_WEBROOT:-/data/www}"
AT_FRONTEND_FILE_MAP="${AT_FRONTEND_FILE_MAP:-at/index.html:index.html;at/user-page.html:user.html}"

# Resolve the repo root relative to this script (at/scripts/ -> repo root) so
# the script works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"

# Strip leading/trailing whitespace from $1 and print the result.
trim() {
  local value="$1"
  value="${value#"${value%%[![:space:]]*}"}"
  value="${value%"${value##*[![:space:]]}"}"
  printf "%s" "${value}"
}

# Parse AT_FRONTEND_FILE_MAP into parallel source/target arrays and verify
# every source file exists locally before touching the server.
declare -a SOURCE_FILES=()
declare -a TARGET_FILES=()
IFS=';' read -r -a FILE_MAP_ENTRIES <<< "$AT_FRONTEND_FILE_MAP"
for raw_entry in "${FILE_MAP_ENTRIES[@]}"; do
  entry="$(trim "$raw_entry")"
  [[ -z "$entry" ]] && continue

  if [[ "$entry" != *:* ]]; then
    echo "Invalid AT_FRONTEND_FILE_MAP entry: '$entry' (expected source:target)" >&2
    exit 1
  fi

  source_path="$(trim "${entry%%:*}")"
  target_path="$(trim "${entry#*:}")"

  if [[ -z "$source_path" || -z "$target_path" ]]; then
    echo "Invalid AT_FRONTEND_FILE_MAP entry: '$entry' (empty source or target)" >&2
    exit 1
  fi

  local_source="$REPO_ROOT/$source_path"
  if [[ ! -f "$local_source" ]]; then
    echo "Missing source file: $local_source" >&2
    exit 1
  fi

  SOURCE_FILES+=("$source_path")
  TARGET_FILES+=("$target_path")
done

if [[ "${#SOURCE_FILES[@]}" -eq 0 ]]; then
  echo "No frontend files configured. Set AT_FRONTEND_FILE_MAP." >&2
  exit 1
fi

if [[ ! -f "$AT_PDS_SSH_KEY_PATH" ]]; then
  echo "Missing SSH key: $AT_PDS_SSH_KEY_PATH" >&2
  exit 1
fi

SSH_TARGET="$AT_PDS_USER@$AT_PDS_HOST"
SSH_OPTS=(
  -i "$AT_PDS_SSH_KEY_PATH"
  -o StrictHostKeyChecking=no
  -o UserKnownHostsFile=/dev/null
)

# Unique remote staging dir per run; GITHUB_SHA in CI, timestamp locally.
STAMP="${GITHUB_SHA:-$(date +%Y%m%d%H%M%S)}"
REMOTE_DIR="/tmp/at-frontend-${STAMP}"

echo "Deploying AT frontend to ${SSH_TARGET}"
echo "Uploading staging files..."
ssh "${SSH_OPTS[@]}" "$SSH_TARGET" "mkdir -p '$REMOTE_DIR'"

REMOTE_COPY_STEPS=()
for i in "${!SOURCE_FILES[@]}"; do
  source_path="${SOURCE_FILES[$i]}"
  target_path="${TARGET_FILES[$i]}"
  local_source="$REPO_ROOT/$source_path"
  remote_stage="$REMOTE_DIR/${i}-$(basename "$target_path")"
  target_dir="$(dirname "$target_path")"

  scp "${SSH_OPTS[@]}" "$local_source" "${SSH_TARGET}:${remote_stage}"
  if [[ "$target_dir" != "." ]]; then
    REMOTE_COPY_STEPS+=("docker exec '${AT_PDS_CONTAINER}' mkdir -p '${AT_PDS_CONTAINER_WEBROOT}/$target_dir'")
  fi
  REMOTE_COPY_STEPS+=("docker cp '$remote_stage' '${AT_PDS_CONTAINER}:${AT_PDS_CONTAINER_WEBROOT}/$target_path'")
done

# Join steps with ' && ' explicitly. The previous
#   copy_command="$(IFS=' && '; echo "${REMOTE_COPY_STEPS[*]}")"
# was a bug: "${arr[*]}" joins with only the FIRST character of IFS (a
# space), so every step collapsed into one malformed remote command line.
copy_command=""
for step in "${REMOTE_COPY_STEPS[@]}"; do
  if [[ -z "$copy_command" ]]; then
    copy_command="$step"
  else
    copy_command+=" && $step"
  fi
done
ssh "${SSH_OPTS[@]}" "$SSH_TARGET" "$copy_command && rm -rf '$REMOTE_DIR'"

echo "Deployment complete."
echo "Landing: https://at.aesthetic.computer"
echo "User page: https://jeffrey.at.aesthetic.computer"