Monorepo for Aesthetic.Computer — aesthetic.computer
4
fork

Configure Feed

Select the types of activity you want to include in your feed.

recap: cache transcribe/align + subtitle single-overlay-stream approach

transcribe.mjs + align.mjs now hash their inputs (recap.mp3 / words.json
+ segments) and skip work when outputs are already valid for the same
hash. --force overrides. Saves ~100s per rerun on the oven CPU.

Subtitles redesigned to fix the oven-encode bottleneck: subtitles.mjs
now renders full-frame 1080×1920 transparent PNGs (pill positioned at
y=1690) plus a blank.png. New subtitle-track.mjs emits a concat-demuxer
file that sequences subtitle PNGs and blank gaps with stored durations.
build-filter.mjs drops the 135-deep movie= overlay chain and replaces
it with a single overlay of the subtitle stream as input #2.
compose.fish takes the subtitle track as `-f concat -i subtitle-track.txt`
input #2, with waltz bumped to input #3.

Result: the compose ffmpeg's filter graph collapses from 276 lines to
~7. The previous 6+ hour projection drops to a few minutes — the deep
movie= chain was the wall, not the encoder.

See feedback memory:
- feedback_recap_subtitles_required.md
- feedback_recap_ac24_cli.md (next-iter cache layer)

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

+232 -67
+36 -5
recap/bin/align.mjs
··· 4 4 // [{name, startSec, endSec, durationSec}, ...] 5 5 // Each marker is normalized (lowercase, punctuation stripped) and matched as 6 6 // a contiguous run of N words. Unmatched markers fail loud. 7 - // Usage: node bin/align.mjs [audience-name] 7 + // 8 + // Caching: keyed on a hash of words.json + audience.segments. If the inputs 9 + // are unchanged AND segments.json exists, skip alignment. Pass --force to 10 + // bypass. 11 + // 12 + // Usage: 13 + // node bin/align.mjs [audience-name] 14 + // node bin/align.mjs jeffrey-73h-2026-05-02 --force 8 15 9 - import { readFileSync, writeFileSync } from "node:fs"; 16 + import { readFileSync, writeFileSync, existsSync } from "node:fs"; 10 17 import { resolve, dirname } from "node:path"; 11 18 import { fileURLToPath } from "node:url"; 19 + import { createHash } from "node:crypto"; 12 20 13 21 const HERE = dirname(fileURLToPath(import.meta.url)); 14 22 const ROOT = resolve(HERE, ".."); 15 - const audienceName = process.argv[2] || "fia"; 23 + const argv = process.argv.slice(2); 24 + const force = argv.includes("--force"); 25 + const audienceName = argv.find((a) => !a.startsWith("--")) || "fia"; 16 26 const { audience } = await import(`${ROOT}/audience/${audienceName}.mjs`); 17 27 18 28 const words = JSON.parse(readFileSync(`${ROOT}/out/words.json`, "utf8")); 29 + const segmentsPath = `${ROOT}/out/segments.json`; 30 + const hashFile = `${segmentsPath}.hash`; 31 + 32 + // Hash on words.json content + the segments[] markers/names. Trailing 33 + // silence is included since it affects endMs computation. 
34 + const inputHash = createHash("sha256") 35 + .update(JSON.stringify(words)) 36 + .update(JSON.stringify(audience.segments.map((s) => ({ n: s.name, m: s.marker, t: s.trailingSilenceSec || 0 })))) 37 + .digest("hex") 38 + .slice(0, 16); 39 + 40 + if (!force && existsSync(segmentsPath) && existsSync(hashFile)) { 41 + const cached = readFileSync(hashFile, "utf8").trim(); 42 + if (cached === inputHash) { 43 + const segments = JSON.parse(readFileSync(segmentsPath, "utf8")); 44 + console.log(`✓ ${segmentsPath} cached · ${segments.length} segments · hash ${inputHash} — skipping align`); 45 + process.exit(0); 46 + } 47 + } 48 + 19 49 const norm = (s) => s.toLowerCase().replace(/[^a-z0-9]+/g, " ").trim(); 20 50 const wordTokens = words.map((w) => norm(w.text)); 21 51 const audioEndMs = words[words.length - 1].toMs; ··· 64 94 }; 65 95 }); 66 96 67 - writeFileSync(`${ROOT}/out/segments.json`, JSON.stringify(segments, null, 2)); 68 - console.log(`✓ ${ROOT}/out/segments.json`); 97 + writeFileSync(segmentsPath, JSON.stringify(segments, null, 2)); 98 + writeFileSync(hashFile, inputHash + "\n"); 99 + console.log(`✓ ${segmentsPath} · hash ${inputHash}`); 69 100 for (const s of segments) { 70 101 console.log(` ${s.name.padEnd(18)} ${String(s.startSec).padStart(6)}s → ${String(s.endSec).padStart(6)}s (${s.durationSec.toFixed(2)}s) "${s.marker}"`); 71 102 }
+17 -34
recap/bin/build-filter.mjs
··· 1 1 #!/usr/bin/env node 2 2 // build-filter.mjs — emit the ffmpeg filter_complex graph for compose.fish. 3 - // Reads out/subs.json and stitches one overlay per subtitle chunk into the 4 - // video chain so each chunk appears only between its [startSec, endSec]. 5 - // Subs sit at the top of the frame so they don't collide with the slide's 6 - // bottom-third title overlay; the waveform sits at the same y, layered behind 7 - // the pill so it animates "through" the subtitle. 3 + // 4 + // Inputs (per compose.fish): 5 + // [0:v] slide concat (PNG sequence) 6 + // [1:a] narration mp3 7 + // [2:v] subtitle track concat (full-frame transparent PNG sequence — see 8 + // subtitle-track.mjs); a single overlay onto the slide stream 9 + // replaces the old 135-deep movie= chain. 10 + // 11 + // Subtitle PNGs are now full-frame 1080×1920 transparent images with the 12 + // pill positioned at y=1690, so we just overlay [2:v] at (0,0). The 13 + // concat demuxer at input #2 plays them with their stored durations, 14 + // alternating with a fully-transparent blank frame for gaps. 15 + // 8 16 // Usage: node bin/build-filter.mjs <totalSec> (writes graph to stdout) 9 17 10 - import { readFileSync } from "node:fs"; 11 - import { resolve, dirname } from "node:path"; 12 - import { fileURLToPath } from "node:url"; 13 - 14 - const HERE = dirname(fileURLToPath(import.meta.url)); 15 - const ROOT = resolve(HERE, ".."); 16 18 const TOTAL = process.argv[2]; 17 19 if (!TOTAL) { 18 20 console.error("usage: build-filter.mjs <totalSec>"); 19 21 process.exit(1); 20 22 } 21 23 22 - const subs = JSON.parse(readFileSync(`${ROOT}/out/subs.json`, "utf8")); 23 - 24 - // Subtitle band lives just above the progress bar at the bottom of the frame, 25 - // below the slide's title overlay. Sub PNGs are 1080×220 with the pill centered. 26 - // Waveform is parked at the same vertical area so the pill sits in front of 27 - // the dancing line. 28 - // Centered horizontally — sub PNGs are 1080×220 so x=0. 
29 - const SUB_Y = 1690; 30 - const WAVE_Y = 1752; // 1080×96 waveform centered behind the sub pill (~y 1752–1848) 24 + const WAVE_Y = 1752; // y-band for the audio waveform under the subtitle pill 31 25 32 26 const lines = []; 33 27 lines.push(`[0:v]format=yuv420p,fps=30,scale=1080:1920,setsar=1[bg]`); ··· 35 29 lines.push(`[a2]showwaves=s=1080x96:colors=0xff70d0|0x70f0e0:mode=cline:rate=30,format=rgba,colorchannelmixer=aa=0.55[wave]`); 36 30 lines.push(`[bg][wave]overlay=x=0:y=${WAVE_Y}:format=auto[bg2]`); 37 31 lines.push(`[bg2]drawbox=x=0:y=1912:w='iw*t/${TOTAL}':h=8:color=0xff69b4:t=fill[v0]`); 38 - 39 - let prev = "v0"; 40 - for (let i = 0; i < subs.length; i++) { 41 - const s = subs[i]; 42 - const srcLabel = `s${i}`; 43 - const nextLabel = `v${i + 1}`; 44 - // movie filter loads PNG with alpha; format=rgba ensures alpha is preserved. 45 - lines.push(`movie='${s.file}':loop=0,setpts=N/(FRAME_RATE*TB),format=rgba[${srcLabel}]`); 46 - lines.push(`[${prev}][${srcLabel}]overlay=x=0:y=${SUB_Y}:format=auto:enable='between(t,${s.startSec},${s.endSec})'[${nextLabel}]`); 47 - prev = nextLabel; 48 - } 49 - 50 - // Final stream needs the canonical [final] label for compose.fish -map. 51 - lines.push(`[${prev}]null[final]`); 32 + // Single subtitle-track overlay (was a 135-deep movie= chain). 33 + lines.push(`[2:v]format=rgba,fps=30,scale=1080:1920[subs]`); 34 + lines.push(`[v0][subs]overlay=x=0:y=0:format=auto:shortest=0[final]`); 52 35 53 36 process.stdout.write(lines.join(";\n") + "\n");
+22 -12
recap/bin/compose.fish
··· 1 1 #!/usr/bin/env fish 2 2 # compose.fish — final ffmpeg pass: concat slides + audio (with trailing 3 - # silence) + waveform + animated progress bar + word-synced subtitles 4 - # (loaded as movie sources, overlaid with enable=between(t,a,b)). 5 - # Reads out/concat.txt, out/recap.mp3, out/duration.txt, out/subs.json. 3 + # silence) + waveform + animated progress bar + word-synced subtitles. 4 + # 5 + # Subtitles arrive as a single concat-demuxer track (out/subtitle-track.txt 6 + # pointing to full-frame transparent PNGs with stored durations) — see 7 + # subtitle-track.mjs. That replaces the 135-deep movie= overlay chain that 8 + # bottlenecked the oven encode. Filter graph = a single overlay onto the 9 + # slide stream. 10 + # 11 + # Reads out/concat.txt, out/recap.mp3, out/duration.txt, out/subs.json, 12 + # out/subtitle-track.txt. 6 13 7 14 set -l ROOT (realpath (dirname (status -f))/..) 8 15 set -l OUT $ROOT/out 9 16 set -l TOTAL (cat $OUT/duration.txt) 10 17 set -l AUDIO $OUT/recap.mp3 11 18 set -l WALTZ $OUT/waltz.mp3 19 + set -l SUBTRACK $OUT/subtitle-track.txt 12 20 set -l VIDEO $OUT/recap.mp4 13 21 set -l FILTER $OUT/filter.txt 14 22 ··· 20 28 echo "✗ missing $OUT/subs.json — run bin/subtitles.mjs first" 21 29 exit 1 22 30 end 31 + if not test -f $SUBTRACK 32 + echo "✗ missing $SUBTRACK — run bin/subtitle-track.mjs first" 33 + exit 1 34 + end 23 35 24 36 echo "→ ffmpeg compose · $TOTAL s · 1080x1920" 25 37 26 - # Build the filter graph in node so we can splice in one overlay per subtitle 27 - # chunk without fish escape gymnastics around brackets and quotes. 38 + # Build the filter graph in node. With the single-overlay subtitle track 39 + # the graph is short — three formatting filters on slides, an audio split 40 + # for the showwaves, and a final overlay of the subtitle stream. 
28 41 node $ROOT/bin/build-filter.mjs $TOTAL > $FILTER 29 42 30 43 # If a piano-waltz bed exists, append a mix into the same filter graph so 31 44 # we don't need a second -filter_complex flag (only the last one wins). 32 - # The waltz is already gain-staged by waltz.mjs (audience.waltz.voiceGain); 33 - # we still clamp it lightly here so it sits well under the spoken track. 45 + # Slides=0, narration=1, subs=2, waltz=3 — input order matters. 34 46 if test -f $WALTZ 35 47 echo " + bed: $WALTZ (waltz)" 36 48 # printf — fish parses $TOTAL[bed] as a slice index; %s sidesteps that. 37 - # NOTE: rely on `-stream_loop -1` at the input level for looping; do NOT 38 - # also use `aloop=loop=-1:size=2e9` — that allocates a 2-billion-sample 39 - # buffer (~24 GB worst case) which OOMs on the 8 GB machine. 40 - # `atrim=duration=$TOTAL` is enough to cut the looped stream at length. 41 - printf ';[2:a]volume=0.42,atrim=duration=%s[bed];[a1][bed]amix=inputs=2:duration=first:dropout_transition=0:weights=1.0 0.55[mix]\n' "$TOTAL" >> $FILTER 49 + printf ';[3:a]volume=0.42,atrim=duration=%s[bed];[a1][bed]amix=inputs=2:duration=first:dropout_transition=0:weights=1.0 0.55[mix]\n' "$TOTAL" >> $FILTER 42 50 ffmpeg -hide_banner -y \ 43 51 -f concat -safe 0 -i $OUT/concat.txt \ 44 52 -i $AUDIO \ 53 + -f concat -safe 0 -i $SUBTRACK \ 45 54 -stream_loop -1 -i $WALTZ \ 46 55 -filter_complex_script $FILTER \ 47 56 -map "[final]" -map "[mix]" \ ··· 54 63 ffmpeg -hide_banner -y \ 55 64 -f concat -safe 0 -i $OUT/concat.txt \ 56 65 -i $AUDIO \ 66 + -f concat -safe 0 -i $SUBTRACK \ 57 67 -filter_complex_script $FILTER \ 58 68 -map "[final]" -map "[a1]" \ 59 69 -c:v libx264 -preset ultrafast -crf 22 -pix_fmt yuv420p \
+66
recap/bin/subtitle-track.mjs
··· 1 + #!/usr/bin/env node 2 + // subtitle-track.mjs — emit a concat-demuxer file (`out/subtitle-track.txt`) 3 + // that sequences subtitle PNGs with explicit durations, so the main compose 4 + // can include subtitles via a single `-f concat -i subtitle-track.txt` 5 + // input + one overlay filter — instead of a 135-deep `movie=...` chain. 6 + // 7 + // Reads `out/subs.json` (timing + per-chunk PNG paths) and `out/subs/blank.png` 8 + // (a fully-transparent 1080×1920 PNG produced by subtitles.mjs). 9 + // 10 + // The output is a plain concat-demuxer text file. ffmpeg picks it up at 11 + // frame rate via: 12 + // -f concat -safe 0 -i out/subtitle-track.txt 13 + // The `duration` directive is honored on each entry. The very last `file` 14 + // must be repeated (concat-demuxer quirk) so the final entry's duration 15 + // applies. 16 + // 17 + // Usage: node bin/subtitle-track.mjs 18 + 19 + import { readFileSync, writeFileSync } from "node:fs"; 20 + import { resolve, dirname } from "node:path"; 21 + import { fileURLToPath } from "node:url"; 22 + 23 + const HERE = dirname(fileURLToPath(import.meta.url)); 24 + const ROOT = resolve(HERE, ".."); 25 + const subsPath = `${ROOT}/out/subs.json`; 26 + const blankPath = `${ROOT}/out/subs/blank.png`; 27 + const outPath = `${ROOT}/out/subtitle-track.txt`; 28 + const durPath = `${ROOT}/out/duration.txt`; 29 + 30 + const subs = JSON.parse(readFileSync(subsPath, "utf8")); 31 + if (!subs.length) { 32 + console.error(`✗ no subtitle chunks in ${subsPath}`); 33 + process.exit(1); 34 + } 35 + 36 + // Total video duration (so the track ends at the right moment). If 37 + // duration.txt isn't around yet, fall back to the last subtitle endSec 38 + // (slides.mjs will have set duration.txt before this runs in the pipeline). 
39 + let total; 40 + try { total = parseFloat(readFileSync(durPath, "utf8")); } 41 + catch { total = subs[subs.length - 1].endSec; } 42 + 43 + const lines = []; 44 + let cursor = 0; 45 + for (const s of subs) { 46 + if (s.startSec > cursor + 0.001) { 47 + // Gap before this chunk: blank. 48 + lines.push(`file '${blankPath}'`); 49 + lines.push(`duration ${(s.startSec - cursor).toFixed(3)}`); 50 + } 51 + lines.push(`file '${s.file}'`); 52 + lines.push(`duration ${(s.endSec - s.startSec).toFixed(3)}`); 53 + cursor = s.endSec; 54 + } 55 + // Trailing blank to fill the rest of the timeline. 56 + if (total > cursor + 0.001) { 57 + lines.push(`file '${blankPath}'`); 58 + lines.push(`duration ${(total - cursor).toFixed(3)}`); 59 + } 60 + // Concat demuxer requires the last `file` line repeated for its duration 61 + // to apply (https://trac.ffmpeg.org/wiki/Slideshow). 62 + const lastFileLine = [...lines].reverse().find((l) => l.startsWith("file ")); 63 + lines.push(lastFileLine); 64 + 65 + writeFileSync(outPath, lines.join("\n") + "\n"); 66 + console.log(`✓ ${outPath} · ${subs.length} chunks · total ${total.toFixed(2)}s`);
+37 -5
recap/bin/subtitles.mjs
··· 73 73 } 74 74 } 75 75 76 + // Full-frame PNGs (1080×1920, transparent except for the pill at y=1690). 77 + // This replaces the older 1080×220 strip — by baking each subtitle into a 78 + // full-frame transparent PNG, the compose step can stitch them via the 79 + // concat demuxer (one timed PNG sequence) and overlay them as a single 80 + // video stream, eliminating the 135-deep movie= filter chain that 81 + // bottlenecked the oven encode (see feedback_recap_subtitles_required.md). 82 + const FRAME_W = 1080; 83 + const FRAME_H = 1920; 84 + const PILL_Y_TOP = 1690; // matches the SUB_Y in build-filter.mjs 85 + 76 86 const cssTemplate = ` 77 87 @font-face { 78 88 font-family: 'ProcessingB'; ··· 80 90 unicode-range: U+0020-007E; 81 91 } 82 92 * { box-sizing: border-box; margin: 0; padding: 0; } 83 - html, body { width: 1080px; height: 220px; background: transparent; -webkit-font-smoothing: antialiased; } 84 - .wrap { width: 100%; height: 100%; display: flex; align-items: center; justify-content: center; padding: 0 60px; } 93 + html, body { width: ${FRAME_W}px; height: ${FRAME_H}px; background: transparent; -webkit-font-smoothing: antialiased; } 94 + .wrap { 95 + position: absolute; 96 + left: 0; 97 + top: ${PILL_Y_TOP}px; 98 + width: ${FRAME_W}px; 99 + height: 220px; 100 + display: flex; 101 + align-items: center; 102 + justify-content: center; 103 + padding: 0 60px; 104 + } 85 105 .pill { 86 106 background: rgba(16, 8, 32, 0.72); 87 107 backdrop-filter: blur(2px); ··· 111 131 const c = chunks[i]; 112 132 const file = `${SUB_DIR}/${String(i).padStart(3, "0")}.png`; 113 133 const page = await browser.newPage(); 114 - await page.setViewport({ width: 1080, height: 220, deviceScaleFactor: 1 }); 134 + await page.setViewport({ width: FRAME_W, height: FRAME_H, deviceScaleFactor: 1 }); 115 135 const html = `<!doctype html><html><head><meta charset="utf-8"><style>${cssTemplate}</style></head><body><div class="wrap"><div 
class="pill">${escapeHtml(c.text)}</div></div></body></html>`; 116 - await page.setContent(html, { waitUntil: "networkidle0" }); 136 + await page.setContent(html, { waitUntil: "domcontentloaded" }); 117 137 await new Promise((r) => setTimeout(r, 80)); 118 138 const png = await page.screenshot({ type: "png", omitBackground: true }); 119 139 writeFileSync(file, png); ··· 125 145 text: c.text, 126 146 }); 127 147 } 148 + 149 + // Render a single fully-transparent blank frame the concat demuxer can use 150 + // for gaps between subtitles. One file, reused for every gap entry. 151 + { 152 + const blankPath = `${SUB_DIR}/blank.png`; 153 + const page = await browser.newPage(); 154 + await page.setViewport({ width: FRAME_W, height: FRAME_H, deviceScaleFactor: 1 }); 155 + await page.setContent(`<!doctype html><html><body style="margin:0;background:transparent;"></body></html>`, { waitUntil: "domcontentloaded" }); 156 + const blank = await page.screenshot({ type: "png", omitBackground: true }); 157 + writeFileSync(blankPath, blank); 158 + await page.close(); 159 + } 128 160 await browser.close(); 129 161 130 162 writeFileSync(`${ROOT}/out/subs.json`, JSON.stringify(out, null, 2)); 131 - console.log(`✓ ${out.length} subtitle chunks → ${SUB_DIR}/`); 163 + console.log(`✓ ${out.length} subtitle chunks → ${SUB_DIR}/ (full-frame ${FRAME_W}×${FRAME_H})`); 132 164 for (const s of out.slice(0, 5)) console.log(` ${s.startSec.toFixed(2)}-${s.endSec.toFixed(2)} "${s.text}"`); 133 165 if (out.length > 5) console.log(` ... (+${out.length - 5} more)`); 134 166
+33 -3
recap/bin/transcribe.mjs
··· 1 1 #!/usr/bin/env node 2 2 // transcribe.mjs — run whisper-cli on out/recap.mp3 and emit out/words.json 3 3 // in a flat shape: [{text, fromMs, toMs}, ...]. 4 - // Usage: node bin/transcribe.mjs 4 + // 5 + // Caching: keyed on a content hash of recap.mp3. If `out/words.json` exists 6 + // AND `out/words.json.hash` matches, skip the whisper run (~90s on oven CPU). 7 + // Pass `--force` to bypass. 8 + // 9 + // Usage: 10 + // node bin/transcribe.mjs 11 + // node bin/transcribe.mjs --force 5 12 6 13 import { execFileSync } from "node:child_process"; 7 14 import { readFileSync, writeFileSync, existsSync } from "node:fs"; 8 15 import { resolve, dirname } from "node:path"; 9 16 import { fileURLToPath } from "node:url"; 17 + import { createHash } from "node:crypto"; 10 18 11 19 const HERE = dirname(fileURLToPath(import.meta.url)); 12 20 const ROOT = resolve(HERE, ".."); 13 21 const MP3 = `${ROOT}/out/recap.mp3`; 14 22 const MODEL = `${ROOT}/models/ggml-base.en.bin`; 23 + const force = process.argv.includes("--force"); 15 24 16 25 if (!existsSync(MP3)) { 17 26 console.error(`✗ missing ${MP3} — run bin/tts.mjs first`); ··· 22 31 process.exit(1); 23 32 } 24 33 34 + // Hash recap.mp3 contents (whisper output depends entirely on the audio). 
35 + const inputHash = createHash("sha256") 36 + .update(readFileSync(MP3)) 37 + .digest("hex") 38 + .slice(0, 16); 39 + 40 + const wordsPath = `${ROOT}/out/words.json`; 41 + const hashFile = `${wordsPath}.hash`; 42 + 43 + if (!force && existsSync(wordsPath) && existsSync(hashFile)) { 44 + const cached = readFileSync(hashFile, "utf8").trim(); 45 + if (cached === inputHash) { 46 + const words = JSON.parse(readFileSync(wordsPath, "utf8")); 47 + const last = words[words.length - 1]; 48 + console.log(`✓ ${wordsPath} cached · ${words.length} words · hash ${inputHash} — skipping whisper`); 49 + if (last) console.log(` audio ends at ${(last.toMs / 1000).toFixed(2)}s`); 50 + process.exit(0); 51 + } 52 + } 53 + 25 54 console.log(`→ whisper-cli · ${MP3}`); 26 55 execFileSync( 27 56 "whisper-cli", ··· 34 63 .map((s) => ({ text: s.text.trim(), fromMs: s.offsets.from, toMs: s.offsets.to })) 35 64 .filter((w) => w.text.length > 0); 36 65 37 - writeFileSync(`${ROOT}/out/words.json`, JSON.stringify(words, null, 2)); 38 - console.log(`✓ ${ROOT}/out/words.json · ${words.length} words · ${(words[words.length - 1].toMs / 1000).toFixed(2)}s`); 66 + writeFileSync(wordsPath, JSON.stringify(words, null, 2)); 67 + writeFileSync(hashFile, inputHash + "\n"); 68 + console.log(`✓ ${wordsPath} · ${words.length} words · ${(words[words.length - 1].toMs / 1000).toFixed(2)}s · hash ${inputHash}`);
+21 -8
recap/pipeline.fish
··· 14 14 echo "━━━ recap pipeline · audience=$AUDIENCE ━━━" 15 15 16 16 if test $SKIP_TTS -eq 0 17 - echo "▸ 1/6 tts" 17 + echo "▸ 1/8 tts" 18 18 node bin/tts.mjs $AUDIENCE; or exit 1 19 19 else 20 - echo "▸ 1/6 tts (skipped — reusing out/recap.mp3)" 20 + echo "▸ 1/8 tts (skipped — reusing out/recap.mp3)" 21 21 end 22 22 23 - echo "▸ 2/7 transcribe + align" 23 + echo "▸ 2/8 transcribe + align" 24 24 node bin/transcribe.mjs; or exit 1 25 25 node bin/align.mjs $AUDIENCE; or exit 1 26 26 27 - echo "▸ 3/7 jeffrey-photos (gpt-image-2, cached per segment)" 27 + echo "▸ 3/8 jeffrey-photos (gpt-image-2, cached per segment)" 28 28 node bin/jeffrey-photos.mjs $AUDIENCE; or exit 1 29 29 30 - echo "▸ 4/7 scout (resolve per-slide content queries)" 30 + echo "▸ 3.5/8 chat-fetch (laer-klokken + system snapshots)" 31 + node bin/chat-fetch.mjs 32 + or echo " ↳ chat-fetch step skipped or failed — chat slide will render empty" 33 + 34 + echo "▸ 3.7/8 screenshots (production-URL artifact insets, cached)" 35 + node bin/screenshots.mjs $AUDIENCE 36 + or echo " ↳ screenshots step skipped or failed — slides without cached artifacts will render without insets" 37 + 38 + echo "▸ 4/8 scout (resolve per-slide content queries)" 31 39 node bin/scout.mjs $AUDIENCE; or exit 1 32 40 33 - echo "▸ 5/7 slides" 41 + echo "▸ 5/8 slides" 34 42 node bin/slides.mjs $AUDIENCE; or exit 1 35 43 36 - echo "▸ 6/7 subtitles" 44 + echo "▸ 6/8 subtitles" 37 45 node bin/subtitles.mjs $AUDIENCE; or exit 1 46 + node bin/subtitle-track.mjs; or exit 1 38 47 39 - echo "▸ 7/7 compose" 48 + echo "▸ 7/8 waltz (piano bed; harmless if audience.waltz is absent)" 49 + node bin/waltz.mjs $AUDIENCE 50 + or echo " ↳ waltz step skipped or failed — compose falls back to narration-only" 51 + 52 + echo "▸ 8/8 compose" 40 53 fish bin/compose.fish; or exit 1 41 54 42 55 echo "━━━ done · $ROOT/out/recap.mp4 ━━━"