Monorepo for Aesthetic.Computer
aesthetic.computer
#!/usr/bin/env node
/**
 * Ableton Live XML Project Analyzer
 *
 * Goals:
 * - Parse a large extracted Ableton .als XML (e.g. zzzZWAP_extracted.xml)
 * - Compare against a blank template project XML
 * - Extract timeline-relevant entities for rebuilding a visual timeline:
 *     * Tracks (type, id, name, color, group membership)
 *     * Clips (name, type, track, start/end/loop times, warp markers if present)
 *     * Devices on tracks (type, on/off state, user name)
 *     * Automation envelopes & tempo / time signature changes
 *     * Locators / cue points
 *     * Scenes (index, name if any)
 * - Produce:
 *     * Structured JSON summary (optionally saved to a file)
 *     * Human-readable markdown-ish report printed to stdout
 * - Designed to be memory efficient (streaming SAX parse, not a full DOM)
 *
 * Usage (from reference/ directory):
 *   node analyze-ableton.mjs \
 *     --project ../system/public/assets/wipppps/zzzZWAP_extracted.xml \
 *     --blank ./live-12-blank.xml \
 *     --out report.json
 *
 * All flags are optional; defaults:
 *   --project: env ABLETON_PROJECT or zzzZWAP_extracted.xml
 *   --blank:   live-12-blank.xml
 *
 * Implementation notes:
 * - We use saxes (a SAX parser) to stream-parse.
 * - We maintain a stack of element names for path context (array join('/')).
 * - We detect clips by element names ending with 'Clip' (excluding 'ClipSlot').
 * - Timing fields are often empty-element tags with attribute Value="..."; we watch for known time-ish keys.
 * - We gather attribute Value and child <Value Value="..."/> patterns generically.
 * - We build a baseline of tag counts from the blank XML to compute diffs.
 */
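
// Illustrative sketch (hand-written, not copied from a real set) of the nesting
// this parser watches for — actual Live XML is far deeper and varies by version:
//
//   <MidiTrack Id="61">
//     <Name><EffectiveName Value="Drums"/></Name>
//     <MidiClip Id="0" Time="0">
//       <CurrentStart Value="0"/> <CurrentEnd Value="8"/>
//       <KeyTracks>
//         <KeyTrack Id="0">
//           <Notes><MidiNoteEvent Time="0" Duration="0.25" Velocity="100"/></Notes>
//           <MidiKey Value="36"/>  <!-- pitch arrives AFTER the notes list -->
//         </KeyTrack>
//       </KeyTracks>
//     </MidiClip>
//   </MidiTrack>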

import { createReadStream, promises as fs } from 'node:fs';
import { basename, resolve } from 'node:path';
import { performance } from 'node:perf_hooks';
import { SaxesParser } from 'saxes';
import chalk from 'chalk';

// ---------- CLI ARG PARSING ----------
const args = process.argv.slice(2);
function getFlag(name, def = undefined) {
  const idx = args.indexOf(`--${name}`);
  if (idx !== -1) return args[idx + 1];
  return def;
}

const projectPath = resolve(getFlag('project', process.env.ABLETON_PROJECT || '../system/public/assets/wipppps/zzzZWAP_extracted.xml'));
const blankPath = resolve(getFlag('blank', './live-12-blank.xml'));
const outPath = getFlag('out');
const outMarkdown = args.includes('--markdown'); // boolean flag (getFlag would wrongly consume the next token as its value)
const notesOutPath = getFlag('notes-out'); // optional separate notes export

console.error(chalk.cyan(`Analyzing Ableton project:\n  Target: ${projectPath}\n  Blank : ${blankPath}`));

// ---------- UTILITIES ----------
const timeLikeKeys = new Set([
  'CurrentStart', 'CurrentEnd', 'StartMarker', 'EndMarker', 'LoopStart', 'LoopEnd', 'LoopLength', 'StartRelative', 'Time', 'Length', 'Start', 'End'
]);
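// Illustrative: an empty element like <LoopStart Value="16"/> encountered while
// inside a clip becomes currentClip.times.LoopStart = 16 in the opentag handler.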

function isClipElement(name) {
  return /Clip$/.test(name) && name !== 'ClipSlot';
}
function isDeviceElement(name) {
  return /(Device|Instrument|Effect|PlugInDevice)$/.test(name) || name === 'Device';
}
function niceNum(n) { return (typeof n === 'number' && !Number.isNaN(n)) ? Number(n.toFixed(6)) : n; }

// Streaming parse harness
async function parseAbletonXML(path, options = { mode: 'project' }) {
  const start = performance.now();
  const parser = new SaxesParser({ xmlns: false, fragment: false });
  const stack = []; // element name stack
  const state = {
    tagCounts: Object.create(null),
    paths: new Set(),
    tracks: [],
    byTrackId: new Map(),
    currentTrack: null,
    clips: [],
    currentClip: null,
    devices: [],
    currentDevice: null,
    scenes: [],
    currentScene: null,
    tempoChanges: [],
    timeSigChanges: [],
    automationCount: 0,
    errors: [],
    totalElements: 0,
    file: path,
    notesCaptured: 0,
    warpMarkers: [],
    locators: [],
    currentKeyTrack: null,
    pitchCounts: {},
  };

  function pushTrack(elName, attrs) {
    const id = attrs.Id ?? attrs.ID ?? attrs.id;
    const track = { id, type: elName, name: null, color: null, groupId: null, devices: [], clips: [] };
    state.tracks.push(track);
    if (id) state.byTrackId.set(id, track);
    state.currentTrack = track;
  }
  function finalizeTrack(elName) {
    if (state.currentTrack && state.currentTrack.type === elName) {
      state.currentTrack = null;
    }
  }
  function pushClip(elName, attrs) {
    const clip = { type: elName, trackId: state.currentTrack?.id ?? null, name: null, times: {}, notes: [], rawAttrs: { ...attrs } };
    state.clips.push(clip);
    state.currentClip = clip;
    if (state.currentTrack) state.currentTrack.clips.push(clip);
  }
  function finalizeClip(elName) {
    if (state.currentClip && state.currentClip.type === elName) {
      state.currentClip = null;
    }
  }
  function pushDevice(elName, attrs) {
    const device = { type: elName, trackId: state.currentTrack?.id ?? null, name: null, on: null, rawAttrs: { ...attrs }, params: {} };
    state.devices.push(device);
    state.currentDevice = device;
    if (state.currentTrack) state.currentTrack.devices.push(device);
  }
  function finalizeDevice(elName) {
    if (state.currentDevice && state.currentDevice.type === elName) {
      state.currentDevice = null;
    }
  }

  parser.on('error', e => {
    state.errors.push(e.message);
  });
  parser.on('opentag', node => {
    const name = node.name;
    const attrs = Object.fromEntries(Object.entries(node.attributes).map(([k, v]) => [k, v.value ?? v]));
    stack.push(name);
    state.totalElements++;
    state.tagCounts[name] = (state.tagCounts[name] || 0) + 1;
    if (state.totalElements % 50000 === 0) {
      process.stderr.write(chalk.gray(`.. ${state.totalElements} elements\n`));
    }
    const pathStr = stack.join('/');
    state.paths.add(pathStr);

    if (/^(MidiTrack|AudioTrack|ReturnTrack|GroupTrack|MainTrack|PreHearTrack)$/.test(name)) {
      if (options.mode === 'project') pushTrack(name, attrs);
    }
    else if (isClipElement(name) && options.mode === 'project') {
      pushClip(name, attrs);
    }
    else if (isDeviceElement(name) && options.mode === 'project') {
      pushDevice(name, attrs);
    }
    // Enter KeyTrack context
    if (name === 'KeyTrack') {
      state.currentKeyTrack = { pendingNoteIndexes: [], pitch: undefined };
    }

    if (attrs.Value !== undefined) {
      const parent = stack[stack.length - 2];
      const key = name;
      if (state.currentClip && timeLikeKeys.has(key)) {
        const num = Number(attrs.Value);
        if (!Number.isNaN(num)) state.currentClip.times[key] = num;
      }
      if (state.currentTrack && key === 'Color') {
        state.currentTrack.color = attrs.Value;
      }
      if (state.currentTrack && key === 'TrackGroupId') {
        state.currentTrack.groupId = attrs.Value;
      }
      if (state.currentDevice && parent === 'On' && key === 'Manual') {
        state.currentDevice.on = attrs.Value === 'true';
      }
      if (key === 'Tempo' || key === 'TempoAutomationTarget') {
        const num = Number(attrs.Value);
        if (!Number.isNaN(num)) state.tempoChanges.push({ path: pathStr, tempo: num });
      }
      if (key === 'TimeSignature') {
        state.timeSigChanges.push({ path: pathStr, value: attrs.Value });
      }
      // Locator child attributes
      if (parent === 'Locator' && key === 'Time') {
        const loc = state.locators[state.locators.length - 1];
        if (loc) loc.time = Number(attrs.Value);
      }
      if (parent === 'Locator' && key === 'Name') {
        const loc = state.locators[state.locators.length - 1];
        if (loc) loc.name = attrs.Value;
      }
      // MidiKey inside KeyTrack defines pitch for all contained notes (often appears after the notes list)
      if (state.currentKeyTrack && key === 'MidiKey') {
        const pitchVal = Number(attrs.Value);
        if (!Number.isNaN(pitchVal)) {
          state.currentKeyTrack.pitch = pitchVal;
          // Backfill pending notes without pitch
          for (const idx of state.currentKeyTrack.pendingNoteIndexes) {
            const note = state.currentClip?.notes[idx];
            if (note && note.pitch == null) {
              note.pitch = pitchVal;
              state.pitchCounts[pitchVal] = (state.pitchCounts[pitchVal] || 0) + 1;
            }
          }
          state.currentKeyTrack.pendingNoteIndexes = [];
        }
      }
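      // Hedged addition: Live typically stores display names as Value attributes
      // on empty elements (e.g. <EffectiveName Value="Drums"/>), so the text-node
      // capture in the closetag handler below may never fire for them. Capture
      // the attribute form too, most specific context first.
      if (key === 'EffectiveName' || key === 'UserName') {
        if (state.currentDevice && !state.currentDevice.name) state.currentDevice.name = attrs.Value;
        else if (state.currentClip && !state.currentClip.name) state.currentClip.name = attrs.Value;
        else if (state.currentTrack && !state.currentTrack.name) state.currentTrack.name = attrs.Value;
      }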
    }

    // MIDI notes (leaf empty elements) inside clips
    if (state.currentClip && name === 'MidiNoteEvent') {
      const note = {
        time: attrs.Time !== undefined ? Number(attrs.Time) : undefined,
        duration: attrs.Duration !== undefined ? Number(attrs.Duration) : undefined,
        velocity: attrs.Velocity !== undefined ? Number(attrs.Velocity) : undefined,
        offVelocity: attrs.OffVelocity !== undefined ? Number(attrs.OffVelocity) : undefined,
        noteId: attrs.NoteId !== undefined ? Number(attrs.NoteId) : undefined,
        pitch: undefined,
      };
      // Require a valid numeric time; Number.isNaN(undefined) is false, so the
      // undefined check is needed as well.
      if (note.time !== undefined && !Number.isNaN(note.time)) {
        state.currentClip.notes.push(note);
        // Defer pitch assignment until MidiKey is encountered
        if (state.currentKeyTrack) {
          state.currentKeyTrack.pendingNoteIndexes.push(state.currentClip.notes.length - 1);
        }
        state.notesCaptured++;
      }
    }
    // Warp markers inside AudioClip context
    if (state.currentClip && name === 'WarpMarker') {
      state.warpMarkers.push({
        clipIndex: state.clips.length - 1,
        secTime: attrs.SecTime !== undefined ? Number(attrs.SecTime) : undefined,
        beatTime: attrs.BeatTime !== undefined ? Number(attrs.BeatTime) : undefined,
      });
    }
    // Locators at arrangement level
    if (name === 'Locator') {
      state.locators.push({ id: attrs.Id || attrs.ID, time: undefined, name: undefined });
    }
  });
  let currentText = '';
  parser.on('text', txt => { currentText += txt; });
  parser.on('closetag', tag => {
    // saxes passes the tag object (not a bare name string) to closetag; accept
    // both defensively so the regex tests below see a string.
    const name = typeof tag === 'string' ? tag : tag.name;
    const text = currentText.trim();
    currentText = '';
    if (state.currentClip && /^(EffectiveName|UserName)$/.test(name) && text) {
      state.currentClip.name = state.currentClip.name || text;
    }
    if (state.currentDevice && /^(UserName|EffectiveName)$/.test(name) && text) {
      state.currentDevice.name = state.currentDevice.name || text;
    }
    if (state.currentTrack && /^(EffectiveName|UserName)$/.test(name) && text) {
      state.currentTrack.name = state.currentTrack.name || text;
    }
    if (isClipElement(name)) finalizeClip(name);
    else if (isDeviceElement(name)) finalizeDevice(name);
    else if (/^(MidiTrack|AudioTrack|ReturnTrack|GroupTrack|MainTrack|PreHearTrack)$/.test(name)) finalizeTrack(name);
    else if (name === 'KeyTrack') {
      // If MidiKey was never provided after the notes, pitch remains undefined; other hints could be mined here.
      state.currentKeyTrack = null;
    }
    stack.pop();
  });

  await new Promise((resolvePromise, reject) => {
    const rs = createReadStream(path, { encoding: 'utf8' });
    rs.on('data', chunk => {
      try { parser.write(chunk); } catch (e) { reject(e); }
    });
    rs.on('error', reject);
    rs.on('end', () => {
      try { parser.close(); } catch (e) { return reject(e); }
      resolvePromise();
    });
  });

  const elapsed = performance.now() - start;
  state.elapsedMs = elapsed;
  return state;
}
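
// Standalone usage sketch (hypothetical file path; main() below does this for
// real against the CLI-selected files):
//   const state = await parseAbletonXML('./some_extracted.xml', { mode: 'project' });
//   console.log(state.tracks.length, 'tracks,', state.clips.length, 'clips');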

// ---------- MAIN FLOW ----------
async function main() {
  const blank = await parseAbletonXML(blankPath, { mode: 'blank' });
  const project = await parseAbletonXML(projectPath, { mode: 'project' });

  const diffCounts = {};
  for (const [tag, count] of Object.entries(project.tagCounts)) {
    const base = blank.tagCounts[tag] || 0;
    const delta = count - base;
    if (delta !== 0) diffCounts[tag] = { project: count, blank: base, delta };
  }
  const stats = {
    projectFile: project.file,
    blankFile: blank.file,
    parsing: {
      projectMs: Math.round(project.elapsedMs),
      blankMs: Math.round(blank.elapsedMs),
      totalElementsProject: project.totalElements,
      totalElementsBlank: blank.totalElements,
    },
    tracks: project.tracks.length,
    clips: project.clips.length,
    devices: project.devices.length,
    tempoEvents: project.tempoChanges.length,
    timeSignatureEvents: project.timeSigChanges.length,
    notes: project.notesCaptured,
    warpMarkers: project.warpMarkers.length,
    locators: project.locators.length,
  };
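  // Example of the resulting shape (illustrative numbers, not real output):
  //   { projectFile: '...', blankFile: '...',
  //     parsing: { projectMs: 1200, blankMs: 40, totalElementsProject: 500000, totalElementsBlank: 9000 },
  //     tracks: 24, clips: 310, devices: 96, tempoEvents: 2,
  //     timeSignatureEvents: 1, notes: 18000, warpMarkers: 420, locators: 7 }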

  const trackSummaries = project.tracks.map(t => ({
    id: t.id,
    type: t.type,
    name: t.name,
    color: t.color,
    groupId: t.groupId,
    devices: t.devices.map(d => ({ type: d.type, name: d.name, on: d.on })),
    clipCount: t.clips.length,
    clips: t.clips.slice(0, 50).map(c => ({ name: c.name, times: c.times }))
  }));

  const timeline = project.clips.map((c, idx) => ({ index: idx, trackId: c.trackId, name: c.name, times: c.times, noteCount: c.notes.length }));

  // Build the full notes list if requested (can be large). Each note is augmented with clip info + absolute beat time.
  let allNotes = null;
  if (notesOutPath) {
    allNotes = [];
    project.clips.forEach((clip, clipIndex) => {
      const clipStart = clip.times.CurrentStart || 0; // beat-based, already defaulted to 0
      clip.notes.forEach(n => {
        const absBeat = clipStart + (n.time || 0);
        allNotes.push({
          clipIndex,
          trackId: clip.trackId,
          clipName: clip.name,
          beat: absBeat,
          relBeat: n.time,
          duration: n.duration,
          velocity: n.velocity,
          pitch: n.pitch
        });
      });
    });
    // Sort by absolute beat
    allNotes.sort((a, b) => a.beat - b.beat);
  }
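  // Each allNotes entry then looks like (illustrative values):
  //   { clipIndex: 3, trackId: '61', clipName: 'Bass', beat: 16.5, relBeat: 0.5,
  //     duration: 0.25, velocity: 100, pitch: 36 }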

  const result = {
    stats,
    trackSummaries,
    timeline,
    tempoChanges: project.tempoChanges,
    timeSignatureChanges: project.timeSigChanges,
    locators: project.locators,
    warpMarkersSample: project.warpMarkers.slice(0, 50),
    tagDiffCounts: diffCounts,
    errors: { project: project.errors, blank: blank.errors }
  };

  if (outPath) {
    await fs.writeFile(outPath, JSON.stringify(result, null, 2), 'utf8');
    console.error(chalk.green(`Wrote JSON report: ${outPath}`));
  }
  if (notesOutPath && allNotes) {
    await fs.writeFile(notesOutPath, JSON.stringify(allNotes, null, 2), 'utf8');
    console.error(chalk.green(`Wrote notes list: ${notesOutPath} (notes=${allNotes.length})`));
  }

  console.log('\n=== Ableton Project Analysis Report ===');
  console.log(chalk.bold('Files:'));
  console.log(`  Project: ${basename(projectPath)}`);
  console.log(`  Blank  : ${basename(blankPath)}`);
  console.log('\nStats:');
  for (const [k, v] of Object.entries(stats)) {
    if (typeof v === 'object') continue;
    console.log(`  ${k}: ${v}`);
  }
  console.log('  parse(project) ms:', stats.parsing.projectMs, 'elements:', stats.parsing.totalElementsProject);
  console.log('  parse(blank)   ms:', stats.parsing.blankMs, 'elements:', stats.parsing.totalElementsBlank);

  console.log('\nTracks (first 10):');
  for (const t of trackSummaries.slice(0, 10)) {
    console.log(`  [${t.id}] ${t.type} ${t.name || ''} clips=${t.clipCount} devices=${t.devices.length}`);
  }

  console.log('\nTop Tag Deltas (by absolute delta, top 25):');
  const topTagDeltas = Object.entries(diffCounts)
    .sort((a, b) => Math.abs(b[1].delta) - Math.abs(a[1].delta))
    .slice(0, 25);
  for (const [tag, info] of topTagDeltas) {
    // Only prefix '+' for positive deltas; negatives already carry their sign.
    console.log(`  ${tag}: ${info.delta > 0 ? '+' : ''}${info.delta} (project ${info.project} vs blank ${info.blank})`);
  }

  console.log('\nSample Timeline Entries (first 25):');
  for (const c of timeline.slice(0, 25)) {
    const times = Object.entries(c.times).map(([k, v]) => `${k}=${niceNum(v)}`).join(' ');
    console.log(`  Clip#${c.index} Track ${c.trackId} :: ${c.name || '(unnamed)'} :: notes=${c.noteCount} :: ${times}`);
  }

  console.log('\nLocators (first 10):');
  for (const l of project.locators.slice(0, 10)) {
    console.log(`  Locator ${l.id} @ ${l.time} :: ${l.name}`);
  }

  console.log('\nUse --out report.json to capture the full JSON including all clips.');
  console.log('Add --markdown to emit ABLETON_TIMELINE_GUIDE.md with schema + samples.');

  // Pitch histogram (top 20)
  const pitchEntries = Object.entries(project.pitchCounts).sort((a, b) => b[1] - a[1]).slice(0, 20);
  if (pitchEntries.length) {
    console.log('\nTop Note Pitches (count):');
    for (const [p, c] of pitchEntries) console.log(`  ${p}: ${c}`);
  }

  if (outMarkdown) {
    const md = [];
    md.push('# Ableton Live Project Structural Report');
    md.push('');
    md.push('Generated by analyze-ableton.mjs');
    md.push('');
    md.push('## Summary Stats');
    // Skip nested objects (e.g. parsing) so they don't render as [object Object].
    Object.entries(result.stats).forEach(([k, v]) => { if (typeof v !== 'object') md.push(`- ${k}: ${v}`); });
    md.push('\n## Locators');
    if (result.locators.length === 0) md.push('*None*');
    result.locators.forEach(l => md.push(`- ${l.id}@${l.time} : ${l.name}`));
    md.push('\n## Tracks (first 10)');
    trackSummaries.slice(0, 10).forEach(t => md.push(`- [${t.id}] ${t.type} ${t.name || ''} clips=${t.clipCount} devices=${t.devices.length}`));
    md.push('\n## Sample Notes (first 40 across clips)');
    const sampleNotes = [];
    for (const clip of project.clips) {
      for (const note of clip.notes) {
        sampleNotes.push({ clipTrack: clip.trackId, time: note.time, dur: note.duration, vel: note.velocity, pitch: note.pitch });
        if (sampleNotes.length >= 40) break;
      }
      if (sampleNotes.length >= 40) break;
    }
    if (sampleNotes.length === 0) md.push('*No notes captured*');
    sampleNotes.forEach(n => md.push(`- Track ${n.clipTrack} t=${n.time} dur=${n.dur} vel=${n.vel} pitch=${n.pitch ?? ''}`));
    md.push('\n## Warp Markers (sample 50)');
    if (result.warpMarkersSample.length === 0) md.push('*No warp markers*');
    result.warpMarkersSample.forEach((m) => md.push(`- ClipIdx ${m.clipIndex} beat=${m.beatTime} sec=${m.secTime}`));
    md.push('\n## Top Tag Deltas');
    Object.entries(diffCounts).sort((a, b) => Math.abs(b[1].delta) - Math.abs(a[1].delta)).slice(0, 25).forEach(([tag, info]) => md.push(`- ${tag}: ${info.delta > 0 ? '+' : ''}${info.delta}`));
    md.push('\n## Timeline JSON Shape');
    md.push('Each timeline entry:');
    md.push('```json');
    md.push(JSON.stringify({ index: 0, trackId: '<track-id>', name: 'Clip Name', times: { CurrentStart: 0, CurrentEnd: 8, LoopStart: 0, LoopEnd: 8 }, noteCount: 12 }, null, 2));
    md.push('```');
    md.push('\n### Clip Object (internal)');
    md.push('```json');
    md.push(JSON.stringify({ type: 'AudioClip|MidiClip', trackId: '61', name: '', times: {}, notes: [{ time: 0, duration: 0.25, velocity: 100 }], rawAttrs: {/* original XML attrs */} }, null, 2));
    md.push('```');
    md.push('\n## Pitch Histogram (top 20)');
    if (pitchEntries.length === 0) md.push('*No pitches*');
    pitchEntries.forEach(([p, c]) => md.push(`- ${p}: ${c}`));
    md.push('\n### Locator Object');
    md.push('```json');
    md.push(JSON.stringify({ id: '3', time: 32, name: 'DROP' }, null, 2));
    md.push('```');
    md.push('\n### Warp Marker Object');
    md.push('```json');
    md.push(JSON.stringify({ clipIndex: 5, beatTime: 0, secTime: 0 }, null, 2));
    md.push('```');
    md.push('\nUse these shapes to drive a timeline visualization:');
    md.push('- Horizontal axis: beat or absolute time (choose one; beatTime from warp markers or clip start times)');
    md.push('- Rows: tracks (group by trackSummaries)');
    md.push('- Clip bars: from times.CurrentStart to times.CurrentEnd (or use LoopStart/LoopEnd for a loop overlay)');
    md.push('- Notes: render inside MIDI clips using note.time and note.duration relative to clip start');
    md.push('- Locators: vertical lines at locator.time with labels');
    md.push('- Warp markers: micro markers inside audio clips to show warping');
    await fs.writeFile('ABLETON_TIMELINE_GUIDE.md', md.join('\n'), 'utf8');
    console.error(chalk.green('Wrote ABLETON_TIMELINE_GUIDE.md'));
  }
}

main().catch(err => {
  console.error(chalk.red('Analysis failed:'), err);
  process.exit(1);
});