Monorepo for Aesthetic.Computer
aesthetic.computer
1#!/usr/bin/env node
2/**
3 * Ableton Live XML Timeline Viewer
4 *
5 * A single-command pipeline that:
6 * 1. Parses Ableton XML files and extracts timeline data
7 * 2. Displays a live, real-time visual timeline in the terminal
8 *
9 * Usage:
10 * node ableton-live-viewer.mjs [path-to-extracted.xml]
11 *
12 * If no path provided, looks for zzzZWAP_extracted.xml in the parent wipppps folder
13 */
14
15import { createReadStream, existsSync } from 'node:fs';
16import { resolve, dirname } from 'node:path';
17import { fileURLToPath } from 'node:url';
18import { SaxesParser } from 'saxes';
19import chalk from 'chalk';
20
// Directory of this script, for resolving repo-relative asset paths
// (ES modules have no built-in __dirname).
const __dirname = dirname(fileURLToPath(import.meta.url));

// Default paths
// Repo-relative location of the extracted Ableton XML used when no CLI arg is given.
const DEFAULT_XML_PATH = resolve(__dirname, '../system/public/assets/wipppps/zzzZWAP_extracted.xml');
// NOTE(review): machine-specific absolute fallback — only valid on the original author's machine.
const FALLBACK_XML_PATH = '/Users/jas/Desktop/code/aesthetic-computer/system/public/assets/wipppps/zzzZWAP_extracted.xml';
26
/**
 * Streaming SAX parser for Ableton Live project XML (an extracted .als).
 *
 * Collects tracks, clips, MIDI notes, and the project tempo into flat
 * arrays, then derives the total timeline length in beats.
 */
class AbletonParser {
  constructor() {
    // Parsed results.
    this.tracks = [];            // { id, type, name, color, clips[] }
    this.clips = [];             // flat list of every completed clip
    this.locators = [];          // reserved; locators are not extracted yet
    this.tempo = 120;            // BPM default; overwritten if found in the XML
    this.timeSignature = [4, 4]; // NOTE(review): never parsed from the XML — stays 4/4
    this.totalLength = 0;        // timeline length in beats (computed after parse)
    this.notes = [];             // flat note list with track/clip context

    // Parser state
    this.currentPath = [];   // stack of currently-open element names
    this.currentTrack = null;
    this.currentClip = null;
    this.attributes = {};    // attributes of the most recently opened tag
    this.textContent = '';
  }

  /**
   * Parse an Ableton XML file and resolve with its timeline data.
   * @param {string} xmlPath - Path to the extracted XML file.
   * @returns {Promise<object>} The object produced by getTimelineData().
   */
  async parseXML(xmlPath) {
    console.log(chalk.blue('🎵 Parsing Ableton XML...'));

    const parser = new SaxesParser();
    const stream = createReadStream(xmlPath);

    parser.on('opentag', (node) => {
      this.currentPath.push(node.name);
      // Ableton stores most scalar values as attributes on self-closing leaf
      // tags, so these captured attributes are what the closetag handler reads.
      this.attributes = node.attributes || {};
      this.textContent = '';

      // Track detection (MidiTrack, AudioTrack, GroupTrack, ...)
      if (node.name.endsWith('Track') && node.name !== 'GroupTrackSlot') {
        this.currentTrack = {
          id: this.attributes.Id,
          type: node.name,
          name: '',
          color: '',
          clips: []
        };
      }

      // Clip detection (MidiClip/AudioClip), skipping slot containers
      if (node.name.endsWith('Clip') && !node.name.includes('Slot')) {
        this.currentClip = {
          id: this.attributes.Id,
          name: '',
          start: 0,
          end: 0,
          loop: false,
          notes: []
        };
      }
    });

    parser.on('text', (text) => {
      this.textContent += text.trim();
    });

    parser.on('closetag', (node) => {
      // Path still includes the element being closed; popped at the bottom.
      const path = this.currentPath.join('/');

      // Extract track name
      if (path.endsWith('Name/EffectiveName') && this.currentTrack) {
        this.currentTrack.name = this.attributes.Value || this.textContent;
      }

      // Extract track color
      if (path.endsWith('Color') && this.currentTrack && this.attributes.Value) {
        this.currentTrack.color = parseInt(this.attributes.Value, 10);
      }

      // Extract clip timing
      if (this.currentClip) {
        if (path.endsWith('CurrentStart') && this.attributes.Value) {
          this.currentClip.start = parseFloat(this.attributes.Value);
        }
        if (path.endsWith('CurrentEnd') && this.attributes.Value) {
          this.currentClip.end = parseFloat(this.attributes.Value);
        }
      }

      // Extract MIDI notes
      if (path.endsWith('KeyTrack/Notes/MidiNoteEvent')) {
        if (this.currentClip) {
          const note = {
            time: parseFloat(this.attributes.Time || 0),
            duration: parseFloat(this.attributes.Duration || 0.25),
            pitch: parseInt(this.attributes.Pitch || 60, 10),
            velocity: parseInt(this.attributes.Velocity || 100, 10)
          };
          this.currentClip.notes.push(note);
          this.notes.push({
            ...note,
            track: this.currentTrack?.name || 'Unknown',
            clipStart: this.currentClip.start
          });
        }
      }

      // Extract tempo
      if (path.endsWith('MasterTrack/DeviceChain/Mixer/Tempo/Manual') && this.attributes.Value) {
        this.tempo = parseFloat(this.attributes.Value);
      }

      // Clip completion: attach the clip to its owning track and the flat
      // clip list. BUGFIX: clips close before their enclosing track, so
      // attaching only at track completion (as before) meant currentClip was
      // already null there and track.clips/this.clips stayed empty.
      if (node.name.endsWith('Clip') && this.currentClip) {
        if (this.currentTrack) {
          this.currentTrack.clips.push(this.currentClip);
        }
        this.clips.push(this.currentClip);
        this.currentClip = null;
      }

      // Track completion
      if (node.name.endsWith('Track') && this.currentTrack) {
        // Defensive: adopt a clip that somehow never emitted its own closetag.
        if (this.currentClip) {
          this.currentTrack.clips.push(this.currentClip);
          this.clips.push(this.currentClip);
          this.currentClip = null;
        }
        this.tracks.push(this.currentTrack);
        this.currentTrack = null;
      }

      this.currentPath.pop();
    });

    return new Promise((resolve, reject) => {
      parser.on('error', reject);
      parser.on('end', () => {
        this.calculateTotalLength();
        resolve(this.getTimelineData());
      });

      stream.on('data', chunk => parser.write(chunk));
      stream.on('end', () => parser.close());
      stream.on('error', reject);
    });
  }

  // Derive the timeline length (in beats) from the latest note end or clip
  // end, with a 64-beat floor so an empty project still renders.
  calculateTotalLength() {
    let maxTime = 0;
    this.notes.forEach(note => {
      const noteEnd = note.clipStart + note.time + note.duration;
      if (noteEnd > maxTime) maxTime = noteEnd;
    });
    this.tracks.forEach(track => {
      track.clips.forEach(clip => {
        if (clip.end > maxTime) maxTime = clip.end;
      });
    });
    this.totalLength = Math.max(maxTime, 64); // Minimum 64 beats
  }

  // Snapshot of everything the visualizer needs.
  getTimelineData() {
    return {
      tracks: this.tracks,
      clips: this.clips,
      notes: this.notes,
      tempo: this.tempo,
      timeSignature: this.timeSignature,
      totalLength: this.totalLength
    };
  }
}
185
/**
 * Renders a live, looping terminal view of the parsed timeline and handles
 * keyboard transport controls (space = play/pause, q = quit).
 */
class LiveVisualizer {
  /**
   * @param {object} timelineData - Output of AbletonParser.getTimelineData().
   */
  constructor(timelineData) {
    this.data = timelineData;
    this.currentBeat = 0;    // playhead position, in beats
    this.isPlaying = false;
    this.startTime = null;   // wall-clock ms anchoring the playhead
    this.playbackRate = 1.0;
    this.fps = 30;           // render/update frequency

    // Visual settings
    this.terminalWidth = process.stdout.columns || 120;
    this.timelineWidth = Math.min(80, this.terminalWidth - 40);

    this.setupInput();
  }

  // Hook raw keyboard input (only when attached to a real terminal).
  setupInput() {
    if (process.stdin.isTTY) {
      process.stdin.setRawMode(true);
      process.stdin.resume();
      process.stdin.on('data', (key) => {
        if (key[0] === 3 || key.toString() === 'q') { // Ctrl+C or 'q'
          this.stop();
        } else if (key.toString() === ' ') {
          this.togglePlayback();
        }
      });
    }
  }

  // Begin playback and start the fixed-fps render loop.
  start() {
    console.clear();
    console.log(chalk.green('🎵 Ableton Live Timeline Viewer'));
    console.log(chalk.gray('Press SPACE to play/pause, Q to quit\n'));

    this.isPlaying = true;
    this.startTime = Date.now();
    this.render();

    this.interval = setInterval(() => {
      if (this.isPlaying) {
        this.updateCurrentBeat();
      }
      this.render();
    }, 1000 / this.fps);
  }

  // Advance the playhead from wall-clock time; loop back at the end.
  updateCurrentBeat() {
    const elapsed = (Date.now() - this.startTime) / 1000;
    const beatsPerSecond = (this.data.tempo / 60) * this.playbackRate;
    this.currentBeat = elapsed * beatsPerSecond;

    if (this.currentBeat >= this.data.totalLength) {
      this.currentBeat = 0;
      this.startTime = Date.now();
    }
  }

  // Pause/resume; on resume, rewind startTime so the playhead stays put.
  togglePlayback() {
    this.isPlaying = !this.isPlaying;
    if (this.isPlaying) {
      this.startTime = Date.now() - (this.currentBeat / ((this.data.tempo / 60) * this.playbackRate)) * 1000;
    }
  }

  // Tear down: stop the render loop, restore the terminal, and exit.
  stop() {
    if (this.interval) {
      clearInterval(this.interval);
    }
    // BUGFIX: undo the raw mode enabled in setupInput() so the shell is
    // left usable after exit.
    if (process.stdin.isTTY) {
      process.stdin.setRawMode(false);
    }
    console.log(chalk.yellow('\n👋 Goodbye!'));
    process.exit(0);
  }

  /**
   * Format a beat position as "MM:SS.B" (minutes:seconds.beat-in-measure).
   * @param {number} beats
   * @returns {string}
   */
  formatTime(beats) {
    const totalSeconds = beats / (this.data.tempo / 60);
    const minutes = Math.floor(totalSeconds / 60);
    const seconds = Math.floor(totalSeconds % 60);
    // Use the project time signature's numerator instead of assuming 4/4
    // (falls back to 4 when absent, matching the previous behavior).
    const beatsPerMeasure = this.data.timeSignature?.[0] ?? 4;
    const beatInMeasure = Math.floor(beats % beatsPerMeasure) + 1;
    return `${minutes.toString().padStart(2, '0')}:${seconds.toString().padStart(2, '0')}.${beatInMeasure}`;
  }

  // Notes sounding at the current playhead position.
  getActiveNotes() {
    return this.data.notes.filter(note => {
      const noteStart = note.clipStart + note.time;
      const noteEnd = noteStart + note.duration;
      return this.currentBeat >= noteStart && this.currentBeat <= noteEnd;
    });
  }

  // Notes whose onset fell within the last `windowBeats` beats.
  getRecentNotes(windowBeats = 0.5) {
    return this.data.notes.filter(note => {
      const noteStart = note.clipStart + note.time;
      return this.currentBeat >= noteStart && this.currentBeat <= noteStart + windowBeats;
    });
  }

  // Colored progress bar plus a percentage through the timeline.
  renderProgressBar() {
    const progress = this.currentBeat / this.data.totalLength;
    const filled = Math.floor(progress * this.timelineWidth);
    const empty = this.timelineWidth - filled;

    const bar = '█'.repeat(filled) + '░'.repeat(empty);
    const percentage = (progress * 100).toFixed(1);

    return chalk.cyan(bar) + ` ${percentage}%`;
  }

  // One colored line per named track showing pitch classes of recent notes.
  renderTrackActivity() {
    const recentNotes = this.getRecentNotes();
    const trackActivity = {};

    // Group recent notes by track
    recentNotes.forEach(note => {
      if (!trackActivity[note.track]) {
        trackActivity[note.track] = [];
      }
      trackActivity[note.track].push(note);
    });

    const lines = [];
    this.data.tracks.forEach(track => {
      if (!track.name) return;

      const activity = trackActivity[track.name] || [];
      const symbols = activity.map(note => {
        const pitchClass = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'][note.pitch % 12];
        return pitchClass;
      }).join('');

      // Map the Ableton color index onto a small terminal palette.
      const colorCode = [
        chalk.red, chalk.green, chalk.yellow, chalk.blue,
        chalk.magenta, chalk.cyan, chalk.white, chalk.gray
      ][track.color % 8] || chalk.white;

      const trackName = track.name.substring(0, 12).padEnd(12);
      const activityBar = symbols.padEnd(20).substring(0, 20);

      lines.push(colorCode(`${trackName} │ ${activityBar}`));
    });

    return lines;
  }

  // Scrolling list of recent note names (pitch class + octave).
  renderNoteStream() {
    const recentNotes = this.getRecentNotes(2).slice(-40); // Last 40 notes
    const stream = recentNotes.map(note => {
      const pitchClass = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'][note.pitch % 12];
      const octave = Math.floor(note.pitch / 12);
      return chalk.yellow(`${pitchClass}${octave}`);
    }).join(' ');

    return stream;
  }

  // Draw one full frame: header, transport, progress, tracks, note stream.
  render() {
    // Clear previous frame
    process.stdout.write('\x1B[2J\x1B[H');

    const timeDisplay = this.formatTime(this.currentBeat);
    const statusIcon = this.isPlaying ? '▶️' : '⏸️';
    const activeNotes = this.getActiveNotes();

    // Header
    console.log(chalk.bold.blue('🎵 Ableton Live Timeline Viewer'));
    console.log(chalk.gray('━'.repeat(this.terminalWidth)));

    // Time and status
    console.log(`${statusIcon} ${chalk.bold(timeDisplay)} | Tempo: ${this.data.tempo} BPM | Beat: ${this.currentBeat.toFixed(2)}`);

    // Progress bar
    console.log(`Progress: ${this.renderProgressBar()}`);

    // Active notes count
    console.log(chalk.green(`Active Notes: ${activeNotes.length}`));
    console.log('');

    // Track activity
    console.log(chalk.bold('Track Activity:'));
    console.log(chalk.gray('Track Name │ Recent Notes'));
    console.log(chalk.gray('─'.repeat(35)));

    const trackLines = this.renderTrackActivity();
    trackLines.forEach(line => console.log(line));

    // Note stream
    console.log('');
    console.log(chalk.bold('Recent Notes:'));
    console.log(this.renderNoteStream());

    // Controls
    console.log('');
    console.log(chalk.gray('Controls: SPACE = play/pause, Q = quit'));
  }
}
380
// Main execution: resolve the XML path (CLI arg wins, then the repo default,
// then the hard-coded fallback), parse it, and launch the live viewer.
async function main() {
  let xmlPath = process.argv[2];
  if (!xmlPath) {
    xmlPath = existsSync(DEFAULT_XML_PATH) ? DEFAULT_XML_PATH : FALLBACK_XML_PATH;
  }

  if (!existsSync(xmlPath)) {
    console.error(chalk.red('❌ XML file not found:'), xmlPath);
    console.log(chalk.yellow('Usage: node ableton-live-viewer.mjs [path-to-extracted.xml]'));
    process.exit(1);
  }

  try {
    const parser = new AbletonParser();
    const timelineData = await parser.parseXML(xmlPath);

    // Brief parse summary before the screen is taken over by the viewer.
    console.log(chalk.green('✅ Parsed successfully!'));
    console.log(chalk.gray(`Tracks: ${timelineData.tracks.length}`));
    console.log(chalk.gray(`Notes: ${timelineData.notes.length}`));
    console.log(chalk.gray(`Total Length: ${timelineData.totalLength.toFixed(2)} beats`));
    console.log('');

    new LiveVisualizer(timelineData).start();
  } catch (error) {
    console.error(chalk.red('❌ Error:'), error.message);
    process.exit(1);
  }
}

// Only run when executed directly, not when imported as a module.
if (import.meta.url === `file://${process.argv[1]}`) {
  main();
}