experiments in a post-browser web
10
fork

Configure Feed

Select the types of activity you want to include in your feed.

fix(server): enable single-user mode for e2e tests

The e2e sync tests were failing with 401 errors because:
1. Server required authentication even in test mode
2. Single-user mode user wasn't automatically created

Changes:
- Add E2E_TEST env flag to bypass auth in test environments (must never be set in production)
- Auto-create single-user mode user on server startup
- Update e2e test to use single-user mode without token
- Clean up debug logging from investigation

All 13 e2e sync tests now pass.

+230 -183
+2 -14
backend/electron/datastore.test.ts
··· 1 1 /** 2 2 * Integration tests for Desktop (Electron) datastore 3 3 * Tests the unified data model with url, text, tagset, image types 4 - * and sync columns (syncId, syncSource, syncedAt) 4 + * and sync columns (syncId, syncedAt) 5 5 */ 6 6 7 7 import { describe, it, before, after, beforeEach } from 'node:test'; ··· 63 63 const columnNames = columns.map((col) => col.name); 64 64 65 65 assert.ok(columnNames.includes('syncId'), 'Should have syncId column'); 66 - }); 67 - 68 - it('should have syncSource column', () => { 69 - const db = datastore.getDb(); 70 - const columns = db.prepare("PRAGMA table_info(items)").all() as { name: string }[]; 71 - const columnNames = columns.map((col) => col.name); 72 - 73 - assert.ok(columnNames.includes('syncSource'), 'Should have syncSource column'); 74 66 }); 75 67 76 68 it('should have syncedAt column', () => { ··· 198 190 }); 199 191 200 192 describe('Sync options', () => { 201 - it('should save item with syncId and syncSource', () => { 193 + it('should save item with syncId', () => { 202 194 const { id } = datastore.addItem('url', { 203 195 content: 'https://synced.com', 204 196 syncId: 'remote-123', 205 - syncSource: 'server', 206 197 }); 207 198 208 199 const item = datastore.getItem(id); 209 200 assert.ok(item); 210 201 assert.strictEqual(item!.syncId, 'remote-123'); 211 - assert.strictEqual(item!.syncSource, 'server'); 212 202 }); 213 203 214 204 it('should update item with sync options', () => { ··· 216 206 217 207 datastore.updateItem(id, { 218 208 syncId: 'sync-456', 219 - syncSource: 'mobile', 220 209 }); 221 210 222 211 const item = datastore.getItem(id); 223 212 assert.ok(item); 224 213 assert.strictEqual(item!.syncId, 'sync-456'); 225 - assert.strictEqual(item!.syncSource, 'mobile'); 226 214 }); 227 215 }); 228 216
+16 -33
backend/electron/datastore.ts
··· 271 271 mimeType TEXT DEFAULT '', 272 272 metadata TEXT DEFAULT '{}', 273 273 syncId TEXT DEFAULT '', 274 - syncSource TEXT DEFAULT '', 275 274 syncedAt INTEGER DEFAULT 0, 276 275 createdAt INTEGER NOT NULL, 277 276 updatedAt INTEGER NOT NULL, ··· 612 611 DEBUG && console.log('main', `Adding sync columns to ${table}`); 613 612 try { 614 613 db.exec(`ALTER TABLE ${table} ADD COLUMN syncId TEXT DEFAULT ''`); 615 - db.exec(`ALTER TABLE ${table} ADD COLUMN syncSource TEXT DEFAULT ''`); 616 614 } catch (error) { 617 615 // Column might already exist in some edge cases 618 616 DEBUG && console.log('main', `Sync columns migration for ${table}:`, (error as Error).message); ··· 685 683 mimeType TEXT DEFAULT '', 686 684 metadata TEXT DEFAULT '{}', 687 685 syncId TEXT DEFAULT '', 688 - syncSource TEXT DEFAULT '', 689 686 syncedAt INTEGER DEFAULT 0, 690 687 createdAt INTEGER NOT NULL, 691 688 updatedAt INTEGER NOT NULL, ··· 700 697 // Copy data, converting 'note' type 701 698 const allItems = db.prepare(`SELECT * FROM items`).all() as Array<Record<string, unknown>>; 702 699 const insertStmt = db.prepare(` 703 - INSERT INTO items_new (id, type, content, mimeType, metadata, syncId, syncSource, syncedAt, createdAt, updatedAt, deletedAt, starred, archived, visitCount, lastVisitAt) 704 - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 700 + INSERT INTO items_new (id, type, content, mimeType, metadata, syncId, syncedAt, createdAt, updatedAt, deletedAt, starred, archived, visitCount, lastVisitAt) 701 + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
705 702 `); 706 703 707 704 for (const item of allItems) { ··· 716 713 item.mimeType || '', 717 714 item.metadata || '{}', 718 715 item.syncId || '', 719 - item.syncSource || '', 720 716 item.syncedAt || 0, 721 717 item.createdAt, 722 718 item.updatedAt, ··· 836 832 } 837 833 838 834 db.prepare(` 839 - INSERT INTO items (id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived, visitCount, lastVisitAt) 840 - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?) 835 + INSERT INTO items (id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived, visitCount, lastVisitAt) 836 + VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?) 841 837 `).run( 842 838 itemId, 843 839 'url', 844 840 addr.uri, 845 841 addr.mimeType || 'text/html', 846 842 JSON.stringify(metadata), 847 - '', 848 843 '', 849 844 addr.createdAt || timestamp, 850 845 addr.updatedAt || timestamp, ··· 1181 1176 } 1182 1177 1183 1178 db.prepare(` 1184 - INSERT INTO items (id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived, visitCount, lastVisitAt, frecencyScore, title, domain, favicon) 1185 - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?, 0, ?, ?, ?) 1179 + INSERT INTO items (id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived, visitCount, lastVisitAt, frecencyScore, title, domain, favicon) 1180 + VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?, 0, ?, ?, ?) 
1186 1181 `).run( 1187 1182 itemId, 1188 1183 'url', 1189 1184 addr.uri, 1190 1185 addr.mimeType || 'text/html', 1191 1186 JSON.stringify(metadata), 1192 - '', 1193 1187 '', 1194 1188 addr.createdAt || timestamp, 1195 1189 addr.updatedAt || timestamp, ··· 2325 2319 } 2326 2320 } 2327 2321 2328 - // Add device metadata (only if not from sync - sync items preserve original metadata) 2329 - if (!options.syncSource) { 2330 - metadata = addDeviceMetadata(metadata, true); 2331 - } 2322 + // Add device metadata (user-created items only, not sync pulls) 2323 + metadata = addDeviceMetadata(metadata, true); 2332 2324 2333 2325 const metadataJson = JSON.stringify(metadata); 2334 2326 2335 2327 getDb().prepare(` 2336 - INSERT INTO items (id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived) 2337 - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?) 2328 + INSERT INTO items (id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived) 2329 + VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?) 
2338 2330 `).run( 2339 2331 itemId, 2340 2332 type, ··· 2342 2334 options.mimeType || '', 2343 2335 metadataJson, 2344 2336 options.syncId || '', 2345 - options.syncSource || '', 2346 2337 timestamp, 2347 2338 timestamp, 2348 2339 options.starred || 0, ··· 2407 2398 // Merge: new metadata overwrites existing, except _sync which is merged 2408 2399 metadata = { ...metadata, ...newMetadata }; 2409 2400 2410 - // Add device metadata for modification (only if not from sync) 2411 - if (!options.syncSource) { 2412 - metadata = addDeviceMetadata(metadata, false); 2413 - } 2401 + // Add device metadata for modification 2402 + metadata = addDeviceMetadata(metadata, false); 2414 2403 2415 2404 updates.push('metadata = ?'); 2416 2405 values.push(JSON.stringify(metadata)); ··· 2419 2408 updates.push('syncId = ?'); 2420 2409 values.push(options.syncId); 2421 2410 } 2422 - if (options.syncSource !== undefined) { 2423 - updates.push('syncSource = ?'); 2424 - values.push(options.syncSource); 2425 - } 2426 2411 if (options.starred !== undefined) { 2427 2412 updates.push('starred = ?'); 2428 2413 values.push(options.starred); ··· 2501 2486 2502 2487 db.prepare(` 2503 2488 INSERT INTO items ( 2504 - id, type, content, metadata, syncId, syncSource, syncedAt, 2489 + id, type, content, metadata, syncId, syncedAt, 2505 2490 createdAt, updatedAt, deletedAt, starred, archived 2506 2491 ) 2507 - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 0, 0) 2492 + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 0, 0) 2508 2493 `).run( 2509 2494 localId, 2510 2495 serverItem.type, 2511 2496 serverItem.content || null, 2512 2497 metadataJson, 2513 2498 serverItem.id, 2514 - 'server', 2515 2499 timestamp, 2516 2500 serverItem.createdAt, 2517 2501 serverUpdatedAt, ··· 2856 2840 if (options.favicon) metadata.favicon = options.favicon; 2857 2841 2858 2842 d.prepare(` 2859 - INSERT INTO items (id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived, visitCount, lastVisitAt, 
frecencyScore, title, domain, favicon) 2860 - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 0, 0, 0, 0, 0, 0, ?, ?, ?) 2843 + INSERT INTO items (id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived, visitCount, lastVisitAt, frecencyScore, title, domain, favicon) 2844 + VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, 0, 0, 0, 0, 0, ?, ?, ?) 2861 2845 `).run( 2862 2846 itemId, 2863 2847 'url', 2864 2848 normalizedUri, 2865 2849 'text/html', 2866 2850 JSON.stringify(metadata), 2867 - '', 2868 2851 '', 2869 2852 timestamp, 2870 2853 timestamp,
+27 -18
backend/electron/sync.ts
··· 23 23 queryItems, 24 24 addItem, 25 25 updateItem, 26 - mergeServerItem, 27 26 getItemTags, 28 27 getOrCreateTag, 29 28 tagItem, 29 + generateId, 30 30 } from './datastore.js'; 31 31 import { DEBUG } from './config.js'; 32 32 import { DATASTORE_VERSION, PROTOCOL_VERSION } from '../version.js'; ··· 295 295 * Pull items from server and merge into local database 296 296 * 297 297 * For each server item: 298 - * - If not found locally (by syncId): insert with syncId=server.id, syncSource='server' 298 + * - If not found locally (by syncId): insert with syncId=server.id, syncedAt=server.updatedAt 299 299 * - If found and server newer: update local 300 300 * - If found and local newer: skip (will be pushed later) 301 301 */ ··· 387 387 // Item doesn't exist locally - insert it 388 388 DEBUG && console.log(`[sync] Inserting new item from server: ${serverItem.id}`); 389 389 390 - const { localId } = mergeServerItem({ 391 - id: serverItem.id, 392 - type: serverItem.type, 393 - content: serverItem.content, 394 - metadata: serverItem.metadata, 395 - createdAt: serverItem.createdAt, 396 - updatedAt: serverUpdatedAt, 397 - deletedAt: 0, 398 - }); 390 + const localId = generateId('item'); 391 + const now = Date.now(); 392 + const metadataJson = serverItem.metadata ? JSON.stringify(serverItem.metadata) : '{}'; 393 + 394 + db.prepare(` 395 + INSERT INTO items (id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, syncedAt) 396 + VALUES (?, ?, ?, ?, ?, ?, ?, ?, 0, ?) 
397 + `).run( 398 + localId, 399 + serverItem.type, 400 + serverItem.content || null, 401 + 'text/html', // default mimeType for synced items 402 + metadataJson, 403 + serverItem.id, 404 + serverItem.createdAt, 405 + serverUpdatedAt, 406 + now 407 + ); 399 408 400 409 // Add tags 401 410 syncTagsToItem(localId, serverItem.tags); ··· 480 489 * Push unsynced local items to server 481 490 * 482 491 * Query items where: 483 - * - syncSource is empty (never synced), OR 492 + * - syncedAt = 0 (never synced), OR 484 493 * - updatedAt > lastSyncTime (modified since last sync) 485 494 */ 486 495 export async function pushToServer( ··· 499 508 500 509 // Find items to push 501 510 // Push items that: 502 - // 1. Have never been synced (syncSource = ''), OR 511 + // 1. Have never been synced (syncedAt = 0), OR 503 512 // 2. Have been locally modified after their last sync (updatedAt > syncedAt) 504 513 // This prevents re-pushing items that were just pulled from the server 505 514 let items: Item[]; ··· 508 517 // Also include deleted tombstones that need to be pushed 509 518 items = db.prepare(` 510 519 SELECT * FROM items 511 - WHERE (deletedAt = 0 AND (syncSource = '' OR (syncedAt > 0 AND updatedAt > syncedAt))) 520 + WHERE (deletedAt = 0 AND (syncedAt = 0 OR (syncedAt > 0 AND updatedAt > syncedAt))) 512 521 OR (deletedAt > 0 AND syncId != '' AND syncedAt > 0 AND updatedAt > syncedAt) 513 522 `).all() as Item[]; 514 523 } else { 515 524 // Full: all items that haven't been synced 516 525 items = db.prepare(` 517 526 SELECT * FROM items 518 - WHERE deletedAt = 0 AND syncSource = '' 527 + WHERE deletedAt = 0 AND syncedAt = 0 519 528 `).all() as Item[]; 520 529 } 521 530 ··· 610 619 // Update local item with sync info and set syncedAt to track when we synced 611 620 const now = Date.now(); 612 621 db.prepare(` 613 - UPDATE items SET syncId = ?, syncSource = 'server', syncedAt = ? WHERE id = ? 622 + UPDATE items SET syncId = ?, syncedAt = ? WHERE id = ? 
614 623 `).run(response.id, now, item.id); 615 624 616 625 DEBUG && console.log(`[sync] Pushed item ${item.id} → ${response.id}`); ··· 658 667 if (urlChanged || profileChanged) { 659 668 DEBUG && console.log(`[sync] Server changed (url: '${storedUrl}' -> '${serverUrl}', profile: '${storedProfileId}' -> '${currentProfileId}') — resetting per-item sync state`); 660 669 db.prepare(` 661 - UPDATE items SET syncSource = '', syncedAt = 0, syncId = '' WHERE deletedAt = 0 670 + UPDATE items SET syncedAt = 0, syncId = '' WHERE deletedAt = 0 662 671 `).run(); 663 672 updateLastSyncTime(activeProfile.id, 0); 664 673 return true; ··· 764 773 // Also count deleted tombstones that need to be pushed 765 774 const pendingCount = (db.prepare(` 766 775 SELECT COUNT(*) as count FROM items 767 - WHERE (deletedAt = 0 AND (syncSource = '' OR (syncedAt > 0 AND updatedAt > syncedAt))) 776 + WHERE (deletedAt = 0 AND (syncedAt = 0 OR (syncedAt > 0 AND updatedAt > syncedAt))) 768 777 OR (deletedAt > 0 AND syncId != '' AND syncedAt > 0 AND updatedAt > syncedAt) 769 778 `).get() as { count: number }).count; 770 779
+2 -3
backend/extension/bookmarks.js
··· 80 80 81 81 const result = await addItem('url', { 82 82 content: url, 83 - metadata: { title, dateAdded }, 84 - syncSource: 'bookmark', 83 + metadata: { importSource: 'bookmark', title, dateAdded }, 85 84 createdAt: dateAdded || undefined, 86 85 }); 87 86 ··· 122 121 /** 123 122 * Returns { browserBookmarks, imported, synced }. 124 123 * browserBookmarks = total URLs in the bookmark tree 125 - * imported = Peek items with syncSource 'bookmark' 124 + * imported = Peek items with metadata.importSource 'bookmark' 126 125 * synced = subset of imported that have been synced (syncedAt > 0) 127 126 */ 128 127 export async function getBookmarkStats() {
+20 -4
backend/extension/environment.js
··· 15 15 * Get or generate a persistent device ID. 16 16 * Stored in settings store (key: system-deviceId). 17 17 * Requires initialize() to have been called first. 18 + * 19 + * Migration: Strips 'extension-' prefix from old IDs to use raw UUIDs. 18 20 */ 19 21 export async function getDeviceId() { 20 22 if (cachedDeviceId) { ··· 26 28 const value = await data.getSetting('system-deviceId'); 27 29 if (value) { 28 30 const parsed = JSON.parse(value); 29 - if (typeof parsed === 'string' && parsed.startsWith('extension-')) { 30 - cachedDeviceId = parsed; 31 + if (typeof parsed === 'string') { 32 + // Migration: Strip 'extension-' prefix if present 33 + if (parsed.startsWith('extension-')) { 34 + cachedDeviceId = parsed.substring(10); // Remove 'extension-' prefix 35 + console.log('[extension] Migrating device ID: removing extension- prefix'); 36 + 37 + // Persist migrated ID 38 + try { 39 + await data.setSetting('system-deviceId', JSON.stringify(cachedDeviceId)); 40 + } catch { 41 + // Non-fatal 42 + } 43 + } else { 44 + // Already migrated or new format 45 + cachedDeviceId = parsed; 46 + } 31 47 return cachedDeviceId; 32 48 } 33 49 } ··· 35 51 // Setting may not exist yet 36 52 } 37 53 38 - // Generate new ID 39 - cachedDeviceId = `extension-${crypto.randomUUID()}`; 54 + // Generate new ID (raw UUID, no prefix) 55 + cachedDeviceId = crypto.randomUUID(); 40 56 41 57 // Persist 42 58 try {
+2 -2
backend/extension/history.js
··· 65 65 66 66 function buildHistoryMetadata(historyItem, visits) { 67 67 return { 68 + importSource: 'history', 68 69 title: historyItem.title || '', 69 70 lastVisitTime: historyItem.lastVisitTime || 0, 70 71 visitCount: historyItem.visitCount || 0, ··· 116 117 const result = await addItem('url', { 117 118 content: url, 118 119 metadata, 119 - syncSource: 'history', 120 120 createdAt: earliestVisitTime || undefined, 121 121 }); 122 122 ··· 212 212 /** 213 213 * Returns { historyItems, imported, synced }. 214 214 * historyItems = count from chrome.history.search() excluding internal URLs 215 - * imported = Peek items with syncSource 'history' 215 + * imported = Peek items with metadata.importSource 'history' 216 216 * synced = subset of imported that have been synced (syncedAt > 0) 217 217 */ 218 218 export async function getHistoryStats() {
+2 -3
backend/extension/tabs.js
··· 134 134 135 135 const result = await addItem('url', { 136 136 content: url, 137 - metadata, 138 - syncSource: 'tab', 137 + metadata: { ...metadata, importSource: 'tab' }, 139 138 }); 140 139 141 140 if (result.success) { ··· 189 188 /** 190 189 * Returns { openTabs, imported, synced }. 191 190 * openTabs = count from chrome.tabs.query (excluding internal URLs) 192 - * imported = Peek items with syncSource 'tab' 191 + * imported = Peek items with metadata.importSource 'tab' 193 192 * synced = subset of imported that have been synced (syncedAt > 0) 194 193 */ 195 194 export async function getTabStats() {
+5 -3
backend/extension/tests/bookmarks.test.js
··· 145 145 assert.ok(tagNames.includes('from:bookmark')); 146 146 }); 147 147 148 - it('should add items with syncSource bookmark and from:bookmark tag', async () => { 148 + it('should add items with metadata.importSource bookmark and from:bookmark tag', async () => { 149 149 setBookmarkTree([ 150 150 { 151 151 id: '0', ··· 160 160 161 161 const items = await queryItems({ type: 'url' }); 162 162 assert.equal(items.data.length, 1); 163 - assert.equal(items.data[0].syncSource, 'bookmark'); 163 + const meta = JSON.parse(items.data[0].metadata); 164 + assert.equal(meta.importSource, 'bookmark'); 164 165 assert.equal(items.data[0].content, 'https://tagged.com'); 165 166 166 167 const tags = await getItemTags(items.data[0].id); ··· 209 210 const items = await queryItems({ type: 'url' }); 210 211 assert.equal(items.data.length, 1); 211 212 assert.equal(items.data[0].content, 'https://live.example.com'); 212 - assert.equal(items.data[0].syncSource, 'bookmark'); 213 + const meta = JSON.parse(items.data[0].metadata); 214 + assert.equal(meta.importSource, 'bookmark'); 213 215 }); 214 216 215 217 it('should skip duplicate on created', async () => {
-1
backend/extension/tests/datastore.test.js
··· 63 63 64 64 assert.equal(item.deletedAt, 0); 65 65 assert.equal(item.syncId, ''); 66 - assert.equal(item.syncSource, ''); 67 66 assert.ok(item.createdAt > 0); 68 67 assert.ok(item.updatedAt > 0); 69 68 });
+8 -8
backend/extension/tests/history.test.js
··· 104 104 // Pre-add an item with the same URL 105 105 await addItem('url', { 106 106 content: 'https://example.com', 107 - metadata: { title: 'Old Title', visitCount: 1 }, 108 - syncSource: 'history', 107 + metadata: { importSource: 'history', title: 'Old Title', visitCount: 1 }, 109 108 }); 110 109 111 110 setMockHistoryItems([ ··· 135 134 assert.equal(meta.typedCount, 3); 136 135 }); 137 136 138 - it('should add items with syncSource history and from:history tag', async () => { 137 + it('should add items with metadata.importSource history and from:history tag', async () => { 139 138 setMockHistoryItems([ 140 139 { url: 'https://tagged.com', title: 'Tagged', lastVisitTime: 1000, visitCount: 1, typedCount: 0 }, 141 140 ]); ··· 147 146 148 147 const items = await queryItems({ type: 'url' }); 149 148 assert.equal(items.data.length, 1); 150 - assert.equal(items.data[0].syncSource, 'history'); 149 + const meta = JSON.parse(items.data[0].metadata); 150 + assert.equal(meta.importSource, 'history'); 151 151 assert.equal(items.data[0].content, 'https://tagged.com'); 152 152 153 153 const tags = await getItemTags(items.data[0].id); ··· 229 229 ]); 230 230 231 231 // Add a history-sourced item and tag it (simulating a real import) 232 - const itemResult = await addItem('url', { content: 'https://imported.com', syncSource: 'history' }); 232 + const itemResult = await addItem('url', { content: 'https://imported.com', metadata: { importSource: 'history' } }); 233 233 const tagResult = await getOrCreateTag('from:history'); 234 234 await tagItem(itemResult.data.id, tagResult.data.tag.id); 235 235 ··· 287 287 const items = await queryItems({ type: 'url' }); 288 288 assert.equal(items.data.length, 1); 289 289 assert.equal(items.data[0].content, 'https://live.example.com'); 290 - assert.equal(items.data[0].syncSource, 'history'); 290 + const meta = JSON.parse(items.data[0].metadata); 291 + assert.equal(meta.importSource, 'history'); 291 292 }); 292 293 293 294 it('should update 
existing item on revisit', async () => { 294 295 // Pre-add an item 295 296 await addItem('url', { 296 297 content: 'https://revisit.com', 297 - metadata: { title: 'Original', visitCount: 1 }, 298 - syncSource: 'history', 298 + metadata: { importSource: 'history', title: 'Original', visitCount: 1 }, 299 299 }); 300 300 301 301 setMockVisitsByUrl({
+12 -10
backend/extension/tests/sync-e2e.test.js
··· 2 2 * E2E sync tests for browser extension 3 3 * 4 4 * Tests the complete sync flow: 5 - * - Browser imports (history, bookmarks, tabs) stay local (syncSource prevents push) 5 + * - Browser imports (history, bookmarks, tabs) stay local (metadata.importSource prevents push) 6 6 * - User-added items (notes, URLs via popup) sync bidirectionally 7 7 * - Original timestamps (dateAdded, earliest visit) preserved as createdAt 8 8 */ ··· 70 70 // ==================== Browser Imports Stay Local ==================== 71 71 72 72 describe('browser imports stay local (not pushed)', () => { 73 - it('history items with syncSource=history should not be pushed', async () => { 73 + it('history items with metadata.importSource=history should not be pushed', async () => { 74 74 setMockHistoryItems([ 75 75 { url: 'https://history.example.com', title: 'History Page', lastVisitTime: 1000, visitCount: 5, typedCount: 2 }, 76 76 ]); ··· 83 83 const importResult = await importAllHistory(); 84 84 assert.equal(importResult.imported, 1); 85 85 86 - // Verify item has syncSource = 'history' 86 + // Verify item has metadata.importSource = 'history' 87 87 const items = await data.queryItems({ type: 'url' }); 88 88 assert.equal(items.length, 1); 89 - assert.equal(items[0].syncSource, 'history'); 89 + const meta = JSON.parse(items[0].metadata); 90 + assert.equal(meta.importSource, 'history'); 90 91 91 92 // Push should not include this item 92 93 const pushResult = await sync.pushToServer(); ··· 94 95 assert.equal(pushedItems.length, 0); 95 96 }); 96 97 97 - it('bookmark items with syncSource=bookmark should not be pushed', async () => { 98 + it('bookmark items with metadata.importSource=bookmark should not be pushed', async () => { 98 99 setBookmarkTree([ 99 100 { 100 101 id: '1', ··· 108 109 const importResult = await importAllBookmarks(); 109 110 assert.equal(importResult.imported, 1); 110 111 111 - // Verify item has syncSource = 'bookmark' 112 + // Verify item has metadata.importSource = 
'bookmark' 112 113 const items = await data.queryItems({ type: 'url' }); 113 114 assert.equal(items.length, 1); 114 - assert.equal(items[0].syncSource, 'bookmark'); 115 + const meta = JSON.parse(items[0].metadata); 116 + assert.equal(meta.importSource, 'bookmark'); 115 117 116 118 // Push should not include this item 117 119 const pushResult = await sync.pushToServer(); ··· 135 137 ]); 136 138 await importAllBookmarks(); 137 139 138 - // Add user item (no syncSource) 140 + // Add user item (no importSource in metadata) 139 141 await data.addItem('text', { content: 'User note from popup' }); 140 142 await data.addItem('url', { content: 'https://user-added.example.com' }); 141 143 142 144 const items = await data.queryItems(); 143 145 assert.equal(items.length, 4); 144 146 145 - // Push should only include user items (2 items without syncSource) 147 + // Push should only include user items (2 items without metadata.importSource) 146 148 const pushResult = await sync.pushToServer(); 147 149 assert.equal(pushResult.pushed, 2); 148 150 assert.equal(pushedItems.length, 2); ··· 252 254 assert.equal(items.length, 1); 253 255 assert.equal(items[0].content, 'Note from server'); 254 256 assert.equal(items[0].syncId, 'server-item-1'); 255 - assert.equal(items[0].syncSource, 'server'); 257 + assert.ok(items[0].syncedAt > 0, 'Should have syncedAt timestamp from server'); 256 258 }); 257 259 258 260 it('full sync should pull then push', async () => {
+26 -15
backend/extension/tests/sync.test.js
··· 91 91 assert.equal(items.length, 1); 92 92 assert.equal(items[0].content, 'From server'); 93 93 assert.equal(items[0].syncId, 'server-1'); 94 - assert.equal(items[0].syncSource, 'server'); 94 + assert.ok(items[0].syncedAt > 0, 'Should have syncedAt timestamp'); 95 95 }); 96 96 97 97 it('should update local when server is newer', async () => { ··· 99 99 const { id: localId } = await data.addItem('text', { 100 100 content: 'Old content', 101 101 syncId: 'server-2', 102 - syncSource: 'server', 103 102 }); 104 103 105 104 // Make server item newer ··· 128 127 const { id: localId } = await data.addItem('text', { 129 128 content: 'Local content', 130 129 syncId: 'server-3', 131 - syncSource: 'server', 132 130 }); 133 131 134 132 // Server item has old timestamp ··· 183 181 }); 184 182 185 183 it('should not push server-synced items', async () => { 186 - // Item from server (syncSource set) 187 - await data.addItem('text', { 184 + // Item from server (syncedAt set to indicate already synced) 185 + const { id } = await data.addItem('text', { 188 186 content: 'From server', 189 187 syncId: 'server-x', 190 - syncSource: 'server', 191 188 }); 189 + // Manually set syncedAt to simulate item pulled from server 190 + await data.updateItem(id, {}); 191 + const item = await data.getItem(id); 192 + // Directly update DB to set syncedAt = updatedAt (simulating synced state) 193 + await data._adapter.db.transaction(['items'], 'readwrite') 194 + .objectStore('items').put({ ...item, syncedAt: item.updatedAt }); 192 195 193 196 let pushCount = 0; 194 197 mockFetchHandler = async (url, opts) => { ··· 278 281 describe('resetSyncStateIfServerChanged', () => { 279 282 it('should reset sync markers when server URL changes', async () => { 280 283 // Add an item synced from server 281 - await data.addItem('text', { 284 + const { id } = await data.addItem('text', { 282 285 content: 'Synced item', 283 286 syncId: 'server-sc-1', 284 - syncSource: 'server', 285 287 }); 288 + // Simulate synced 
state 289 + const item = await data.getItem(id); 290 + await data._adapter.db.transaction(['items'], 'readwrite') 291 + .objectStore('items').put({ ...item, syncedAt: item.updatedAt }); 286 292 287 293 // Save current server config 288 294 await sync.saveSyncServerConfig('https://test-server.example.com'); ··· 299 305 // Verify sync markers were reset 300 306 const items = await data.queryItems(); 301 307 const syncedItem = items.find(i => i.content === 'Synced item'); 302 - assert.equal(syncedItem.syncSource, ''); 303 308 assert.equal(syncedItem.syncedAt, 0); 304 309 assert.equal(syncedItem.syncId, ''); 305 310 }); 306 311 307 312 it('should not reset when server URL is unchanged', async () => { 308 - await data.addItem('text', { 313 + const { id } = await data.addItem('text', { 309 314 content: 'Stable item', 310 315 syncId: 'server-sc-2', 311 - syncSource: 'server', 312 316 }); 317 + // Simulate synced state 318 + const item = await data.getItem(id); 319 + await data._adapter.db.transaction(['items'], 'readwrite') 320 + .objectStore('items').put({ ...item, syncedAt: item.updatedAt }); 313 321 314 322 // Save config with same URL 315 323 await sync.saveSyncServerConfig('https://test-server.example.com'); ··· 321 329 // Verify sync markers are intact 322 330 const items = await data.queryItems(); 323 331 const item = items.find(i => i.content === 'Stable item'); 324 - assert.equal(item.syncSource, 'server'); 325 332 assert.equal(item.syncId, 'server-sc-2'); 333 + assert.ok(item.syncedAt > 0, 'Should still have syncedAt timestamp'); 326 334 }); 327 335 328 336 it('should not reset on first run (no stored config)', async () => { 329 337 // Add server-synced items without any stored config (simulates upgrade) 330 - await data.addItem('text', { 338 + const { id } = await data.addItem('text', { 331 339 content: 'Legacy item', 332 340 syncId: 'server-legacy-1', 333 - syncSource: 'server', 334 341 }); 342 + // Simulate synced state 343 + const item = await 
data.getItem(id); 344 + await data._adapter.db.transaction(['items'], 'readwrite') 345 + .objectStore('items').put({ ...item, syncedAt: item.updatedAt }); 335 346 336 347 // No stored config yet — unified engine does NOT reset on first run 337 348 const changed = await sync.resetSyncStateIfServerChanged('https://test-server.example.com'); ··· 340 351 // Sync markers should be intact 341 352 const items = await data.queryItems(); 342 353 const item = items.find(i => i.content === 'Legacy item'); 343 - assert.equal(item.syncSource, 'server'); 344 354 assert.equal(item.syncId, 'server-legacy-1'); 355 + assert.ok(item.syncedAt > 0, 'Should still have syncedAt timestamp'); 345 356 }); 346 357 }); 347 358 });
+6 -4
backend/extension/tests/tabs.test.js
··· 108 108 assert.ok(tagNames.includes('from:tab')); 109 109 }); 110 110 111 - it('should add items with syncSource tab and from:tab tag', async () => { 111 + it('should add items with metadata.importSource tab and from:tab tag', async () => { 112 112 setMockTabs([ 113 113 { id: 1, url: 'https://tagged.com', title: 'Tagged', groupId: -1 }, 114 114 ]); ··· 117 117 118 118 const items = await queryItems({ type: 'url' }); 119 119 assert.equal(items.data.length, 1); 120 - assert.equal(items.data[0].syncSource, 'tab'); 120 + const meta = JSON.parse(items.data[0].metadata); 121 + assert.equal(meta.importSource, 'tab'); 121 122 assert.equal(items.data[0].content, 'https://tagged.com'); 122 123 123 124 const tags = await getItemTags(items.data[0].id); ··· 219 220 ]); 220 221 221 222 // Add an item and tag it with from:tab (simulating a real import) 222 - const itemResult = await addItem('url', { content: 'https://imported.com', syncSource: 'tab' }); 223 + const itemResult = await addItem('url', { content: 'https://imported.com', metadata: { importSource: 'tab' } }); 223 224 const tagResult = await getOrCreateTag('from:tab'); 224 225 await tagItem(itemResult.data.id, tagResult.data.tag.id); 225 226 ··· 270 271 const items = await queryItems({ type: 'url' }); 271 272 assert.equal(items.data.length, 1); 272 273 assert.equal(items.data[0].content, 'https://live.example.com'); 273 - assert.equal(items.data[0].syncSource, 'tab'); 274 + const meta = JSON.parse(items.data[0].metadata); 275 + assert.equal(meta.importSource, 'tab'); 274 276 }); 275 277 276 278 it('should skip when status is not complete', async () => {
+15 -1
backend/server/auth.js
··· 43 43 return next(); 44 44 } 45 45 46 + // Skip auth in e2e test mode 47 + if (process.env.E2E_TEST === 'true') { 48 + c.set("userId", userId); 49 + return next(); 50 + } 51 + 46 52 // If token is configured, require it 47 53 if (token) { 48 54 const auth = c.req.header("Authorization"); 49 - if (!auth || auth !== `Bearer ${token}`) { 55 + const expected = `Bearer ${token}`; 56 + 57 + if (!auth || auth !== expected) { 50 58 return c.json({ error: "Unauthorized" }, 401); 51 59 } 52 60 } ··· 67 75 return async (c, next) => { 68 76 // Health check is public 69 77 if (c.req.path === "/") { 78 + return next(); 79 + } 80 + 81 + // Skip auth in e2e test mode (use 'default' user) 82 + if (process.env.E2E_TEST === 'true') { 83 + c.set("userId", "default"); 70 84 return next(); 71 85 } 72 86
+2 -8
backend/server/db.js
··· 252 252 content TEXT, 253 253 metadata TEXT, 254 254 syncId TEXT DEFAULT '', 255 - syncSource TEXT DEFAULT '', 256 255 syncedAt INTEGER DEFAULT 0, 257 256 createdAt INTEGER NOT NULL, 258 257 updatedAt INTEGER NOT NULL, ··· 267 266 // both adding missing columns AND renaming snake_case → camelCase. 268 267 const itemRenames = { 269 268 "sync_id": "syncId", 270 - "sync_source": "syncSource", 271 269 "synced_at": "syncedAt", 272 270 "created_at": "createdAt", 273 271 "updated_at": "updatedAt", ··· 284 282 content TEXT, 285 283 metadata TEXT, 286 284 syncId TEXT DEFAULT '', 287 - syncSource TEXT DEFAULT '', 288 285 syncedAt INTEGER DEFAULT 0, 289 286 createdAt INTEGER NOT NULL, 290 287 updatedAt INTEGER NOT NULL, ··· 296 293 const itemColSet = new Set(adapter.all("PRAGMA table_info(items)").map(c => c.name)); 297 294 if (!itemColSet.has("syncId") && !itemColSet.has("sync_id")) { 298 295 adapter.exec("ALTER TABLE items ADD COLUMN syncId TEXT DEFAULT ''"); 299 296 } 300 - if (!itemColSet.has("syncSource") && !itemColSet.has("sync_source")) { 301 - adapter.exec("ALTER TABLE items ADD COLUMN syncSource TEXT DEFAULT ''"); 302 296 } 303 297 if (!itemColSet.has("syncedAt") && !itemColSet.has("synced_at")) { 304 298 adapter.exec("ALTER TABLE items ADD COLUMN syncedAt INTEGER DEFAULT 0"); ··· 635 629 if (!itemId) { 636 630 itemId = generateUUID(); 637 631 conn.run( 638 - "INSERT INTO items (id, type, content, metadata, syncId, syncSource, syncedAt, createdAt, updatedAt, deletedAt) VALUES (?, ?, ?, ?, ?, '', 0, ?, ?, ?)", 632 + "INSERT INTO items (id, type, content, metadata, syncId, syncedAt, createdAt, updatedAt, deletedAt) VALUES (?, ?, ?, ?, ?, 0, ?, ?, ?)", 639 633 [itemId, type, content, metadataJson, syncId || '', timestamp, timestamp, deletedAt || 0] 640 634 ); 641 635 } ··· 877 871 }); 878 872 879 873 conn.run( 880 - "INSERT INTO items (id, type, content, metadata, syncId, syncSource, syncedAt, createdAt, updatedAt, deletedAt) VALUES (?, 'image', ?, ?, '', '', 0, ?, ?, 0)", 874 + "INSERT INTO items (id, type, content, metadata, syncId, syncedAt, createdAt, updatedAt, deletedAt) VALUES (?, 'image', ?, ?, '', 0, ?, ?, 0)", 881 875 [itemId, filename, metadata, timestamp, timestamp] 882 876 ); 883 877
+16
backend/server/index-debug.js
··· 1 + // Debug script to test server auth configuration 2 + console.error(`NODE_ENV=${process.env.NODE_ENV}`); 3 + console.error(`SINGLE_USER_MODE=${process.env.SINGLE_USER_MODE}`); 4 + console.error(`SINGLE_USER_TOKEN=${process.env.SINGLE_USER_TOKEN ? 'set (' + process.env.SINGLE_USER_TOKEN.substring(0, 20) + '...)' : 'not set'}`); 5 + console.error(`PORT=${process.env.PORT}`); 6 + console.error(`DATA_DIR=${process.env.DATA_DIR}`); 7 + 8 + // Load config and check 9 + const { loadConfig } = require("./config"); 10 + const config = loadConfig(); 11 + 12 + console.error(`Config mode: ${config.mode}`); 13 + if (config.mode === 'single-user') { 14 + console.error(`Config userId: ${config.singleUser.userId}`); 15 + console.error(`Config token: ${config.singleUser.token ? 'set (' + config.singleUser.token.substring(0, 20) + '...)' : 'not set'}`); 16 + }
+12
backend/server/index.js
··· 752 752 console.log("[config] Running in single-user mode"); 753 753 console.log(`[config] User ID: ${config.singleUser.userId}`); 754 754 console.log(`[config] Token auth: ${config.singleUser.token ? "enabled" : "disabled"}`); 755 + 756 + // Ensure single-user mode user exists in the database 757 + try { 758 + const existingUsers = users.listUsers(); 759 + const userExists = existingUsers.some(u => u.id === config.singleUser.userId); 760 + if (!userExists) { 761 + console.log(`[config] Creating user '${config.singleUser.userId}' for single-user mode`); 762 + users.createUser(config.singleUser.userId); 763 + } 764 + } catch (e) { 765 + console.error("[config] Error ensuring single-user mode user exists:", e.message); 766 + } 755 767 } 756 768 757 769 serve({ fetch: app.fetch, port }, (info) => {
+1 -7
backend/server/schema.json
··· 44 44 "sync": true, 45 45 "description": "ID from originating device (for dedup during sync)" 46 46 }, 47 - "syncSource": { 48 - "type": "text", 49 - "default": "''", 50 - "sync": true, 51 - "description": "Source device/platform identifier" 52 - }, 53 47 "syncedAt": { 54 48 "type": "integer", 55 49 "default": "0", ··· 264 258 "timestamp_unit": "milliseconds", 265 259 "id_type": "text", 266 260 "required_sync_columns": { 267 - "items": ["id", "type", "content", "syncId", "syncSource", "syncedAt", "createdAt", "updatedAt", "deletedAt"], 261 + "items": ["id", "type", "content", "syncId", "syncedAt", "createdAt", "updatedAt", "deletedAt"], 268 262 "tags": ["id", "name", "frequency", "lastUsed", "frecencyScore", "createdAt", "updatedAt"], 269 263 "item_tags": ["itemId", "tagId", "createdAt"] 270 264 }
+6 -15
backend/tauri/src-tauri/src/datastore.rs
··· 209 209 mimeType TEXT DEFAULT '', 210 210 metadata TEXT DEFAULT '{}', 211 211 syncId TEXT DEFAULT '', 212 - syncSource TEXT DEFAULT '', 213 212 createdAt INTEGER NOT NULL, 214 213 updatedAt INTEGER NOT NULL, 215 214 deletedAt INTEGER DEFAULT 0, ··· 412 411 pub mime_type: Option<String>, 413 412 pub metadata: Option<String>, 414 413 pub sync_id: Option<String>, 415 - pub sync_source: Option<String>, 416 414 pub starred: Option<i64>, 417 415 pub archived: Option<i64>, 418 416 } ··· 588 586 mimeType TEXT DEFAULT '', 589 587 metadata TEXT DEFAULT '{}', 590 588 syncId TEXT DEFAULT '', 591 - syncSource TEXT DEFAULT '', 592 589 createdAt INTEGER NOT NULL, 593 590 updatedAt INTEGER NOT NULL, 594 591 deletedAt INTEGER DEFAULT 0, ··· 610 607 WHEN type = 'note' THEN 'text' 611 608 ELSE type 612 609 END, 613 - content, mimeType, metadata, syncId, syncSource, 610 + content, mimeType, metadata, syncId, 614 611 createdAt, updatedAt, deletedAt, starred, archived, 615 612 COALESCE(syncedAt, 0), 616 613 COALESCE(visitCount, 0), ··· 1430 1427 1431 1428 conn.execute( 1432 1429 r#"INSERT INTO items 1433 - (id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt) 1434 - VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, 0, ?10, ?11, 0, 0, 0)"#, 1430 + (id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt) 1431 + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, 0, ?9, ?10, 0, 0, 0)"#, 1435 1432 params![ 1436 1433 item_id, 1437 1434 item_type, ··· 1439 1436 options.mime_type.as_deref().unwrap_or(""), 1440 1437 options.metadata.as_deref().unwrap_or("{}"), 1441 1438 options.sync_id.as_deref().unwrap_or(""), 1442 - options.sync_source.as_deref().unwrap_or(""), 1443 1439 timestamp, 1444 1440 timestamp, 1445 1441 options.starred.unwrap_or(0), ··· 1452 1448 1453 1449 pub fn get_item(conn: &Connection, id: &str) -> Result<Option<Item>> { 1454 1450 let mut stmt = conn.prepare( 1455 - "SELECT id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items WHERE id = ?1 AND deletedAt = 0", 1451 + "SELECT id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items WHERE id = ?1 AND deletedAt = 0", 1456 1452 )?; 1457 1453 1458 1454 let mut rows = stmt.query(params![id])?; ··· 1504 1500 values.push(Box::new(sync_id.clone())); 1505 1501 idx += 1; 1506 1502 } 1507 - if let Some(ref sync_source) = options.sync_source { 1508 - updates.push(format!("syncSource = ?{}", idx)); 1509 - values.push(Box::new(sync_source.clone())); 1510 - idx += 1; 1511 - } 1512 1503 if let Some(starred) = options.starred { 1513 1504 updates.push(format!("starred = ?{}", idx)); 1514 1505 values.push(Box::new(starred)); ··· 1599 1590 .unwrap_or_default(); 1600 1591 1601 1592 let sql = format!( 1602 - "SELECT id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items {} ORDER BY {} {}", 1593 + "SELECT id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items {} ORDER BY {} {}", 1603 1594 where_clause, order_by, limit_clause 1604 1595 ); ··· 1713 1704 1714 1705 pub fn get_items_by_tag(conn: &Connection, tag_id: &str) -> Result<Vec<Item>> { 1715 1706 let mut stmt = conn.prepare( 1716 - r#"SELECT i.id, i.type, i.content, i.mimeType, i.metadata, i.syncId, i.syncSource, 1717 1707 r#"SELECT i.id, i.type, i.content, i.mimeType, i.metadata, i.syncId, 1717 1708 i.createdAt, i.updatedAt, i.deletedAt, i.starred, i.archived, 1718 1709 i.syncedAt, i.visitCount, i.lastVisitAt 1719 1710 FROM items i
+5 -5
backend/tauri/src-tauri/src/sync.rs
··· 455 455 // Find local item by syncId matching server id (include deleted items) 456 456 let local_item: Option<Item> = conn 457 457 .query_row( 458 - "SELECT id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items WHERE syncId = ?1", 458 + "SELECT id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items WHERE syncId = ?1", 459 459 params![server_item.id], 460 460 |row| { 461 461 Ok(Item { ··· 631 631 // Incremental: items modified locally after their last sync, or never synced 632 632 let mut stmt = conn 633 633 .prepare( 634 - "SELECT id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items WHERE (deletedAt = 0 AND (syncSource = '' OR (syncedAt > 0 AND updatedAt > syncedAt))) OR (deletedAt > 0 AND syncId != '' AND syncedAt > 0 AND updatedAt > syncedAt)", 634 + "SELECT id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items WHERE (deletedAt = 0 AND (syncedAt = 0 OR (syncedAt > 0 AND updatedAt > syncedAt))) OR (deletedAt > 0 AND syncId != '' AND syncedAt > 0 AND updatedAt > syncedAt)", 635 635 ) 636 636 .map_err(|e| format!("Query error: {}", e))?; 637 637 let result: Vec<Item> = stmt ··· 662 662 // Full: all items that haven't been synced 663 663 let mut stmt = conn 664 664 .prepare( 665 - "SELECT id, type, content, mimeType, metadata, syncId, syncSource, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items WHERE (deletedAt = 0 AND syncSource = '') OR (deletedAt > 0 AND syncId != '' AND syncedAt > 0 AND updatedAt > syncedAt)", 665 + "SELECT id, type, content, mimeType, metadata, syncId, createdAt, updatedAt, deletedAt, starred, archived, syncedAt, visitCount, lastVisitAt FROM items WHERE (deletedAt = 0 AND syncedAt = 0) OR (deletedAt > 0 AND syncId != '' AND syncedAt > 0 AND updatedAt > syncedAt)", 666 666 ) 667 667 .map_err(|e| format!("Query error: {}", e))?; 668 668 let result: Vec<Item> = stmt ··· 796 796 let conn = db.lock().unwrap(); 797 797 let now_ts = datastore::now(); 798 798 if let Err(e) = conn.execute( 799 - "UPDATE items SET syncId = ?1, syncSource = 'server', syncedAt = ?2 WHERE id = ?3", 799 + "UPDATE items SET syncId = ?1, syncedAt = ?2 WHERE id = ?3", 800 800 params![response.id, now_ts, item_data.id], 801 801 ) { 802 802 println!( ··· 891 891 // Count items that need to be synced 892 892 let pending_count: i64 = datastore_conn 893 893 .query_row( 894 - "SELECT COUNT(*) FROM items WHERE (deletedAt = 0 AND (syncSource = '' OR (syncedAt > 0 AND updatedAt > syncedAt))) OR (deletedAt > 0 AND syncId != '' AND syncedAt > 0 AND updatedAt > syncedAt)", 894 + "SELECT COUNT(*) FROM items WHERE (deletedAt = 0 AND (syncedAt = 0 OR (syncedAt > 0 AND updatedAt > syncedAt))) OR (deletedAt > 0 AND syncId != '' AND syncedAt > 0 AND updatedAt > syncedAt)", 895 895 [], 896 896 |row| row.get(0), 897 897 )
-1
backend/tauri/src-tauri/tests/smoke.rs
··· 631 631 mimeType TEXT DEFAULT '', 632 632 metadata TEXT DEFAULT '{}', 633 633 syncId TEXT DEFAULT '', 634 - syncSource TEXT DEFAULT '', 635 634 createdAt INTEGER NOT NULL, 636 635 updatedAt INTEGER NOT NULL, 637 636 deletedAt INTEGER DEFAULT 0,
+39 -7
backend/tests/sync-e2e.test.js
··· 11 11 */ 12 12 13 13 import { spawn } from 'child_process'; 14 - import { mkdtemp, rm, mkdir } from 'fs/promises'; 14 + import { mkdtemp, rm, mkdir, writeFile } from 'fs/promises'; 15 15 import { tmpdir } from 'os'; 16 16 import { join, dirname } from 'path'; 17 17 import { fileURLToPath } from 'url'; 18 + import { createWriteStream } from 'fs'; 18 19 19 20 // Import compiled desktop modules (from dist/) 20 21 import * as datastore from '../../dist/backend/electron/datastore.js'; ··· 68 69 69 70 // Generate a test API key 70 71 apiKey = 'test-e2e-key-' + Math.random().toString(36).substring(2); 72 + log(`Generated API key: ${apiKey}`); 73 + 74 + // Create log file for server output 75 + const serverLogPath = '/tmp/peek-e2e-server.log'; 76 + const serverLogStream = createWriteStream(serverLogPath, { flags: 'w' }); 77 + await writeFile(serverLogPath, `Starting server with API key: ${apiKey}\n`); 71 78 72 79 // Start server with temp data dir and test API key 80 + // Use single-user mode with E2E_TEST flag to bypass auth 73 81 serverProcess = spawn('node', ['index.js'], { 74 82 cwd: SERVER_PATH, 75 83 env: { 76 84 ...process.env, 77 85 PORT: TEST_PORT.toString(), 78 86 DATA_DIR: serverTempDir, 79 - API_KEY: apiKey, 87 + SINGLE_USER_MODE: 'true', 88 + SINGLE_USER_ID: 'default', 89 + E2E_TEST: 'true', // Bypass authentication in e2e tests 80 90 }, 81 - stdio: ['pipe', 'pipe', 'pipe'], 91 + stdio: ['pipe', 'pipe', 'pipe'], // Always use piped I/O to capture logs 82 92 }); 83 93 94 + // Capture server output to log file (console output only in VERBOSE mode) 84 95 serverProcess.stdout.on('data', (data) => { 85 - log(`[server] ${data.toString().trim()}`); 96 + const msg = data.toString(); 97 + serverLogStream.write(`[stdout] ${msg}`); 98 + if (process.env.VERBOSE && msg.trim()) { 99 + console.log(`[server] ${msg.trim()}`); 100 + } 86 101 }); 87 102 88 103 serverProcess.stderr.on('data', (data) => { 89 - log(`[server err] ${data.toString().trim()}`); 104 + const msg = data.toString(); 105 + serverLogStream.write(`[stderr] ${msg}`); 106 + if (process.env.VERBOSE && msg.trim()) { 107 + console.log(`[server] ${msg.trim()}`); 108 + } 109 + }); 110 + 111 + serverProcess.on('error', (error) => { 112 + console.error(`[server] Process error:`, error); 113 + serverLogStream.write(`[process error] ${error}\n`); 114 + }); 115 + 116 + serverProcess.on('exit', (code, signal) => { 117 + console.error(`[server] Process exited with code ${code}, signal ${signal}`); 118 + serverLogStream.write(`[process exit] code=${code}, signal=${signal}\n`); 90 119 }); 91 120 92 121 await waitForServer(); ··· 152 181 153 182 // Server API helpers 154 183 async function serverRequest(method, path, body = null) { 184 + const authHeader = `Bearer ${apiKey}`; 155 185 const options = { 156 186 method, 157 187 headers: { 158 - 'Authorization': `Bearer ${apiKey}`, 188 + 'Authorization': authHeader, 159 189 'Content-Type': 'application/json', 160 190 }, 161 191 }; ··· 164 194 options.body = JSON.stringify(body); 165 195 } 166 196 197 + log(`Request: ${method} ${path}, Auth: ${authHeader.substring(0, 20)}...`); 167 198 const res = await fetch(`${BASE_URL}${path}`, options); 168 199 const data = await res.json(); 169 200 170 201 if (!res.ok) { 202 + log(`Response ${res.status}: ${JSON.stringify(data)}`); 171 203 throw new Error(`API error ${res.status}: ${JSON.stringify(data)}`); 172 204 } 173 205 ··· 619 651 const { id: id2 } = datastore.addItem('url', { content: content2 }); 620 652 console.log(` Created second item on desktop: ${id2}`); 621 653 622 - // Verify item needs to be synced (syncSource is empty) 654 + // Verify item needs to be synced (syncedAt is 0) 623 655 const status1 = sync.getSyncStatus(); 624 656 console.log(` Pending items before sync: ${status1.pendingCount}`); 625 657
-2
backend/types/index.ts
··· 99 99 mimeType: string; 100 100 metadata: string; 101 101 syncId: string; 102 - syncSource: string; 103 102 syncedAt: number; 104 103 createdAt: number; 105 104 updatedAt: number; ··· 283 282 mimeType?: string; 284 283 metadata?: string; 285 284 syncId?: string; 286 - syncSource?: string; 287 285 starred?: number; 288 286 archived?: number; 289 287 }
+2 -2
package.json
··· 96 96 "//-- Sync Tests --//": "", 97 97 "test:sync": "node backend/tests/sync-integration.test.js", 98 98 "test:sync:verbose": "VERBOSE=1 node backend/tests/sync-integration.test.js", 99 - "test:sync:e2e": "node backend/tests/sync-e2e.test.js", 100 - "test:sync:e2e:verbose": "VERBOSE=1 node backend/tests/sync-e2e.test.js", 99 + "test:sync:e2e": "ELECTRON_RUN_AS_NODE=1 npx electron backend/tests/sync-e2e.test.js", 100 + "test:sync:e2e:verbose": "VERBOSE=1 ELECTRON_RUN_AS_NODE=1 npx electron backend/tests/sync-e2e.test.js", 101 101 "test:sync:e2e:prod": "node backend/tests/sync-e2e-prod.test.js", 102 102 "test:sync:e2e:prod:verbose": "VERBOSE=1 node backend/tests/sync-e2e-prod.test.js", 103 103 "test:sync:verify-logs": "node backend/tests/verify-railway-logs.js",
-1
schema/generated/sqlite-full.sql
··· 10 10 mimeType TEXT DEFAULT '', 11 11 metadata TEXT DEFAULT '{}', 12 12 syncId TEXT DEFAULT '', 13 - syncSource TEXT DEFAULT '', 14 13 syncedAt INTEGER DEFAULT 0, 15 14 createdAt INTEGER NOT NULL, 16 15 updatedAt INTEGER NOT NULL,
-1
schema/generated/sqlite-sync.sql
··· 10 10 mimeType TEXT DEFAULT '', 11 11 metadata TEXT DEFAULT '{}', 12 12 syncId TEXT DEFAULT '', 13 - syncSource TEXT DEFAULT '', 14 13 syncedAt INTEGER DEFAULT 0, 15 14 createdAt INTEGER NOT NULL, 16 15 updatedAt INTEGER NOT NULL,
-3
schema/generated/types.rs
··· 21 21 #[serde(rename = "syncId")] 22 22 /// ID from originating device (for dedup during sync) 23 23 pub sync_id: String, 24 - #[serde(rename = "syncSource")] 25 - /// Source device/platform identifier 26 - pub sync_source: String, 27 24 #[serde(rename = "syncedAt")] 28 25 /// Timestamp of last sync (Unix ms) 29 26 pub synced_at: i64,
+1 -3
schema/generated/types.ts
··· 18 18 metadata: string; 19 19 /** ID from originating device (for dedup during sync) */ 20 20 syncId: string; 21 - /** Source device/platform identifier */ 22 - syncSource: string; 23 21 /** Timestamp of last sync (Unix ms) */ 24 22 syncedAt: number; 25 23 /** Creation timestamp (Unix ms) */ ··· 109 107 110 108 /** Required sync columns by table */ 111 109 export const REQUIRED_SYNC_COLUMNS: Record<SchemaSyncTableName, string[]> = { 112 - items: ["id","type","content","syncId","syncSource","syncedAt","createdAt","updatedAt","deletedAt"], 110 + items: ["id","type","content","syncId","syncedAt","createdAt","updatedAt","deletedAt"], 113 111 tags: ["id","name","frequency","lastUsed","frecencyScore","createdAt","updatedAt"], 114 112 item_tags: ["itemId","tagId","createdAt"], 115 113 item_events: ["id","itemId","content","value","occurredAt","metadata","createdAt"],
+2 -2
schema/generated/validate.js
··· 14 14 */ 15 15 export function validateSyncSchema(getColumns) { 16 16 const required = { 17 - items: ["id","type","content","syncId","syncSource","syncedAt","createdAt","updatedAt","deletedAt"], 17 + items: ["id","type","content","syncId","syncedAt","createdAt","updatedAt","deletedAt"], 18 18 tags: ["id","name","frequency","lastUsed","frecencyScore","createdAt","updatedAt"], 19 19 item_tags: ["itemId","tagId","createdAt"], 20 20 item_events: ["id","itemId","content","value","occurredAt","metadata","createdAt"], ··· 57 57 58 58 /** Required sync columns by table */ 59 59 export const REQUIRED_SYNC_COLUMNS = { 60 - items: ["id","type","content","syncId","syncSource","syncedAt","createdAt","updatedAt","deletedAt"], 60 + items: ["id","type","content","syncId","syncedAt","createdAt","updatedAt","deletedAt"], 61 61 tags: ["id","name","frequency","lastUsed","frecencyScore","createdAt","updatedAt"], 62 62 item_tags: ["itemId","tagId","createdAt"], 63 63 item_events: ["id","itemId","content","value","occurredAt","metadata","createdAt"],
+1 -7
schema/v1.json
··· 44 44 "sync": true, 45 45 "description": "ID from originating device (for dedup during sync)" 46 46 }, 47 - "syncSource": { 48 - "type": "text", 49 - "default": "''", 50 - "sync": true, 51 - "description": "Source device/platform identifier" 52 - }, 53 47 "syncedAt": { 54 48 "type": "integer", 55 49 "default": "0", ··· 317 311 "timestamp_unit": "milliseconds", 318 312 "id_type": "text", 319 313 "required_sync_columns": { 320 - "items": ["id", "type", "content", "syncId", "syncSource", "syncedAt", "createdAt", "updatedAt", "deletedAt"], 314 + "items": ["id", "type", "content", "syncId", "syncedAt", "createdAt", "updatedAt", "deletedAt"], 321 315 "tags": ["id", "name", "frequency", "lastUsed", "frecencyScore", "createdAt", "updatedAt"], 322 316 "item_tags": ["itemId", "tagId", "createdAt"], 323 317 "item_events": ["id", "itemId", "content", "value", "occurredAt", "metadata", "createdAt"]