Personal save-for-later and Miniflux e-reader proxy for Xteink X4 (wip)
feat: witness cache and jetstream

+441 -24
+5
lexicons/net/solanaceae/nightshade/feed.json
··· 15 15 "format": "uri", 16 16 "description": "Absolute URL of the feed (RSS/Atom XML endpoint)." 17 17 }, 18 + "siteUrl": { 19 + "type": "string", 20 + "format": "uri", 21 + "description": "Absolute URL of the feed's homepage/site." 22 + }, 18 23 "title": { 19 24 "type": "string", 20 25 "maxLength": 500,
+3 -2
src/server/atproto.ts
···
    return listAll<FeedRecord>(this.agent(), this.did, FEED_NSID);
  }

- async createFeed(url: string, title?: string): Promise<void> {
+ async createFeed(url: string, opts?: { title?: string; siteUrl?: string }): Promise<void> {
    await this.agent().com.atproto.repo.createRecord({
      repo: this.did,
      collection: FEED_NSID,
      record: {
        $type: FEED_NSID,
        url,
-       ...(title ? { title } : {}),
+       ...(opts?.siteUrl ? { siteUrl: opts.siteUrl } : {}),
+       ...(opts?.title ? { title: opts.title } : {}),
        createdAt: new Date().toISOString(),
      } satisfies FeedRecord,
    });
+28
src/server/db.ts
···
      last_seen_at INTEGER NOT NULL
    );
    CREATE INDEX IF NOT EXISTS idx_browser_session_did ON browser_session(did);
+
+   CREATE TABLE IF NOT EXISTS cached_feed (
+     did TEXT NOT NULL,
+     rkey TEXT NOT NULL,
+     url TEXT NOT NULL,
+     site_url TEXT,
+     title TEXT,
+     created_at TEXT NOT NULL,
+     PRIMARY KEY (did, rkey)
+   );
+
+   CREATE TABLE IF NOT EXISTS cached_save (
+     did TEXT NOT NULL,
+     rkey TEXT NOT NULL,
+     url TEXT NOT NULL,
+     title TEXT,
+     created_at TEXT NOT NULL,
+     read_at TEXT,
+     PRIMARY KEY (did, rkey)
+   );
+
+   CREATE TABLE IF NOT EXISTS cache_meta (
+     did TEXT NOT NULL,
+     collection TEXT NOT NULL,
+     last_synced_at INTEGER NOT NULL,
+     PRIMARY KEY (did, collection)
+   );
  `);
  instance = d;
  migrateFileStores(d);
···
      }
    }
  }
+
+31 -4
src/server/index.ts
···
  import { buildOAuth } from "./oauth.js";
  import { AtprotoRepo } from "./atproto.js";
  import { Syncer } from "./sync.js";
+ import { RecordCache } from "./record-cache.js";
+ import { JetstreamListener } from "./jetstream.js";
  import { authRoutes } from "./routes-auth.js";
  import { apiRoutes } from "./routes-api.js";
  import { deviceRoutes } from "./routes-device.js";
···
  const bodies = new BodyCache();
  const deviceTokens = new DeviceTokenStore(config.dataDir);
  const oauth = await buildOAuth();
+ const cache = new RecordCache();

  const app = new Hono();
  app.use(logger());
···
  }

  app.route("/auth", authRoutes(oauth, deviceTokens));
- app.route("/api", apiRoutes(oauth, mf));
- app.route("/device", deviceRoutes(oauth, mf, bodies, deviceTokens));
+ app.route("/api", apiRoutes(oauth, mf, cache));
+ app.route("/device", deviceRoutes(oauth, mf, bodies, deviceTokens, cache));

  const packageRoot = resolve(import.meta.dirname, "../..");
  const publicDir = resolve(packageRoot, "dist/public");
···
    return c.html(indexHtml);
  });

+ // Populate cache on startup for all stored DIDs.
+ (async () => {
+   for (const did of oauth.listDids()) {
+     const session = await oauth.getSessionForDid(did);
+     if (!session) continue;
+     try {
+       await cache.syncAll(new AtprotoRepo(session));
+       console.log(`cache: populated for ${did}`);
+     } catch (e) {
+       console.error(`cache: startup sync failed for ${did}:`, e);
+     }
+   }
+ })();
+
+ // Jetstream listener for real-time cache updates.
+ const jetstream = new JetstreamListener(cache, () => oauth.listDids());
+ jetstream.start();
+
  // Background sync every 5 minutes for every stored DID.
  setInterval(
    async () => {
      for (const did of oauth.listDids()) {
-       if (!minifluxAllowed(did)) continue;
        const session = await oauth.getSessionForDid(did);
        if (!session) continue;
-       const syncer = new Syncer(config.dataDir, new AtprotoRepo(session), mf);
+       const repo = new AtprotoRepo(session);
+       try {
+         await cache.syncAll(repo);
+       } catch (e) {
+         console.error(`cache: periodic sync failed for ${did}:`, e);
+       }
+       if (!minifluxAllowed(did)) continue;
+       const syncer = new Syncer(config.dataDir, repo, mf);
        try {
          const res = await syncer.run();
          if (res.added || res.removed) {
+122
src/server/jetstream.ts
···
+ import { FEED_NSID, SAVE_NSID, type FeedRecord, type SaveRecord } from "../shared/lexicons.js";
+ import type { RecordCache } from "./record-cache.js";
+
+ const JETSTREAM_URL = "wss://jetstream1.us-east.bsky.network/subscribe";
+ const RECONNECT_DELAY_MS = 5_000;
+ const COLLECTIONS = [FEED_NSID, SAVE_NSID];
+
+ type CommitEvent = {
+   did: string;
+   time_us: number;
+   kind: "commit";
+   commit: {
+     rev: string;
+     operation: "create" | "update" | "delete";
+     collection: string;
+     rkey: string;
+     record?: Record<string, unknown>;
+     cid?: string;
+   };
+ };
+
+ export class JetstreamListener {
+   private ws: WebSocket | null = null;
+   private stopped = false;
+   private reconnectTimer: ReturnType<typeof setTimeout> | null = null;
+
+   constructor(
+     private cache: RecordCache,
+     private getDids: () => string[],
+   ) {}
+
+   start(): void {
+     this.stopped = false;
+     this.connect();
+   }
+
+   stop(): void {
+     this.stopped = true;
+     if (this.reconnectTimer) {
+       clearTimeout(this.reconnectTimer);
+       this.reconnectTimer = null;
+     }
+     if (this.ws) {
+       this.ws.close();
+       this.ws = null;
+     }
+   }
+
+   private connect(): void {
+     if (this.stopped) return;
+
+     const dids = this.getDids();
+     if (dids.length === 0) {
+       this.scheduleReconnect();
+       return;
+     }
+
+     const url = new URL(JETSTREAM_URL);
+     for (const c of COLLECTIONS) {
+       url.searchParams.append("wantedCollections", c);
+     }
+     for (const d of dids) {
+       url.searchParams.append("wantedDids", d);
+     }
+
+     const ws = new WebSocket(url.toString());
+     this.ws = ws;
+
+     ws.addEventListener("open", () => {
+       console.log("jetstream: connected");
+     });
+
+     ws.addEventListener("message", (ev) => {
+       try {
+         const data = JSON.parse(String(ev.data)) as CommitEvent;
+         if (data.kind === "commit") this.handleCommit(data);
+       } catch {
+         // ignore malformed
+       }
+     });
+
+     ws.addEventListener("close", () => {
+       console.log("jetstream: disconnected");
+       this.ws = null;
+       this.scheduleReconnect();
+     });
+
+     ws.addEventListener("error", (e) => {
+       console.error("jetstream: error", e);
+       ws.close();
+     });
+   }
+
+   private scheduleReconnect(): void {
+     if (this.stopped) return;
+     this.reconnectTimer = setTimeout(() => {
+       this.reconnectTimer = null;
+       this.connect();
+     }, RECONNECT_DELAY_MS);
+   }
+
+   private handleCommit(event: CommitEvent): void {
+     const { did, commit } = event;
+     const { collection, rkey, operation, record } = commit;
+
+     if (collection === FEED_NSID) {
+       if (operation === "delete") {
+         this.cache.deleteFeed(did, rkey);
+       } else if (record) {
+         this.cache.upsertFeed(did, rkey, record as unknown as FeedRecord);
+       }
+     } else if (collection === SAVE_NSID) {
+       if (operation === "delete") {
+         this.cache.deleteSave(did, rkey);
+       } else if (record) {
+         const saveRecord = record as unknown as SaveRecord;
+         this.cache.upsertSave(did, rkey, saveRecord);
+         if (!saveRecord.title) this.cache.backfillTitles(did);
+       }
+     }
+   }
+ }
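
For a single subscribed DID, the subscribe URL this builds comes out roughly as below (the DID is a hypothetical placeholder shown unencoded; the save collection name is an assumption mirroring the feed lexicon's net.solanaceae.nightshade.* namespace, since SAVE_NSID's value is not shown in this diff):

    wss://jetstream1.us-east.bsky.network/subscribe?wantedCollections=net.solanaceae.nightshade.feed&wantedCollections=net.solanaceae.nightshade.save&wantedDids=did:plc:example
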
+182
src/server/record-cache.ts
···
+ import { db } from "./db.js";
+ import type { AtprotoRepo } from "./atproto.js";
+ import { fetchAndExtract } from "./readability.js";
+ import {
+   FEED_NSID,
+   SAVE_NSID,
+   type FeedRecord,
+   type FeedView,
+   type SaveRecord,
+   type SaveView,
+ } from "../shared/lexicons.js";
+
+ export class RecordCache {
+   listFeeds(did: string): FeedView[] {
+     const rows = db()
+       .prepare("SELECT rkey, url, site_url, title, created_at FROM cached_feed WHERE did = ?")
+       .all(did) as Array<{
+         rkey: string;
+         url: string;
+         site_url: string | null;
+         title: string | null;
+         created_at: string;
+       }>;
+     return rows.map((r) => ({
+       rkey: r.rkey,
+       record: {
+         url: r.url,
+         siteUrl: r.site_url ?? undefined,
+         title: r.title ?? undefined,
+         createdAt: r.created_at,
+       },
+     }));
+   }
+
+   listSaves(did: string): SaveView[] {
+     const rows = db()
+       .prepare("SELECT rkey, url, title, created_at, read_at FROM cached_save WHERE did = ?")
+       .all(did) as Array<{
+         rkey: string;
+         url: string;
+         title: string | null;
+         created_at: string;
+         read_at: string | null;
+       }>;
+     return rows.map((r) => ({
+       rkey: r.rkey,
+       record: {
+         url: r.url,
+         title: r.title ?? undefined,
+         createdAt: r.created_at,
+         readAt: r.read_at ?? undefined,
+       },
+     }));
+   }
+
+   getSave(did: string, rkey: string): SaveView | null {
+     const row = db()
+       .prepare("SELECT rkey, url, title, created_at, read_at FROM cached_save WHERE did = ? AND rkey = ?")
+       .get(did, rkey) as {
+         rkey: string;
+         url: string;
+         title: string | null;
+         created_at: string;
+         read_at: string | null;
+       } | undefined;
+     if (!row) return null;
+     return {
+       rkey: row.rkey,
+       record: {
+         url: row.url,
+         title: row.title ?? undefined,
+         createdAt: row.created_at,
+         readAt: row.read_at ?? undefined,
+       },
+     };
+   }
+
+   upsertFeed(did: string, rkey: string, record: FeedRecord): void {
+     db()
+       .prepare(
+         `INSERT OR REPLACE INTO cached_feed (did, rkey, url, site_url, title, created_at)
+          VALUES (?, ?, ?, ?, ?, ?)`,
+       )
+       .run(did, rkey, record.url, record.siteUrl ?? null, record.title ?? null, record.createdAt);
+   }
+
+   upsertSave(did: string, rkey: string, record: SaveRecord): void {
+     db()
+       .prepare(
+         `INSERT OR REPLACE INTO cached_save (did, rkey, url, title, created_at, read_at)
+          VALUES (?, ?, ?, ?, ?, ?)`,
+       )
+       .run(did, rkey, record.url, record.title ?? null, record.createdAt, record.readAt ?? null);
+   }
+
+   deleteFeed(did: string, rkey: string): void {
+     db().prepare("DELETE FROM cached_feed WHERE did = ? AND rkey = ?").run(did, rkey);
+   }
+
+   deleteSave(did: string, rkey: string): void {
+     db().prepare("DELETE FROM cached_save WHERE did = ? AND rkey = ?").run(did, rkey);
+   }
+
+   hasSynced(did: string): boolean {
+     const row = db()
+       .prepare("SELECT 1 FROM cache_meta WHERE did = ? LIMIT 1")
+       .get(did) as { 1: number } | undefined;
+     return !!row;
+   }
+
+   async syncAll(repo: AtprotoRepo): Promise<void> {
+     await Promise.all([this.syncFeeds(repo), this.syncSaves(repo)]);
+   }
+
+   async syncFeeds(repo: AtprotoRepo): Promise<void> {
+     const feeds = await repo.listFeeds();
+     const d = db();
+     const txn = d.prepare("BEGIN");
+     const commit = d.prepare("COMMIT");
+     const rollback = d.prepare("ROLLBACK");
+     txn.run();
+     try {
+       d.prepare("DELETE FROM cached_feed WHERE did = ?").run(repo.did);
+       const ins = d.prepare(
+         `INSERT INTO cached_feed (did, rkey, url, site_url, title, created_at) VALUES (?, ?, ?, ?, ?, ?)`,
+       );
+       for (const f of feeds) {
+         ins.run(repo.did, f.rkey, f.record.url, f.record.siteUrl ?? null, f.record.title ?? null, f.record.createdAt);
+       }
+       d.prepare(
+         `INSERT OR REPLACE INTO cache_meta (did, collection, last_synced_at) VALUES (?, ?, ?)`,
+       ).run(repo.did, FEED_NSID, Date.now());
+       commit.run();
+     } catch (e) {
+       rollback.run();
+       throw e;
+     }
+   }
+
+   async syncSaves(repo: AtprotoRepo): Promise<void> {
+     const saves = await repo.listSaves();
+     const d = db();
+     const txn = d.prepare("BEGIN");
+     const commit = d.prepare("COMMIT");
+     const rollback = d.prepare("ROLLBACK");
+     txn.run();
+     try {
+       d.prepare("DELETE FROM cached_save WHERE did = ?").run(repo.did);
+       const ins = d.prepare(
+         `INSERT INTO cached_save (did, rkey, url, title, created_at, read_at) VALUES (?, ?, ?, ?, ?, ?)`,
+       );
+       for (const s of saves) {
+         ins.run(
+           repo.did, s.rkey, s.record.url, s.record.title ?? null,
+           s.record.createdAt, s.record.readAt ?? null,
+         );
+       }
+       d.prepare(
+         `INSERT OR REPLACE INTO cache_meta (did, collection, last_synced_at) VALUES (?, ?, ?)`,
+       ).run(repo.did, SAVE_NSID, Date.now());
+       commit.run();
+     } catch (e) {
+       rollback.run();
+       throw e;
+     }
+     this.backfillTitles(repo.did);
+   }
+
+   backfillTitles(did: string): void {
+     const rows = db()
+       .prepare("SELECT rkey, url FROM cached_save WHERE did = ? AND title IS NULL")
+       .all(did) as Array<{ rkey: string; url: string }>;
+     if (rows.length === 0) return;
+     const update = db().prepare(
+       "UPDATE cached_save SET title = ? WHERE did = ? AND rkey = ?",
+     );
+     for (const row of rows) {
+       fetchAndExtract(row.url)
+         .then(({ title }) => {
+           if (title) update.run(title, did, row.rkey);
+         })
+         .catch(() => {});
+     }
+   }
+ }
+41 -11
src/server/routes-api.ts
···
  import { getCookie, deleteCookie } from "hono/cookie";
  import { AtprotoRepo } from "./atproto.js";
  import type { MinifluxClient } from "./miniflux.js";
+ import type { RecordCache } from "./record-cache.js";
  import { Syncer } from "./sync.js";
  import { fetchAndExtract } from "./readability.js";
  import type { NightshadeOAuth } from "./oauth.js";
···

  type Env = { Variables: { repo: AtprotoRepo } };

- export function apiRoutes(oauth: NightshadeOAuth, mf: MinifluxClient) {
+ export function apiRoutes(oauth: NightshadeOAuth, mf: MinifluxClient, cache: RecordCache) {
    const app = new Hono<Env>();

    const makeSyncer = (repo: AtprotoRepo) =>
···
      return next();
    });

-   // --- Saves ---
+   // --- Saves (read from cache) ---
    app.get("/saves", async (c) => {
      const repo = c.get("repo");
+     if (!cache.hasSynced(repo.did)) await cache.syncSaves(repo);
      const unreadOnly = c.req.query("all") === undefined;
      const limit = Number(c.req.query("limit") ?? 200);
-     const views = await repo.listSaves();
+     const views = cache.listSaves(repo.did);
      const filtered = unreadOnly
        ? views.filter((v) => !v.record.readAt)
        : views;
···
      // filled in on the next successful body fetch from the e-reader path.
    }
    const view = await repo.createSave(url, title);
+   cache.upsertSave(repo.did, view.rkey, view.record);
    return c.json(view);
  });

  app.delete("/saves/:rkey", async (c) => {
    const repo = c.get("repo");
-   await repo.deleteSave(c.req.param("rkey"));
+   const rkey = c.req.param("rkey");
+   await repo.deleteSave(rkey);
+   cache.deleteSave(repo.did, rkey);
    return c.body(null, 204);
  });

  app.post("/saves/:rkey/read", async (c) => {
    const repo = c.get("repo");
-   await repo.markSaveRead(c.req.param("rkey"), true);
+   const rkey = c.req.param("rkey");
+   await repo.markSaveRead(rkey, true);
+   const existing = cache.getSave(repo.did, rkey);
+   if (existing) {
+     cache.upsertSave(repo.did, rkey, {
+       ...existing.record,
+       readAt: new Date().toISOString(),
+     });
+   }
    return c.body(null, 204);
  });

  app.post("/saves/:rkey/unread", async (c) => {
    const repo = c.get("repo");
-   await repo.markSaveRead(c.req.param("rkey"), false);
+   const rkey = c.req.param("rkey");
+   await repo.markSaveRead(rkey, false);
+   const existing = cache.getSave(repo.did, rkey);
+   if (existing) {
+     const { readAt: _, ...rest } = existing.record;
+     cache.upsertSave(repo.did, rkey, rest);
+   }
    return c.body(null, 204);
  });

- // --- Feeds (atproto-canonical, mirrored to Miniflux) ---
+ // --- Feeds (read from cache, mirrored to Miniflux) ---
  app.get("/feeds", async (c) => {
    const repo = c.get("repo");
    if (!minifluxAllowed(repo.did))
      return c.json({ error: "miniflux sync not enabled for this account" }, 403);
+   if (!cache.hasSynced(repo.did)) await cache.syncFeeds(repo);
    const [feeds, mfFeeds] = await Promise.all([
-     repo.listFeeds(),
+     Promise.resolve(cache.listFeeds(repo.did)),
      mf.listFeeds().catch(() => []),
    ]);
    const siteByFeedUrl = new Map<string, string>();
···
    const enriched = feeds.map((f) => ({
      rkey: f.rkey,
      record: f.record,
-     siteUrl: siteByFeedUrl.get(f.record.url) ?? null,
+     siteUrl: f.record.siteUrl ?? siteByFeedUrl.get(f.record.url) ?? null,
      minifluxTitle: titleByFeedUrl.get(f.record.url) ?? null,
    }));
    enriched.sort((a, b) => {
···
      return c.json({ error: "miniflux sync not enabled for this account" }, 403);
    const { url, title } = await c.req.json<{ url: string; title?: string }>();
    if (!url) return c.json({ error: "missing url" }, 400);
-   await repo.createFeed(url, title);
+   await repo.createFeed(url, { title });
+   await cache.syncFeeds(repo);
    try {
      await makeSyncer(repo).run();
    } catch (e) {
···
    const repo = c.get("repo");
    if (!minifluxAllowed(repo.did))
      return c.json({ error: "miniflux sync not enabled for this account" }, 403);
-   await repo.deleteFeed(c.req.param("rkey"));
+   const rkey = c.req.param("rkey");
+   await repo.deleteFeed(rkey);
+   cache.deleteFeed(repo.did, rkey);
    try {
      await makeSyncer(repo).run();
    } catch (e) {
···
    const repo = c.get("repo");
    if (!minifluxAllowed(repo.did))
      return c.json({ error: "miniflux sync not enabled for this account" }, 403);
+   await cache.syncAll(repo);
    const result = await makeSyncer(repo).run();
    return c.json(result);
+ });
+
+ app.post("/cache/sync", async (c) => {
+   const repo = c.get("repo");
+   await cache.syncAll(repo);
+   return c.body(null, 204);
  });

  return app;
+4 -2
src/server/routes-device.ts
···
  import type { MinifluxClient } from "./miniflux.js";
  import type { NightshadeOAuth } from "./oauth.js";
  import type { DeviceTokenStore } from "./device-tokens.js";
+ import type { RecordCache } from "./record-cache.js";
  import { htmlToText } from "./readability.js";
  import { renderItem, renderList } from "./reader-format.js";
  import { minifluxAllowed } from "./config.js";
···
    mf: MinifluxClient,
    bodies: BodyCache,
    tokens: DeviceTokenStore,
+   cache: RecordCache,
  ) {
    const app = new Hono<Env>();
···
          direction: "desc",
        })
      : { entries: [] },
-   repo.listSaves(),
+   Promise.resolve(cache.listSaves(repo.did)),
  ]);

  const items: UnifiedItem[] = [];
···
  try {
    if (rawId.startsWith("s")) {
      const rkey = rawId.slice(1);
-     const save = await repo.getSave(rkey);
+     const save = cache.getSave(repo.did, rkey) ?? await repo.getSave(rkey);
      if (!save) return c.text("not found\n", 404);
      const cached = await bodies.get(save.record.url);
      return c.text(
+1 -1
src/server/sync.ts
···
  if (!(url in curA)) {
    const feed = mfList.find((f) => f.feed_url === url);
    try {
-     await this.repo.createFeed(url, feed?.title);
+     await this.repo.createFeed(url, { title: feed?.title, siteUrl: feed?.site_url });
      added++;
    } catch (e) {
      console.error(`sync: failed to create atproto feed ${url}:`, e);
+1
src/shared/lexicons.ts
···
  export type FeedRecord = {
    $type?: typeof FEED_NSID;
    url: string;
+   siteUrl?: string;
    title?: string;
    createdAt: string;
  };
+20 -4
src/web/App.tsx
···

  function SavesView() {
    const [unreadOnly, setUnreadOnly] = useState(true);
+   const [syncing, setSyncing] = useState(false);
    const { data, err, loading, reload } = useAsync<SaveView[]>(
      () => api.listSaves(!unreadOnly),
      [unreadOnly],
    );
+
+   const syncFromPds = async () => {
+     setSyncing(true);
+     try {
+       await api.syncCache();
+     } finally {
+       setSyncing(false);
+       reload();
+     }
+   };

    return (
      <>
···
            unread only
          </label>
        </div>
-       <button class="ghost" onClick={reload} disabled={loading}>
-         {loading ? "…" : "refresh"}
-       </button>
+       <div class="actions">
+         <button class="ghost" onClick={syncFromPds} disabled={syncing}>
+           {syncing ? "syncing…" : "sync"}
+         </button>
+         <button class="ghost" onClick={reload} disabled={loading}>
+           {loading ? "…" : "refresh"}
+         </button>
+       </div>
      </div>
      {err && <div class="error">{err}</div>}
      {loading && !data && <div class="loading">loading…</div>}
···
  }) {
    const title =
      view.record.title ?? view.minifluxTitle ?? view.siteUrl ?? view.record.url;
-   const homepage = view.siteUrl ?? view.record.url;
+   const homepage = view.record.siteUrl ?? view.siteUrl ?? view.record.url;
    const remove = async () => {
      if (!confirm(`Unsubscribe from "${title}"?`)) return;
      await api.unsubscribe(view.rkey);
+3
src/web/api.ts
···
    syncNow: (): Promise<{ added: number; removed: number }> =>
      req("/api/sync", { method: "POST" }),

+   syncCache: (): Promise<void> =>
+     req("/api/cache/sync", { method: "POST" }),
+
    listDeviceTokens: (): Promise<DeviceToken[]> => req("/auth/device-tokens"),

    createDeviceToken: (label: string): Promise<CreatedDeviceToken> =>