AppView in a box as a Vite plugin thing hatk.dev
2
fork

Configure Feed

Select the types of activity you want to include in your feed.

fix: cap DuckDB memory and remove backfillChildTables

Set DuckDB memory_limit=512MB and threads=2 to prevent FTS shadow
table rebuilds from consuming all container RAM alongside V8.
Remove backfillChildTables() — it was a one-time migration; child rows
are already populated during normal indexing and backfill.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

+6 -46
+1 -1
packages/hatk/package.json
··· 1 1 { 2 2 "name": "@hatk/hatk", 3 - "version": "0.0.1-alpha.11", 3 + "version": "0.0.1-alpha.12", 4 4 "license": "MIT", 5 5 "bin": { 6 6 "hatk": "dist/cli.js"
+4 -42
packages/hatk/src/db.ts
··· 123 123 tableSchemas: TableSchema[], 124 124 ddlStatements: string[], 125 125 ): Promise<void> { 126 - instance = await DuckDBInstance.create(dbPath === ':memory:' ? undefined : dbPath) 126 + instance = await DuckDBInstance.create(dbPath === ':memory:' ? undefined : dbPath, { 127 + memory_limit: '512MB', 128 + threads: '2', 129 + }) 127 130 con = await instance.connect() 128 131 readCon = await instance.connect() 129 132 ··· 1555 1558 return new Set(rows.map((r: any) => r.did)) 1556 1559 } 1557 1560 1558 - export async function backfillChildTables(): Promise<void> { 1559 - for (const [, schema] of schemas) { 1560 - for (const child of schema.children) { 1561 - // Check if child table needs backfill (significantly fewer rows than parent) 1562 - const mainCount = (await all(`SELECT COUNT(*)::INTEGER as n FROM ${schema.tableName}`))[0]?.n || 0 1563 - if (mainCount === 0) continue 1564 - const childCount = 1565 - (await all(`SELECT COUNT(DISTINCT parent_uri)::INTEGER as n FROM ${child.tableName}`))[0]?.n || 0 1566 - if (childCount >= mainCount * 0.9) continue 1567 - 1568 - console.log(`[db] Backfilling ${child.tableName} from ${schema.tableName}...`) 1569 - 1570 - const snakeField = toSnakeCase(child.fieldName) 1571 - const childColSelects = child.columns 1572 - .map((c) => `json_extract_string(item.val, '$.${c.originalName}')`) 1573 - .join(', ') 1574 - const childColNames = ['parent_uri', 'parent_did', ...child.columns.map((c) => c.name)] 1575 - 1576 - const notNullFilters = child.columns 1577 - .filter((c) => c.notNull) 1578 - .map((c) => `json_extract_string(item.val, '$.${c.originalName}') IS NOT NULL`) 1579 - 1580 - const whereClause = [`p.${snakeField} IS NOT NULL`, ...notNullFilters].join(' AND ') 1581 - 1582 - try { 1583 - await run(`DELETE FROM ${child.tableName}`) 1584 - await run(` 1585 - INSERT INTO ${child.tableName} (${childColNames.join(', ')}) 1586 - SELECT p.uri, p.did, ${childColSelects} 1587 - FROM ${schema.tableName} p, 1588 - 
unnest(from_json(p.${snakeField}::JSON, '["json"]')) AS item(val) 1589 - WHERE ${whereClause} 1590 - `) 1591 - const result = await all(`SELECT COUNT(*)::INTEGER as n FROM ${child.tableName}`) 1592 - console.log(`[db] Backfilled ${child.tableName}: ${result[0]?.n || 0} rows`) 1593 - } catch (err: any) { 1594 - console.warn(`[db] Backfill skipped for ${child.tableName}: ${err.message}`) 1595 - } 1596 - } 1597 - } 1598 - }
+1 -3
packages/hatk/src/main.ts
··· 11 11 generateCreateTableSQL, 12 12 } from './schema.ts' 13 13 import { discoverViews } from './views.ts' 14 - import { initDatabase, getCursor, querySQL, backfillChildTables } from './db.ts' 14 + import { initDatabase, getCursor, querySQL } from './db.ts' 15 15 import { initFeeds, listFeeds } from './feeds.ts' 16 16 import { initXrpc, listXrpc, configureRelay } from './xrpc.ts' 17 17 import { initOpengraph } from './opengraph.ts' ··· 93 93 await initDatabase(config.database, schemas, ddlStatements) 94 94 log(`[main] DuckDB initialized (${config.database === ':memory:' ? 'in-memory' : config.database})`) 95 95 96 - // 3a. Backfill child tables for decomposed arrays (one-time migration) 97 - await backfillChildTables() 98 96 99 97 // 3b. Run setup hooks (after DB init, before server) 100 98 await initSetup(resolve(configDir, 'setup'))