AppView in a box, as a Vite plugin — hatk.dev
2
fork

Configure Feed

Select the types of activity you want to include in your feed.

fix: quote all SQL column names to prevent reserved word conflicts

Double-quote all column names in generated DDL, DML, and FTS queries
to avoid SQLite errors when lexicon field names collide with reserved
words (e.g. "index" from app.bsky.feed.post).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

+35 -30
+18 -18
packages/hatk/src/database/db.ts
··· 1 - import { type TableSchema, toSnakeCase } from './schema.ts' 1 + import { type TableSchema, toSnakeCase, q } from './schema.ts' 2 2 import type { Row } from '../lex-types.ts' 3 3 import { getSearchColumns, stripStopWords, getSearchPort, updateFtsRecord, deleteFtsRecord } from './fts.ts' 4 4 import { emit, timer } from '../logger.ts' ··· 603 603 } else if (col.originalName.endsWith('__cid') && record[col.originalName.replace('__cid', '')]) { 604 604 rawValue = record[col.originalName.replace('__cid', '')].cid 605 605 } 606 - colNames.push(col.name) 606 + colNames.push(q(col.name)) 607 607 placeholders.push(`$${paramIdx++}`) 608 608 609 609 if (rawValue === undefined || rawValue === null) { ··· 646 646 let idx = 3 647 647 648 648 for (const col of child.columns) { 649 - colNames.push(col.name) 649 + colNames.push(q(col.name)) 650 650 placeholders.push(`$${idx++}`) 651 651 const raw = item[col.originalName] 652 652 if (raw === undefined || raw === null) { ··· 685 685 const values: any[] = [uri, authorDid] 686 686 let idx = 3 687 687 for (const col of branch.columns) { 688 - colNames.push(col.name) 688 + colNames.push(q(col.name)) 689 689 placeholders.push(`$${idx++}`) 690 690 const raw = item[col.originalName] 691 691 if (raw === undefined || raw === null) { ··· 709 709 const values: any[] = [uri, authorDid] 710 710 let idx = 3 711 711 for (const col of branch.columns) { 712 - colNames.push(col.name) 712 + colNames.push(q(col.name)) 713 713 placeholders.push(`$${idx++}`) 714 714 const raw = branchData[col.originalName] 715 715 if (raw === undefined || raw === null) { ··· 830 830 if (!schema) continue 831 831 832 832 const stagingTable = `_staging_${collection.replace(/\./g, '_')}` 833 - const allCols = ['uri', 'cid', 'did', 'indexed_at', ...schema.columns.map((c) => c.name)] 833 + const allCols = ['uri', 'cid', 'did', 'indexed_at', ...schema.columns.map((c) => q(c.name))] 834 834 const colDefs = [ 835 835 'uri TEXT', 836 836 'cid TEXT', ··· 839 839 
...schema.columns.map((c) => { 840 840 const t = c.sqlType 841 841 // Use TEXT for timestamp columns in staging (will cast on merge) 842 - return `${c.name} ${t === 'TIMESTAMP' || t === 'TIMESTAMPTZ' ? 'TEXT' : t}` 842 + return `${q(c.name)} ${t === 'TIMESTAMP' || t === 'TIMESTAMPTZ' ? 'TEXT' : t}` 843 843 }), 844 844 ] 845 845 ··· 867 867 868 868 // Merge into target, filtering rows that would violate NOT NULL 869 869 const selectCols = allCols.map((name) => { 870 - const col = schema.columns.find((c) => c.name === name) 870 + const col = schema.columns.find((c) => q(c.name) === name) 871 871 if (name === 'indexed_at' || (col && (col.sqlType === 'TIMESTAMP' || col.sqlType === 'TIMESTAMPTZ'))) { 872 872 return `${dialect.tryCastTimestamp(name)} AS ${name}` 873 873 } ··· 877 877 for (const col of schema.columns) { 878 878 if (col.notNull) { 879 879 if (col.sqlType === 'TIMESTAMP' || col.sqlType === 'TIMESTAMPTZ') { 880 - notNullChecks.push(`${dialect.tryCastTimestamp(col.name)} IS NOT NULL`) 880 + notNullChecks.push(`${dialect.tryCastTimestamp(q(col.name))} IS NOT NULL`) 881 881 } else { 882 - notNullChecks.push(`${col.name} IS NOT NULL`) 882 + notNullChecks.push(`${q(col.name)} IS NOT NULL`) 883 883 } 884 884 } 885 885 } ··· 898 898 'parent_did TEXT', 899 899 ...child.columns.map((c) => { 900 900 const t = c.sqlType 901 - return `${c.name} ${t === 'TIMESTAMP' || t === 'TIMESTAMPTZ' ? 'TEXT' : t}` 901 + return `${q(c.name)} ${t === 'TIMESTAMP' || t === 'TIMESTAMPTZ' ? 
'TEXT' : t}` 902 902 }), 903 903 ] 904 - const childAllCols = ['parent_uri', 'parent_did', ...child.columns.map((c) => c.name)] 904 + const childAllCols = ['parent_uri', 'parent_did', ...child.columns.map((c) => q(c.name))] 905 905 906 906 await port.execute(`DROP TABLE IF EXISTS ${childStagingTable}`, []) 907 907 await port.execute(`CREATE TABLE ${childStagingTable} (${childColDefs.join(', ')})`, []) ··· 935 935 ) 936 936 937 937 const childSelectCols = childAllCols.map((name) => { 938 - const col = child.columns.find((c) => c.name === name) 938 + const col = child.columns.find((c) => q(c.name) === name) 939 939 if (col && (col.sqlType === 'TIMESTAMP' || col.sqlType === 'TIMESTAMPTZ')) { 940 940 return `${dialect.tryCastTimestamp(name)} AS ${name}` 941 941 } ··· 957 957 'parent_did TEXT', 958 958 ...branch.columns.map((c) => { 959 959 const t = c.sqlType 960 - return `${c.name} ${t === 'TIMESTAMP' || t === 'TIMESTAMPTZ' ? 'TEXT' : t}` 960 + return `${q(c.name)} ${t === 'TIMESTAMP' || t === 'TIMESTAMPTZ' ? 
'TEXT' : t}` 961 961 }), 962 962 ] 963 - const branchAllCols = ['parent_uri', 'parent_did', ...branch.columns.map((c) => c.name)] 963 + const branchAllCols = ['parent_uri', 'parent_did', ...branch.columns.map((c) => q(c.name))] 964 964 965 965 await port.execute(`DROP TABLE IF EXISTS ${branchStagingTable}`, []) 966 966 await port.execute(`CREATE TABLE ${branchStagingTable} (${branchColDefs.join(', ')})`, []) ··· 1010 1010 ) 1011 1011 1012 1012 const branchSelectCols = branchAllCols.map((name) => { 1013 - const col = branch.columns.find((c) => c.name === name) 1013 + const col = branch.columns.find((c) => q(c.name) === name) 1014 1014 if (col && (col.sqlType === 'TIMESTAMP' || col.sqlType === 'TIMESTAMPTZ')) { 1015 1015 return `${dialect.tryCastTimestamp(name)} AS ${name}` 1016 1016 } ··· 1333 1333 const remaining = limit - bm25Results.length 1334 1334 const jwFn = dialect.jaroWinklerSimilarity 1335 1335 const simExprs = [ 1336 - ...textCols.map((c) => `${jwFn}(lower(t.${c.name}), lower($1))`), 1336 + ...textCols.map((c) => `${jwFn}(lower(t.${q(c.name)}), lower($1))`), 1337 1337 `${jwFn}(lower(r.handle), lower($1))`, 1338 1338 ] 1339 1339 // Include child table TEXT columns via correlated subquery ··· 1341 1341 for (const col of child.columns) { 1342 1342 if (col.sqlType === 'TEXT') { 1343 1343 simExprs.push( 1344 - `COALESCE((SELECT MAX(${jwFn}(lower(c.${col.name}), lower($1))) FROM ${child.tableName} c WHERE c.parent_uri = t.uri), 0)`, 1344 + `COALESCE((SELECT MAX(${jwFn}(lower(c.${q(col.name)}), lower($1))) FROM ${child.tableName} c WHERE c.parent_uri = t.uri), 0)`, 1345 1345 ) 1346 1346 } 1347 1347 }
+6 -6
packages/hatk/src/database/fts.ts
··· 1 1 import { getSchema, runSQL, getSqlDialect, querySQL } from './db.ts' 2 - import { getLexicon } from './schema.ts' 2 + import { getLexicon, q } from './schema.ts' 3 3 import { emit, timer } from '../logger.ts' 4 4 import type { SearchPort } from './ports.ts' 5 5 ··· 128 128 129 129 for (const col of schema.columns) { 130 130 if (col.sqlType === 'TEXT') { 131 - selectExprs.push(`t.${col.name}`) 131 + selectExprs.push(`t.${q(col.name)}`) 132 132 searchColNames.push(col.name) 133 133 } else if (col.isJson && record?.properties) { 134 134 const prop = record.properties[col.originalName] 135 135 if (prop?.type === 'blob') continue // skip blobs 136 136 if (prop && lexicon) { 137 - const derived = jsonSearchColumns(`t.${col.name}`, prop, lexicon, dialect) 137 + const derived = jsonSearchColumns(`t.${q(col.name)}`, prop, lexicon, dialect) 138 138 if (derived.length > 0) { 139 139 for (const d of derived) { 140 140 selectExprs.push(`${d.expr} AS ${d.alias}`) ··· 144 144 } 145 145 } 146 146 // Fallback: cast JSON to TEXT 147 - selectExprs.push(`CAST(t.${col.name} AS TEXT) AS ${col.name}`) 147 + selectExprs.push(`CAST(t.${q(col.name)} AS TEXT) AS ${q(col.name)}`) 148 148 searchColNames.push(col.name) 149 149 } 150 150 } ··· 154 154 for (const col of child.columns) { 155 155 if (col.sqlType === 'TEXT') { 156 156 const alias = `${child.fieldName}_${col.name}` 157 - const agg = dialect.stringAgg(`c.${col.name}`, "' '") 157 + const agg = dialect.stringAgg(`c.${q(col.name)}`, "' '") 158 158 selectExprs.push(`(SELECT ${agg} FROM ${child.tableName} c WHERE c.parent_uri = t.uri) AS ${alias}`) 159 159 searchColNames.push(alias) 160 160 } ··· 167 167 for (const col of branch.columns) { 168 168 if (col.sqlType === 'TEXT') { 169 169 const alias = `${union.fieldName}_${branch.branchName}_${col.name}` 170 - const agg = dialect.stringAgg(`c.${col.name}`, "' '") 170 + const agg = dialect.stringAgg(`c.${q(col.name)}`, "' '") 171 171 selectExprs.push(`(SELECT ${agg} FROM 
${branch.tableName} c WHERE c.parent_uri = t.uri) AS ${alias}`) 172 172 searchColNames.push(alias) 173 173 }
+11 -6
packages/hatk/src/database/schema.ts
··· 48 48 return str.replace(/([A-Z])/g, '_$1').toLowerCase() 49 49 } 50 50 51 + // Quote a column name to avoid conflicts with SQL reserved words 52 + export function q(name: string): string { 53 + return `"${name}"` 54 + } 55 + 51 56 // Map lexicon property type to SQL type using dialect config 52 57 interface TypeMapping { 53 58 sqlType: string ··· 387 392 388 393 for (const col of schema.columns) { 389 394 const nullable = col.notNull ? ' NOT NULL' : '' 390 - lines.push(` ${col.name} ${col.sqlType}${nullable}`) 395 + lines.push(` ${q(col.name)} ${col.sqlType}${nullable}`) 391 396 } 392 397 393 398 const createTable = `CREATE TABLE IF NOT EXISTS ${schema.tableName} (\n${lines.join(',\n')}\n);` ··· 400 405 401 406 // Index ref columns for hydration lookups 402 407 for (const refCol of schema.refColumns) { 403 - indexes.push(`CREATE INDEX IF NOT EXISTS idx_${prefix}_${refCol} ON ${schema.tableName}(${refCol});`) 408 + indexes.push(`CREATE INDEX IF NOT EXISTS idx_${prefix}_${refCol} ON ${schema.tableName}(${q(refCol)});`) 404 409 } 405 410 406 411 // Child table DDL ··· 409 414 const childLines: string[] = [' parent_uri TEXT NOT NULL', ' parent_did TEXT NOT NULL'] 410 415 for (const col of child.columns) { 411 416 const nullable = col.notNull ? 
' NOT NULL' : '' 412 - childLines.push(` ${col.name} ${col.sqlType}${nullable}`) 417 + childLines.push(` ${q(col.name)} ${col.sqlType}${nullable}`) 413 418 } 414 419 childDDL.push(`CREATE TABLE IF NOT EXISTS ${child.tableName} (\n${childLines.join(',\n')}\n);`) 415 420 ··· 419 424 420 425 for (const col of child.columns) { 421 426 if (col.isJson || col.sqlType === 'BLOB') continue 422 - childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${childPrefix}_${col.name} ON ${child.tableName}(${col.name});`) 427 + childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${childPrefix}_${col.name} ON ${child.tableName}(${q(col.name)});`) 423 428 } 424 429 } 425 430 ··· 429 434 const branchLines: string[] = [' parent_uri TEXT NOT NULL', ' parent_did TEXT NOT NULL'] 430 435 for (const col of branch.columns) { 431 436 const nullable = col.notNull ? ' NOT NULL' : '' 432 - branchLines.push(` ${col.name} ${col.sqlType}${nullable}`) 437 + branchLines.push(` ${q(col.name)} ${col.sqlType}${nullable}`) 433 438 } 434 439 childDDL.push(`CREATE TABLE IF NOT EXISTS ${branch.tableName} (\n${branchLines.join(',\n')}\n);`) 435 440 ··· 439 444 440 445 for (const col of branch.columns) { 441 446 if (col.isJson || col.sqlType === 'BLOB') continue 442 - childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${branchPrefix}_${col.name} ON ${branch.tableName}(${col.name});`) 447 + childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${branchPrefix}_${col.name} ON ${branch.tableName}(${q(col.name)});`) 443 448 } 444 449 } 445 450 }