Suite of AT Protocol TypeScript libraries built on web standards
20
fork

Configure Feed

Select the types of activity you want to include in your feed.

that part

+12324
+136
.gitignore
··· 1 + # Logs 2 + logs 3 + *.log 4 + npm-debug.log* 5 + yarn-debug.log* 6 + yarn-error.log* 7 + lerna-debug.log* 8 + .pnpm-debug.log* 9 + 10 + # Diagnostic reports (https://nodejs.org/api/report.html) 11 + report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 + 13 + # Runtime data 14 + pids 15 + *.pid 16 + *.seed 17 + *.pid.lock 18 + 19 + # Directory for instrumented libs generated by jscoverage/JSCover 20 + lib-cov 21 + 22 + # Coverage directory used by tools like istanbul 23 + coverage 24 + *.lcov 25 + 26 + # nyc test coverage 27 + .nyc_output 28 + 29 + # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 + .grunt 31 + 32 + # Bower dependency directory (https://bower.io/) 33 + bower_components 34 + 35 + # node-waf configuration 36 + .lock-wscript 37 + 38 + # Compiled binary addons (https://nodejs.org/api/addons.html) 39 + build/Release 40 + 41 + # Dependency directories 42 + node_modules/ 43 + jspm_packages/ 44 + 45 + # Snowpack dependency directory (https://snowpack.dev/) 46 + web_modules/ 47 + 48 + # TypeScript cache 49 + *.tsbuildinfo 50 + 51 + # Optional npm cache directory 52 + .npm 53 + 54 + # Optional eslint cache 55 + .eslintcache 56 + 57 + # Optional stylelint cache 58 + .stylelintcache 59 + 60 + # Microbundle cache 61 + .rpt2_cache/ 62 + .rts2_cache_cjs/ 63 + .rts2_cache_es/ 64 + .rts2_cache_umd/ 65 + 66 + # Optional REPL history 67 + .node_repl_history 68 + 69 + # Output of 'npm pack' 70 + *.tgz 71 + 72 + # Yarn Integrity file 73 + .yarn-integrity 74 + 75 + # dotenv environment variable files 76 + .env 77 + .env.development.local 78 + .env.test.local 79 + .env.production.local 80 + .env.local 81 + 82 + # parcel-bundler cache (https://parceljs.org/) 83 + .cache 84 + .parcel-cache 85 + 86 + # Next.js build output 87 + .next 88 + out 89 + 90 + # Nuxt.js build / generate output 91 + .nuxt 92 + dist 93 + 94 + # Gatsby files 95 + .cache/ 96 + # Comment in the public line in if your project uses Gatsby and not Next.js 97 + # 
https://nextjs.org/blog/next-9-1#public-directory-support 98 + # public 99 + 100 + # vuepress build output 101 + .vuepress/dist 102 + 103 + # vuepress v2.x temp and cache directory 104 + .temp 105 + .cache 106 + 107 + # vitepress build output 108 + **/.vitepress/dist 109 + 110 + # vitepress cache directory 111 + **/.vitepress/cache 112 + 113 + # Docusaurus cache and generated files 114 + .docusaurus 115 + 116 + # Serverless directories 117 + .serverless/ 118 + 119 + # FuseBox cache 120 + .fusebox/ 121 + 122 + # DynamoDB Local files 123 + .dynamodb/ 124 + 125 + # TernJS port file 126 + .tern-port 127 + 128 + # Stores VSCode versions used for testing VSCode extensions 129 + .vscode-test 130 + 131 + # yarn v2 132 + .yarn/cache 133 + .yarn/unplugged 134 + .yarn/build-state.yml 135 + .yarn/install-state.gz 136 + .pnp.*
+18
LICENSE
··· 1 + Copyright 2025 Spark Social PBC 2 + 3 + Permission is hereby granted, free of charge, to any person obtaining a copy of 4 + this software and associated documentation files (the “Software”), to deal in 5 + the Software without restriction, including without limitation the rights to 6 + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 + the Software, and to permit persons to whom the Software is furnished to do so, 8 + subject to the following conditions: 9 + 10 + The above copyright notice and this permission notice shall be included in all 11 + copies or substantial portions of the Software. 12 + 13 + THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 15 + FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 16 + COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 17 + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 18 + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+210
common/async.ts
··· 1 + import { bailableWait } from "./util.ts"; 2 + 3 + // reads values from a generator into a list 4 + // breaks when isDone signals `true` AND `waitFor` completes OR when a max length is reached 5 + // NOTE: does not signal generator to close. it *will* continue to produce values 6 + export const readFromGenerator = async <T>( 7 + gen: AsyncGenerator<T>, 8 + isDone: (last?: T) => Promise<boolean> | boolean, 9 + waitFor: Promise<unknown> = Promise.resolve(), 10 + maxLength = Number.MAX_SAFE_INTEGER, 11 + ): Promise<T[]> => { 12 + const evts: T[] = []; 13 + let bail: undefined | (() => void); 14 + let hasBroke = false; 15 + const awaitDone = async () => { 16 + if (await isDone(evts.at(-1))) { 17 + return true; 18 + } 19 + const bailable = bailableWait(20); 20 + await bailable.wait(); 21 + bail = bailable.bail; 22 + if (hasBroke) return false; 23 + return await awaitDone(); 24 + }; 25 + const breakOn: Promise<void> = new Promise((resolve) => { 26 + waitFor.then(() => { 27 + awaitDone().then(() => resolve()); 28 + }); 29 + }); 30 + 31 + try { 32 + while (evts.length < maxLength) { 33 + const maybeEvt = await Promise.race([gen.next(), breakOn]); 34 + if (!maybeEvt) break; 35 + const evt = maybeEvt as IteratorResult<T>; 36 + if (evt.done) break; 37 + evts.push(evt.value); 38 + } 39 + } finally { 40 + hasBroke = true; 41 + bail && bail(); 42 + } 43 + return evts; 44 + }; 45 + 46 + export type Deferrable = { 47 + resolve: () => void; 48 + complete: Promise<void>; 49 + }; 50 + 51 + export const createDeferrable = (): Deferrable => { 52 + let resolve!: () => void; 53 + const promise: Promise<void> = new Promise((res) => { 54 + resolve = () => res(); 55 + }); 56 + return { resolve, complete: promise }; 57 + }; 58 + 59 + export const createDeferrables = (count: number): Deferrable[] => { 60 + const list: Deferrable[] = []; 61 + for (let i = 0; i < count; i++) { 62 + list.push(createDeferrable()); 63 + } 64 + return list; 65 + }; 66 + 67 + export const allComplete = async 
(deferrables: Deferrable[]): Promise<void> => { 68 + await Promise.all(deferrables.map((d) => d.complete)); 69 + }; 70 + 71 + export class AsyncBuffer<T> { 72 + private buffer: T[] = []; 73 + private promise: Promise<void>; 74 + private resolve: () => void; 75 + private closed = false; 76 + private toThrow: unknown | undefined; 77 + 78 + constructor(public maxSize?: number) { 79 + // Initializing to satisfy types/build, immediately reset by resetPromise() 80 + this.promise = Promise.resolve(); 81 + this.resolve = () => null; 82 + this.resetPromise(); 83 + } 84 + 85 + get curr(): T[] { 86 + return this.buffer; 87 + } 88 + 89 + get size(): number { 90 + return this.buffer.length; 91 + } 92 + 93 + get isClosed(): boolean { 94 + return this.closed; 95 + } 96 + 97 + resetPromise() { 98 + this.promise = new Promise<void>((r) => (this.resolve = r)); 99 + } 100 + 101 + push(item: T) { 102 + this.buffer.push(item); 103 + this.resolve(); 104 + } 105 + 106 + pushMany(items: T[]) { 107 + items.forEach((i) => this.buffer.push(i)); 108 + this.resolve(); 109 + } 110 + 111 + async *events(): AsyncGenerator<T> { 112 + while (true) { 113 + if (this.closed && this.buffer.length === 0) { 114 + if (this.toThrow) { 115 + throw this.toThrow; 116 + } else { 117 + return; 118 + } 119 + } 120 + await this.promise; 121 + if (this.toThrow) { 122 + throw this.toThrow; 123 + } 124 + if (this.maxSize && this.size > this.maxSize) { 125 + throw new AsyncBufferFullError(this.maxSize); 126 + } 127 + const [first, ...rest] = this.buffer; 128 + if (first) { 129 + this.buffer = rest; 130 + yield first; 131 + } else { 132 + this.resetPromise(); 133 + } 134 + } 135 + } 136 + 137 + throw(err: unknown) { 138 + this.toThrow = err; 139 + this.closed = true; 140 + this.resolve(); 141 + } 142 + 143 + close() { 144 + this.closed = true; 145 + this.resolve(); 146 + } 147 + } 148 + 149 + export class AsyncBufferFullError extends Error { 150 + constructor(maxSize: number) { 151 + super(`ReachedMaxBufferSize: 
${maxSize}`); 152 + } 153 + } 154 + 155 + /** 156 + * Utility function that behaves like {@link Promise.allSettled} but returns the 157 + * same result as {@link Promise.all} in case every promise is fulfilled, and 158 + * throws an {@link AggregateError} if there are more than one errors. 159 + */ 160 + export function allFulfilled<T extends readonly unknown[] | []>( 161 + promises: T, 162 + ): Promise<{ -readonly [P in keyof T]: Awaited<T[P]> }>; 163 + export function allFulfilled<T>( 164 + promises: Iterable<T | PromiseLike<T>>, 165 + ): Promise<Awaited<T>[]>; 166 + export function allFulfilled( 167 + promises: Iterable<Promise<unknown>>, 168 + ): Promise<unknown[]> { 169 + return Promise.allSettled(promises).then(handleAllSettledErrors); 170 + } 171 + 172 + export function handleAllSettledErrors< 173 + T extends readonly PromiseSettledResult<unknown>[] | [], 174 + >( 175 + results: T, 176 + ): { 177 + -readonly [P in keyof T]: T[P] extends PromiseSettledResult<infer U> ? U 178 + : never; 179 + }; 180 + export function handleAllSettledErrors<T>( 181 + results: PromiseSettledResult<T>[], 182 + ): T[]; 183 + export function handleAllSettledErrors( 184 + results: PromiseSettledResult<unknown>[], 185 + ): unknown[] { 186 + if (results.every(isFulfilledResult)) return results.map(extractValue); 187 + 188 + const errors = results.filter(isRejectedResult).map(extractReason); 189 + throw errors; 190 + } 191 + 192 + export function isRejectedResult( 193 + result: PromiseSettledResult<unknown>, 194 + ): result is PromiseRejectedResult { 195 + return result.status === "rejected"; 196 + } 197 + 198 + function extractReason(result: PromiseRejectedResult): unknown { 199 + return result.reason; 200 + } 201 + 202 + export function isFulfilledResult<T>( 203 + result: PromiseSettledResult<T>, 204 + ): result is PromiseFulfilledResult<T> { 205 + return result.status === "fulfilled"; 206 + } 207 + 208 + function extractValue<T>(result: PromiseFulfilledResult<T>): T { 209 + return 
result.value; 210 + }
+29
common/check.ts
··· 1 + // Explicitly not using "zod" types here to avoid mismatching types due to 2 + // version differences. 3 + 4 + export interface Checkable<T> { 5 + parse: (obj: unknown) => T; 6 + safeParse: ( 7 + obj: unknown, 8 + ) => { success: true; data: T } | { success: false; error: Error }; 9 + } 10 + 11 + export interface Def<T> { 12 + name: string; 13 + schema: Checkable<T>; 14 + } 15 + 16 + export const is = <T>(obj: unknown, def: Checkable<T>): obj is T => { 17 + return def.safeParse(obj).success; 18 + }; 19 + 20 + export const create = <T>(def: Checkable<T>) => (v: unknown): v is T => 21 + def.safeParse(v).success; 22 + 23 + export const assure = <T>(def: Checkable<T>, obj: unknown): T => { 24 + return def.parse(obj); 25 + }; 26 + 27 + export const isObject = (obj: unknown): obj is Record<string, unknown> => { 28 + return typeof obj === "object" && obj !== null; 29 + };
+17
common/deno.json
··· 1 + { 2 + "name": "@atp/common", 3 + "version": "0.1.0-alpha.1", 4 + "exports": "./mod.ts", 5 + "license": "MIT", 6 + "imports": { 7 + "@ipld/dag-cbor": "npm:@ipld/dag-cbor@^9.2.5", 8 + "@logtape/file": "jsr:@logtape/file@^1.0.4", 9 + "@logtape/logtape": "jsr:@logtape/logtape@^1.0.4", 10 + "@std/cbor": "jsr:@std/cbor@^0.1.8", 11 + "@std/encoding": "jsr:@std/encoding@^1.0.10", 12 + "@std/fs": "jsr:@std/fs@^1.0.19", 13 + "@std/streams": "jsr:@std/streams@^1.0.12", 14 + "multiformats": "npm:multiformats@^13.4.0", 15 + "zod": "jsr:@zod/zod@^4.1.5" 16 + } 17 + }
+42
common/env.ts
··· 1 + import { parseIntWithFallback } from "./util.ts"; 2 + import process from "node:process"; 3 + 4 + // Detect runtime environment 5 + const isDeno = typeof Deno !== "undefined"; 6 + 7 + // Runtime-agnostic environment variable getter 8 + const getEnvVar = (name: string): string | undefined => { 9 + if (isDeno) { 10 + return Deno.env.get(name); 11 + } else { 12 + try { 13 + return process?.env?.[name]; 14 + } catch { 15 + return undefined; 16 + } 17 + } 18 + }; 19 + 20 + export const envInt = (name: string): number | undefined => { 21 + const str = getEnvVar(name); 22 + return parseIntWithFallback(str, undefined); 23 + }; 24 + 25 + export const envStr = (name: string): string | undefined => { 26 + const str = getEnvVar(name); 27 + if (str === undefined || str.length === 0) return undefined; 28 + return str; 29 + }; 30 + 31 + export const envBool = (name: string): boolean | undefined => { 32 + const str = getEnvVar(name); 33 + if (str === "true" || str === "1") return true; 34 + if (str === "false" || str === "0") return false; 35 + return undefined; 36 + }; 37 + 38 + export const envList = (name: string): string[] => { 39 + const str = getEnvVar(name); 40 + if (str === undefined || str.length === 0) return []; 41 + return str.split(","); 42 + };
+35
common/fs.ts
··· 1 + import { readFile } from "@std/fs/unstable-read-file"; 2 + import { remove } from "@std/fs/unstable-remove"; 3 + import { rename } from "@std/fs/unstable-rename"; 4 + 5 + export const readIfExists = async ( 6 + filepath: string, 7 + ): Promise<Uint8Array | undefined> => { 8 + try { 9 + return await readFile(filepath); 10 + } catch (err) { 11 + throw err; 12 + } 13 + }; 14 + 15 + export const rmIfExists = async ( 16 + filepath: string, 17 + recursive = false, 18 + ): Promise<void> => { 19 + try { 20 + await remove(filepath, { recursive }); 21 + } catch (err) { 22 + throw err; 23 + } 24 + }; 25 + 26 + export const renameIfExists = async ( 27 + oldPath: string, 28 + newPath: string, 29 + ): Promise<void> => { 30 + try { 31 + await rename(oldPath, newPath); 32 + } catch (err) { 33 + throw err; 34 + } 35 + };
+83
common/ipld-multi.ts
··· 1 + import { decodeCbor } from "@std/cbor"; 2 + import { CID } from "multiformats/cid"; 3 + 4 + // Custom CBOR decoder that handles CIDs and multiple values 5 + class CborMultiDecoder { 6 + private buffer: Uint8Array; 7 + private position: number = 0; 8 + 9 + constructor(encoded: Uint8Array) { 10 + this.buffer = encoded; 11 + } 12 + 13 + private decodeCid(bytes: Uint8Array): CID { 14 + if (bytes[0] !== 0) { 15 + throw new Error("Invalid CID for CBOR tag 42; expected leading 0x00"); 16 + } 17 + return CID.decode(bytes.subarray(1)); // ignore leading 0x00 18 + } 19 + 20 + private decodeValue(): unknown { 21 + // Find the next complete CBOR value 22 + const remaining = this.buffer.subarray(this.position); 23 + 24 + // Use @std/cbor to decode the next value 25 + // Note: @std/cbor doesn't have built-in support for decoding multiple values 26 + // or custom tags like cborx, so we need to handle this manually 27 + 28 + // For now, we'll decode the entire remaining buffer and handle CID tags manually 29 + const decoded = decodeCbor(remaining); 30 + 31 + // Update position to end of buffer (simplified approach) 32 + this.position = this.buffer.length; 33 + 34 + return this.processValue(decoded); 35 + } 36 + 37 + private processValue(value: unknown): unknown { 38 + // Handle CID tag 42 if present 39 + if ( 40 + value && typeof value === "object" && "tag" in value && 41 + "value" in value && 42 + (value as { tag: number }).tag === 42 43 + ) { 44 + return this.decodeCid((value as { value: Uint8Array }).value); 45 + } 46 + 47 + // Recursively process arrays and objects 48 + if (Array.isArray(value)) { 49 + return value.map((item) => this.processValue(item)); 50 + } 51 + 52 + if (value && typeof value === "object") { 53 + const result: Record<string, unknown> = {}; 54 + for (const [key, val] of Object.entries(value)) { 55 + result[key] = this.processValue(val); 56 + } 57 + return result; 58 + } 59 + 60 + return value; 61 + } 62 + 63 + decodeMultiple(): unknown[] { 64 + 
const decoded: unknown[] = []; 65 + 66 + // Note: This is a simplified implementation 67 + // @std/cbor doesn't have native support for decoding multiple concatenated CBOR values 68 + // A more robust implementation would need to parse the CBOR structure manually 69 + try { 70 + const value = decodeCbor(this.buffer); 71 + decoded.push(this.processValue(value)); 72 + } catch (error) { 73 + throw new Error(`Failed to decode CBOR: ${error}`); 74 + } 75 + 76 + return decoded; 77 + } 78 + } 79 + 80 + export const cborDecodeMulti = (encoded: Uint8Array): unknown[] => { 81 + const decoder = new CborMultiDecoder(encoded); 82 + return decoder.decodeMultiple(); 83 + };
+236
common/ipld.ts
import * as cborCodec from "@ipld/dag-cbor";
import * as mf from "multiformats";
import * as Block from "multiformats/block";
import { CID } from "multiformats/cid";
import * as rawCodec from "multiformats/codecs/raw";
import { sha256 } from "multiformats/hashes/sha2";
import { schema } from "./types.ts";
import * as check from "./check.ts";
import { crypto } from "jsr:@std/crypto";
import { concat, equals } from "jsr:@std/bytes";

// Re-exported DAG-CBOR (de)serializers.
export const cborEncode = cborCodec.encode;
export const cborDecode = cborCodec.decode;

// Encodes `data` as a DAG-CBOR block addressed by a sha2-256 CID.
export const dataToCborBlock = (data: unknown) => {
  return Block.encode({
    value: data,
    codec: cborCodec,
    hasher: sha256,
  });
};

// Computes the CID `data` would have when encoded as DAG-CBOR.
export const cidForCbor = async (data: unknown): Promise<CID> => {
  const block = await dataToCborBlock(data);
  return block.cid;
};

// True when `cidStr` parses as a CID whose canonical string form round-trips
// exactly (rejects alternative string encodings of the same CID).
export const isValidCid = (cidStr: string): boolean => {
  try {
    const parsed = CID.parse(cidStr);
    return parsed.toString() === cidStr;
  } catch {
    return false;
  }
};

// Decodes CBOR bytes and asserts the result is a map/object.
export const cborBytesToRecord = (
  bytes: Uint8Array,
): Record<string, unknown> => {
  const val = cborDecode(bytes);
  if (!check.is(val, schema.map)) {
    throw new Error(`Expected object, got: ${val}`);
  }
  return val as Record<string, unknown>;
};

// Verifies `cid` matches the sha2-256 digest of `bytes` (using cid's own
// codec). Throws when the recomputed CID differs.
export const verifyCidForBytes = async (cid: CID, bytes: Uint8Array) => {
  const digest = await sha256.digest(bytes);
  const expected = CID.createV1(cid.code, digest);
  if (!cid.equals(expected)) {
    throw new Error(
      `Not a valid CID for bytes. Expected: ${expected} Got: ${cid}`,
    );
  }
};

// Wraps a raw sha2-256 digest into a CIDv1 with the given codec.
export const sha256ToCid = (hash: Uint8Array, codec: number): CID => {
  const digest = mf.digest.create(sha256.code, hash);
  return CID.createV1(codec, digest);
};

// Wraps a raw sha2-256 digest into a CIDv1 with the raw codec.
export const sha256RawToCid = (hash: Uint8Array): CID => {
  return sha256ToCid(hash, rawCodec.code);
};

// @NOTE: Only supports DASL CIDs
// https://dasl.ing/cid.html
// Expected layout: [version=0x01][codec][hash-fn=0x12][len=32][32-byte digest]
export const parseCidFromBytes = (cidBytes: Uint8Array): CID => {
  const version = cidBytes[0];
  if (version !== 0x01) {
    throw new Error(`Unsupported CID version: ${version}`);
  }
  const codec = cidBytes[1];
  // 0x55 = raw, 0x71 = dag-cbor
  if (codec !== 0x55 && codec !== 0x71) {
    throw new Error(`Unsupported CID codec: ${codec}`);
  }
  const hashType = cidBytes[2];
  // 0x12 = sha2-256
  if (hashType !== 0x12) {
    throw new Error(`Unsupported CID hash function: ${hashType}`);
  }
  const hashLength = cidBytes[3];
  if (hashLength !== 32) {
    throw new Error(`Unexpected CID hash length: ${hashLength}`);
  }
  const rest = cidBytes.slice(4);
  return sha256ToCid(rest, codec);
};

// Pass-through transform that accumulates every chunk and, at end of stream,
// checks the sha2-256 (raw codec) CID of the full payload against `cid`,
// erroring the stream on mismatch.
// NOTE(review): the whole stream is held in memory until flush — confirm
// callers only use this for bounded payloads.
export class VerifyCidTransform
  extends TransformStream<Uint8Array, Uint8Array> {
  private chunks: Uint8Array[] = [];

  constructor(public cid: CID) {
    super({
      transform: (chunk: Uint8Array, controller) => {
        this.chunks.push(chunk);
        controller.enqueue(chunk);
      },
      flush: async (controller) => {
        try {
          const data = concat(this.chunks);
          const hash = new Uint8Array(
            await crypto.subtle.digest("SHA-256", data),
          );
          const actual = sha256RawToCid(hash);
          if (!actual.equals(cid)) {
            controller.error(new VerifyCidError(cid, actual));
          }
        } catch (err) {
          controller.error(asError(err));
        }
      },
    });
  }
}

// Normalizes arbitrary thrown values into Error instances.
const asError = (err: unknown): Error =>
  err instanceof Error ? err : new Error("Unexpected error", { cause: err });

export class VerifyCidError extends Error {
  constructor(
    public expected: CID,
    public actual: CID,
  ) {
    super("Bad cid check");
  }
}

// NOTE(review): the `unknown` member makes this union collapse to `unknown`,
// so JsonValue provides no actual type checking — confirm whether it can be
// dropped without breaking callers.
export type JsonValue =
  | boolean
  | number
  | string
  | null
  | undefined
  | unknown
  | Array<JsonValue>
  | { [key: string]: JsonValue };

export type IpldValue =
  | JsonValue
  | CID
  | Uint8Array
  | Array<IpldValue>
  | { [key: string]: IpldValue };

// @NOTE avoiding use of check.is() here only because it makes
// these implementations slow, and they often live in hot paths.

// Converts DAG-JSON form ({ $link }, { $bytes }) into IPLD values
// (CID instances and Uint8Array).
export const jsonToIpld = (val: JsonValue): IpldValue => {
  // walk arrays
  if (Array.isArray(val)) {
    return val.map((item) => jsonToIpld(item));
  }
  // objects
  if (val && typeof val === "object") {
    const obj = val as Record<string, unknown>;
    // check for dag json values
    if (typeof obj["$link"] === "string" && Object.keys(obj).length === 1) {
      return CID.parse(obj["$link"]);
    }
    if (typeof obj["$bytes"] === "string" && Object.keys(obj).length === 1) {
      // base64 string -> bytes
      return new Uint8Array(
        atob(obj["$bytes"]).split("").map((c) => c.charCodeAt(0)),
      );
    }
    // walk plain objects
    const toReturn: Record<string, IpldValue> = {};
    for (const key of Object.keys(obj)) {
      toReturn[key] = jsonToIpld(obj[key] as JsonValue);
    }
    return toReturn;
  }
  // pass through
  return val;
};

// Converts IPLD values back into their DAG-JSON form.
export const ipldToJson = (val: IpldValue): JsonValue => {
  // walk arrays
  if (Array.isArray(val)) {
    return val.map((item) => ipldToJson(item));
  }
  // objects
  if (val && typeof val === "object") {
    // convert bytes
    if (val instanceof Uint8Array) {
      return {
        $bytes: btoa(String.fromCharCode(...val)),
      };
    }
    // convert cids
    if (CID.asCID(val)) {
      return {
        $link: (val as CID).toString(),
      };
    }
    // walk plain objects
    const toReturn: Record<string, JsonValue> = {};
    for (const key of Object.keys(val as Record<string, unknown>)) {
      toReturn[key] = ipldToJson((val as Record<string, IpldValue>)[key]);
    }
    return toReturn;
  }
  // pass through
  return val as JsonValue;
};

// Deep structural equality over IPLD values: bytes compared by content,
// CIDs by CID.equals, everything else recursively or by ===.
export const ipldEquals = (a: IpldValue, b: IpldValue): boolean => {
  // walk arrays
  if (Array.isArray(a) && Array.isArray(b)) {
    if (a.length !== b.length) return false;
    for (let i = 0; i < a.length; i++) {
      if (!ipldEquals(a[i], b[i])) return false;
    }
    return true;
  }
  // objects
  if (a && b && typeof a === "object" && typeof b === "object") {
    // check bytes
    if (a instanceof Uint8Array && b instanceof Uint8Array) {
      return equals(a, b);
    }
    // check cids
    if (CID.asCID(a) && CID.asCID(b)) {
      return CID.asCID(a)!.equals(CID.asCID(b)!);
    }
    // walk plain objects
    const objA = a as Record<string, IpldValue>;
    const objB = b as Record<string, IpldValue>;
    if (Object.keys(objA).length !== Object.keys(objB).length) return false;
    for (const key of Object.keys(objA)) {
      if (!ipldEquals(objA[key], objB[key])) return false;
    }
    return true;
  }
  return a === b;
};
+83
common/logger.ts
··· 1 + import { 2 + configure, 3 + getConsoleSink, 4 + getLogger, 5 + type Logger, 6 + type LogLevel, 7 + type Sink, 8 + } from "jsr:@logtape/logtape"; 9 + import { getFileSink } from "jsr:@logtape/file"; 10 + 11 + const allSystemsEnabled = !Deno.env.get("LOG_SYSTEMS"); 12 + const enabledSystems = (Deno.env.get("LOG_SYSTEMS") || "") 13 + .replace(",", " ") 14 + .split(" ") 15 + .filter(Boolean); 16 + 17 + const enabledEnv = Deno.env.get("LOG_ENABLED"); 18 + const enabled = enabledEnv === "true" || enabledEnv === "t" || 19 + enabledEnv === "1"; 20 + 21 + const level = (Deno.env.get("LOG_LEVEL") || "info") as LogLevel; 22 + const logDestination = Deno.env.get("LOG_DESTINATION"); 23 + 24 + // Initialize LogTape configuration 25 + let configured = false; 26 + 27 + async function ensureConfigured() { 28 + if (configured || !enabled) return; 29 + 30 + const sinks: Record<string, Sink> = { 31 + console: getConsoleSink(), 32 + }; 33 + 34 + // Add file sink if LOG_DESTINATION is specified 35 + if (logDestination) { 36 + sinks.file = getFileSink(logDestination); 37 + } 38 + 39 + await configure({ 40 + sinks, 41 + loggers: [ 42 + { 43 + category: [], // Root logger 44 + lowestLevel: level, 45 + sinks: logDestination ? 
["console", "file"] : ["console"], 46 + }, 47 + ], 48 + }); 49 + 50 + configured = true; 51 + } 52 + 53 + const subsystemLoggers: Record<string, Logger> = {}; 54 + 55 + export const subsystemLogger = (name: string) => { 56 + if (subsystemLoggers[name]) return subsystemLoggers[name]; 57 + 58 + const subsystemEnabled = enabled && 59 + (allSystemsEnabled || enabledSystems.includes(name)); 60 + 61 + // Ensure LogTape is configured before creating loggers 62 + ensureConfigured().catch(console.error); 63 + 64 + // Create LogTape logger for this subsystem 65 + const logger = getLogger([name]); 66 + 67 + if (!subsystemEnabled) { 68 + // Create a wrapper that no-ops all logging methods for disabled subsystems 69 + const noOpLogger: Logger = { 70 + ...logger, 71 + debug: () => {}, 72 + info: () => {}, 73 + warn: () => {}, 74 + error: () => {}, 75 + fatal: () => {}, 76 + }; 77 + subsystemLoggers[name] = noOpLogger; 78 + return subsystemLoggers[name]; 79 + } 80 + 81 + subsystemLoggers[name] = logger; 82 + return subsystemLoggers[name]; 83 + };
+14
common/mod.ts
// Barrel module for the common package.
// `util` and `check` are re-exported as namespaces; all other submodules
// are re-exported flat.
export * as util from "./util.ts";
export * as check from "./check.ts";

export * from "./env.ts";
export * from "./fs.ts";
export * from "./ipld.ts";
export * from "./ipld-multi.ts";
export * from "./obfuscate.ts";
export * from "./streams.ts";
export * from "./async.ts";
export * from "./types.ts";
export * from "./tid.ts";
export * from "./strings.ts";
export * from "./logger.ts";
+91
common/obfuscate.ts
··· 1 + import { decodeBase64 } from "@std/encoding"; 2 + 3 + export function obfuscateEmail(email: string) { 4 + const [local, domain] = email.split("@"); 5 + return `${obfuscateWord(local)}@${obfuscateWord(domain)}`; 6 + } 7 + 8 + export function obfuscateWord(word: string) { 9 + return `${word.charAt(0)}***${word.charAt(word.length - 1)}`; 10 + } 11 + 12 + export function obfuscateHeaders(headers: Record<string, string>) { 13 + const obfuscatedHeaders: Record<string, string> = {}; 14 + for (const key in headers) { 15 + if (key.toLowerCase() === "authorization") { 16 + obfuscatedHeaders[key] = obfuscateAuthHeader(headers[key]); 17 + } else if (key.toLowerCase() === "dpop") { 18 + obfuscatedHeaders[key] = obfuscateJwt(headers[key]) || "Invalid"; 19 + } else { 20 + obfuscatedHeaders[key] = headers[key]; 21 + } 22 + } 23 + return obfuscatedHeaders; 24 + } 25 + 26 + export function obfuscateAuthHeader(authHeader: string): string { 27 + // This is a hot path (runs on every request). Avoid using split() or regex. 
28 + 29 + const spaceIdx = authHeader.indexOf(" "); 30 + if (spaceIdx === -1) return "Invalid"; 31 + 32 + const type = authHeader.slice(0, spaceIdx); 33 + switch (type.toLowerCase()) { 34 + case "bearer": 35 + case "dpop": 36 + return `${type} ${obfuscateBearer(authHeader.slice(spaceIdx + 1))}`; 37 + case "basic": 38 + return `${type} ${ 39 + obfuscateBasic(authHeader.slice(spaceIdx + 1)) || "Invalid" 40 + }`; 41 + default: 42 + return `Invalid`; 43 + } 44 + } 45 + 46 + export function obfuscateBasic(token: string): null | string { 47 + if (!token) return null; 48 + const buffer = decodeBase64(token); 49 + if (!buffer.length) return null; 50 + const authHeader = new TextDecoder("utf-8").decode(buffer); 51 + const colIdx = authHeader.indexOf(":"); 52 + if (colIdx === -1) return null; 53 + const username = authHeader.slice(0, colIdx); 54 + return `${username}:***`; 55 + } 56 + 57 + export function obfuscateBearer(token: string): string { 58 + return obfuscateJwt(token) || obfuscateToken(token); 59 + } 60 + 61 + export function obfuscateToken(token: string): string { 62 + if (token.length >= 12) return obfuscateWord(token); 63 + return token ? 
"***" : ""; 64 + } 65 + 66 + export function obfuscateJwt(token: string): null | string { 67 + const firstDot = token.indexOf("."); 68 + if (firstDot === -1) return null; 69 + 70 + const secondDot = token.indexOf(".", firstDot + 1); 71 + if (secondDot === -1) return null; 72 + 73 + // Expected to be missing 74 + const thirdDot = token.indexOf(".", secondDot + 1); 75 + if (thirdDot !== -1) return null; 76 + 77 + try { 78 + const payloadEnc = token.slice(firstDot + 1, secondDot); 79 + const payloadJson = new TextDecoder("utf-8").decode( 80 + decodeBase64(payloadEnc), 81 + ); 82 + const payload = JSON.parse(payloadJson); 83 + if (typeof payload.sub === "string") return payload.sub; 84 + } catch { 85 + // Invalid JWT 86 + return null; 87 + } 88 + 89 + // Strip the signature 90 + return token.slice(0, secondDot) + ".obfuscated"; 91 + }
+216
common/streams.ts
import { concat } from "jsr:@std/bytes";
import { Buffer } from "jsr:@std/io";

// Kept for API compatibility: Web Streams propagate errors through the pipe
// chain itself, unlike Node streams' 'error' events.
export const forwardStreamErrors = (..._streams: ReadableStream[]) => {
  // Web Streams don't have the same error forwarding mechanism as Node streams
  // This is a no-op in the Web Streams world since error handling is done differently
};

// Tees the stream and returns one branch.
// NOTE(review): the input stream is locked after tee() and the first branch
// is never read — confirm callers never consume the original, and that
// dropping a branch is acceptable (an unread tee branch buffers chunks).
export const cloneStream = (
  stream: ReadableStream<Uint8Array>,
): ReadableStream<Uint8Array> => {
  const [_stream1, stream2] = stream.tee();
  return stream2;
};

// Fully consumes the stream, returning the total byte length.
export const streamSize = async (
  stream: ReadableStream<Uint8Array>,
): Promise<number> => {
  let size = 0;
  const reader = stream.getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      size += value.byteLength;
    }
  } finally {
    reader.releaseLock();
  }
  return size;
};

// Collects an entire byte stream (ReadableStream or async iterable) into a
// single Uint8Array.
export const streamToBytes = async (
  stream: AsyncIterable<Uint8Array> | ReadableStream<Uint8Array>,
): Promise<Uint8Array> => {
  const chunks: Uint8Array[] = [];

  if (stream instanceof ReadableStream) {
    const reader = stream.getReader();
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        chunks.push(value);
      }
    } finally {
      reader.releaseLock();
    }
  } else {
    for await (const chunk of stream) {
      if (chunk instanceof Uint8Array) {
        chunks.push(chunk);
      } else {
        throw new TypeError("expected Uint8Array");
      }
    }
  }

  return concat(chunks);
};

// streamToBuffer identifier name already taken by @atproto/common-web
// Collects the stream into a @std/io Buffer.
export const streamToNodeBuffer = async (
  stream:
    | Iterable<Uint8Array>
    | AsyncIterable<Uint8Array>
    | ReadableStream<Uint8Array>,
): Promise<Buffer> => {
  const bytes = await streamToBytes(stream as AsyncIterable<Uint8Array>);
  const buffer = new Buffer();
  await buffer.write(bytes);
  return buffer;
};

// Adapts an async iterable of chunks into a ReadableStream, propagating
// iteration errors to the stream consumer.
export const byteIterableToStream = (
  iter: AsyncIterable<Uint8Array>,
): ReadableStream<Uint8Array> => {
  return new ReadableStream({
    async start(controller) {
      try {
        for await (const chunk of iter) {
          controller.enqueue(chunk);
        }
        controller.close();
      } catch (error) {
        controller.error(error);
      }
    },
  });
};

// Wraps a single byte array in a one-chunk ReadableStream.
export const bytesToStream = (
  bytes: Uint8Array,
): ReadableStream<Uint8Array> => {
  return new ReadableStream({
    start(controller) {
      controller.enqueue(bytes);
      controller.close();
    },
  });
};

// Pass-through transform that errors the stream (with the error built by
// `createError`) once the cumulative size exceeds `maxSize`.
export class MaxSizeChecker extends TransformStream<Uint8Array, Uint8Array> {
  totalSize = 0;

  constructor(
    public maxSize: number,
    public createError: () => Error,
  ) {
    super({
      transform: (chunk, controller) => {
        this.totalSize += chunk.byteLength;
        if (this.totalSize > this.maxSize) {
          controller.error(this.createError());
        } else {
          controller.enqueue(chunk);
        }
      },
    });
  }
}

// Decodes a (possibly multi-encoded) body per its content-encoding header.
// Returns the input unchanged when no decoding is required.
export function decodeStream(
  stream: ReadableStream<Uint8Array>,
  contentEncoding?: string | string[],
): ReadableStream<Uint8Array>;
export function decodeStream(
  stream: AsyncIterable<Uint8Array>,
  contentEncoding?: string | string[],
): AsyncIterable<Uint8Array> | ReadableStream<Uint8Array>;
export function decodeStream(
  stream: ReadableStream<Uint8Array> | AsyncIterable<Uint8Array>,
  contentEncoding?: string | string[],
): ReadableStream<Uint8Array> | AsyncIterable<Uint8Array> {
  const decoders = createDecoders(contentEncoding);
  if (decoders.length === 0) return stream;

  let result: ReadableStream<Uint8Array>;

  if (stream instanceof ReadableStream) {
    result = stream;
  } else {
    result = byteIterableToStream(stream);
  }

  // Chain the decoders together
  for (const decoder of decoders) {
    result = result.pipeThrough(decoder);
  }

  return result;
}

/**
 * Create a series of decoding streams based on the content-encoding header. The
 * resulting streams should be piped together to decode the content.
 *
 * @see {@link https://datatracker.ietf.org/doc/html/rfc9110#section-8.4.1}
 */
export function createDecoders(
  contentEncoding?: string | string[],
): TransformStream<Uint8Array, Uint8Array>[] {
  const decoders: TransformStream<Uint8Array, Uint8Array>[] = [];

  if (contentEncoding?.length) {
    const encodings: string[] = Array.isArray(contentEncoding)
      ? contentEncoding.flatMap(commaSplit)
      : contentEncoding.split(",");
    for (const encoding of encodings) {
      const normalizedEncoding = normalizeEncoding(encoding);

      // @NOTE
      // > The default (identity) encoding [...] is used only in the
      // > Accept-Encoding header, and SHOULD NOT be used in the
      // > Content-Encoding header.
      if (normalizedEncoding === "identity") continue;

      decoders.push(createDecoder(normalizedEncoding));
    }
  }

  // Encodings are applied in order by the sender, so they are decoded in
  // reverse order here.
  return decoders.reverse();
}

// Splits one comma-separated header value into its parts.
function commaSplit(header: string): string[] {
  return header.split(",");
}

function normalizeEncoding(encoding: string) {
  // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
  // > All content-coding values are case-insensitive...
  return encoding.trim().toLowerCase();
}

// Maps one normalized content-coding name to a decompressing TransformStream.
function createDecoder(
  normalizedEncoding: string,
): TransformStream<Uint8Array, Uint8Array> {
  switch (normalizedEncoding) {
    // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
    case "gzip":
    case "x-gzip":
      return new DecompressionStream("gzip");
    case "deflate":
      return new DecompressionStream("deflate");
    case "br":
      throw new TypeError(
        `Brotli decompression is not supported in this Deno implementation`,
      );
    case "identity":
      return new TransformStream(); // Pass-through
    default:
      throw new TypeError(
        `Unsupported content-encoding: "${normalizedEncoding}"`,
      );
  }
}
+79
common/strings.ts
··· 1 + // counts the number of bytes in a utf8 string 2 + export const utf8Len = (str: string): number => { 3 + return new TextEncoder().encode(str).byteLength; 4 + }; 5 + 6 + // counts the number of graphemes (user-displayed characters) in a string 7 + // Using Intl.Segmenter which is supported in Deno and modern browsers 8 + export const graphemeLen = (str: string): number => { 9 + if (typeof Intl !== "undefined" && "Segmenter" in Intl) { 10 + const segmenter = new Intl.Segmenter(undefined, { 11 + granularity: "grapheme", 12 + }); 13 + return Array.from(segmenter.segment(str)).length; 14 + } 15 + 16 + // Fallback for environments without Intl.Segmenter 17 + // This is a simplified approach that handles basic cases 18 + return Array.from(str).length; 19 + }; 20 + 21 + export const utf8ToB64Url = (utf8: string): string => { 22 + const encoder = new TextEncoder(); 23 + const bytes = encoder.encode(utf8); 24 + return btoa(String.fromCharCode(...bytes)) 25 + .replace(/\+/g, "-") 26 + .replace(/\//g, "_") 27 + .replace(/=/g, ""); 28 + }; 29 + 30 + export const b64UrlToUtf8 = (b64: string): string => { 31 + // Convert base64url to base64 32 + const base64 = b64.replace(/-/g, "+").replace(/_/g, "/"); 33 + // Add padding if needed 34 + const padded = base64 + "=".repeat((4 - (base64.length % 4)) % 4); 35 + 36 + const binaryString = atob(padded); 37 + const bytes = new Uint8Array(binaryString.length); 38 + for (let i = 0; i < binaryString.length; i++) { 39 + bytes[i] = binaryString.charCodeAt(i); 40 + } 41 + 42 + const decoder = new TextDecoder(); 43 + return decoder.decode(bytes); 44 + }; 45 + 46 + export const parseLanguage = (langTag: string): LanguageTag | null => { 47 + const parsed = langTag.match(bcp47Regexp); 48 + if (!parsed?.groups) return null; 49 + const parts = parsed.groups; 50 + return { 51 + grandfathered: parts.grandfathered, 52 + language: parts.language, 53 + extlang: parts.extlang, 54 + script: parts.script, 55 + region: parts.region, 56 + variant: 
parts.variant, 57 + extension: parts.extension, 58 + privateUse: parts.privateUseA || parts.privateUseB, 59 + }; 60 + }; 61 + 62 + export const validateLanguage = (langTag: string): boolean => { 63 + return bcp47Regexp.test(langTag); 64 + }; 65 + 66 + export type LanguageTag = { 67 + grandfathered?: string; 68 + language?: string; 69 + extlang?: string; 70 + script?: string; 71 + region?: string; 72 + variant?: string; 73 + extension?: string; 74 + privateUse?: string; 75 + }; 76 + 77 + // Validates well-formed BCP 47 syntax: https://www.rfc-editor.org/rfc/rfc5646.html#section-2.1 78 + const bcp47Regexp = 79 + /^((?<grandfathered>(en-GB-oed|i-ami|i-bnn|i-default|i-enochian|i-hak|i-klingon|i-lux|i-mingo|i-navajo|i-pwn|i-tao|i-tay|i-tsu|sgn-BE-FR|sgn-BE-NL|sgn-CH-DE)|(art-lojban|cel-gaulish|no-bok|no-nyn|zh-guoyu|zh-hakka|zh-min|zh-min-nan|zh-xiang))|((?<language>([A-Za-z]{2,3}(-(?<extlang>[A-Za-z]{3}(-[A-Za-z]{3}){0,2}))?)|[A-Za-z]{4}|[A-Za-z]{5,8})(-(?<script>[A-Za-z]{4}))?(-(?<region>[A-Za-z]{2}|[0-9]{3}))?(-(?<variant>[A-Za-z0-9]{5,8}|[0-9][A-Za-z0-9]{3}))*(-(?<extension>[0-9A-WY-Za-wy-z](-[A-Za-z0-9]{2,8})+))*(-(?<privateUseA>x(-[A-Za-z0-9]{1,8})+))?)|(?<privateUseB>x(-[A-Za-z0-9]{1,8})+))$/;
+112
common/tid.ts
··· 1 + import { s32decode, s32encode } from "./util.ts"; 2 + 3 + const TID_LEN = 13; 4 + 5 + let lastTimestamp = 0; 6 + let timestampCount = 0; 7 + let clockid: number | null = null; 8 + 9 + function dedash(str: string): string { 10 + return str.replaceAll("-", ""); 11 + } 12 + 13 + export class TID { 14 + str: string; 15 + 16 + constructor(str: string) { 17 + const noDashes = dedash(str); 18 + if (noDashes.length !== TID_LEN) { 19 + throw new Error(`Poorly formatted TID: ${noDashes.length} length`); 20 + } 21 + this.str = noDashes; 22 + } 23 + 24 + static next(prev?: TID): TID { 25 + // javascript does not have microsecond precision 26 + // instead, we append a counter to the timestamp to indicate if multiple timestamps were created within the same millisecond 27 + // take max of current time & last timestamp to prevent tids moving backwards if system clock drifts backwards 28 + const time = Math.max(Date.now(), lastTimestamp); 29 + if (time === lastTimestamp) { 30 + timestampCount++; 31 + } 32 + lastTimestamp = time; 33 + const timestamp = time * 1000 + timestampCount; 34 + // the bottom 32 clock ids can be randomized & are not guaranteed to be collision resistant 35 + // we use the same clockid for all tids coming from this machine 36 + if (clockid === null) { 37 + clockid = Math.floor(Math.random() * 32); 38 + } 39 + const tid = TID.fromTime(timestamp, clockid); 40 + if (!prev || tid.newerThan(prev)) { 41 + return tid; 42 + } 43 + return TID.fromTime(prev.timestamp() + 1, clockid); 44 + } 45 + 46 + static nextStr(prev?: string): string { 47 + return TID.next(prev ? 
new TID(prev) : undefined).toString(); 48 + } 49 + 50 + static fromTime(timestamp: number, clockid: number): TID { 51 + // base32 encode with encoding variant sort (s32) 52 + const str = `${s32encode(timestamp)}${s32encode(clockid).padStart(2, "2")}`; 53 + return new TID(str); 54 + } 55 + 56 + static fromStr(str: string): TID { 57 + return new TID(str); 58 + } 59 + 60 + static oldestFirst(a: TID, b: TID): number { 61 + return a.compareTo(b); 62 + } 63 + 64 + static newestFirst(a: TID, b: TID): number { 65 + return b.compareTo(a); 66 + } 67 + 68 + static is(str: string): boolean { 69 + return dedash(str).length === TID_LEN; 70 + } 71 + 72 + timestamp(): number { 73 + return s32decode(this.str.slice(0, 11)); 74 + } 75 + 76 + clockid(): number { 77 + return s32decode(this.str.slice(11, 13)); 78 + } 79 + 80 + formatted(): string { 81 + const str = this.toString(); 82 + return `${str.slice(0, 4)}-${str.slice(4, 7)}-${ 83 + str.slice( 84 + 7, 85 + 11, 86 + ) 87 + }-${str.slice(11, 13)}`; 88 + } 89 + 90 + toString(): string { 91 + return this.str; 92 + } 93 + 94 + // newer > older 95 + compareTo(other: TID): number { 96 + if (this.str > other.str) return 1; 97 + if (this.str < other.str) return -1; 98 + return 0; 99 + } 100 + 101 + equals(other: TID): boolean { 102 + return this.str === other.str; 103 + } 104 + 105 + newerThan(other: TID): boolean { 106 + return this.compareTo(other) > 0; 107 + } 108 + 109 + olderThan(other: TID): boolean { 110 + return this.compareTo(other) < 0; 111 + } 112 + }
+64
common/types.ts
import { CID } from "multiformats/cid";
import { z } from "zod";
import type { Def } from "./check.ts";

// Accepts any value that CID.asCID() recognizes as (or can coerce to) a CID;
// reports a custom zod issue and bails out otherwise.
const cidSchema = z.unknown().transform((obj, ctx): CID => {
  const cid = CID.asCID(obj);

  if (cid == null) {
    ctx.addIssue({
      code: z.ZodIssueCode.custom,
      message: "Not a valid CID",
    });
    // z.NEVER satisfies the return type while the added issue fails parsing.
    return z.NEVER;
  }

  return cid;
});

// Shape of a CARv1 header: version tag plus the list of root CIDs.
const carHeader = z.object({
  version: z.literal(1),
  roots: z.array(cidSchema),
});
export type CarHeader = z.infer<typeof carHeader>;

// Reusable zod schemas for common value shapes.
export const schema = {
  cid: cidSchema,
  carHeader,
  bytes: z.instanceof(Uint8Array),
  string: z.string(),
  array: z.array(z.unknown()),
  map: z.record(z.string(), z.unknown()),
  unknown: z.unknown(),
};

// Named Def wrappers pairing each schema with a human-readable name.
// NOTE(review): schema.array has no matching entry here — confirm intentional.
export const def = {
  cid: {
    name: "cid",
    schema: schema.cid,
  } as Def<CID>,
  carHeader: {
    name: "CAR header",
    schema: schema.carHeader,
  } as Def<CarHeader>,
  bytes: {
    name: "bytes",
    schema: schema.bytes,
  } as Def<Uint8Array>,
  string: {
    name: "string",
    schema: schema.string,
  } as Def<string>,
  map: {
    name: "map",
    schema: schema.map,
  } as Def<Record<string, unknown>>,
  unknown: {
    name: "unknown",
    schema: schema.unknown,
  } as Def<unknown>,
};

// Element type of a (readonly) array type.
export type ArrayEl<A> = A extends readonly (infer T)[] ? T : never;

// Array type guaranteed to hold at least one element.
export type NotEmptyArray<T> = [T, ...T[]];
+239
common/util.ts
··· 1 + export const noUndefinedVals = <T>( 2 + obj: Record<string, T | undefined>, 3 + ): Record<string, T> => { 4 + Object.keys(obj).forEach((k) => { 5 + if (obj[k] === undefined) { 6 + delete obj[k]; 7 + } 8 + }); 9 + return obj as Record<string, T>; 10 + }; 11 + 12 + /** 13 + * Returns a shallow copy of the object without the specified keys. If the input 14 + * is nullish, it returns the input. 15 + */ 16 + export function omit< 17 + T extends undefined | null | Record<string, unknown>, 18 + K extends keyof NonNullable<T>, 19 + >( 20 + object: T, 21 + rejectedKeys: readonly K[], 22 + ): T extends undefined ? undefined : T extends null ? null : Omit<T, K>; 23 + export function omit( 24 + src: undefined | null | Record<string, unknown>, 25 + rejectedKeys: readonly string[], 26 + ): undefined | null | Record<string, unknown> { 27 + // Hot path 28 + 29 + if (!src) return src; 30 + 31 + const dst: Record<string, unknown> = {}; 32 + const srcKeys = Object.keys(src); 33 + for (let i = 0; i < srcKeys.length; i++) { 34 + const key = srcKeys[i]; 35 + if (!rejectedKeys.includes(key)) { 36 + dst[key] = src[key]; 37 + } 38 + } 39 + return dst; 40 + } 41 + 42 + export const jitter = (maxMs: number) => { 43 + return Math.round((Math.random() - 0.5) * maxMs * 2); 44 + }; 45 + 46 + export const wait = (ms: number) => { 47 + return new Promise((res) => setTimeout(res, ms)); 48 + }; 49 + 50 + export type BailableWait = { 51 + bail: () => void; 52 + wait: () => Promise<void>; 53 + }; 54 + 55 + export const bailableWait = (ms: number): BailableWait => { 56 + let bail!: () => void; 57 + const waitPromise = new Promise<void>((res) => { 58 + const timeout = setTimeout(res, ms); 59 + bail = () => { 60 + clearTimeout(timeout); 61 + res(); 62 + }; 63 + }); 64 + return { bail, wait: () => waitPromise }; 65 + }; 66 + 67 + export const flattenUint8Arrays = (arrs: Uint8Array[]): Uint8Array => { 68 + const length = arrs.reduce((acc, cur) => { 69 + return acc + cur.length; 70 + }, 0); 71 + 
const flattened = new Uint8Array(length); 72 + let offset = 0; 73 + arrs.forEach((arr) => { 74 + flattened.set(arr, offset); 75 + offset += arr.length; 76 + }); 77 + return flattened; 78 + }; 79 + 80 + export const streamToBuffer = async ( 81 + stream: AsyncIterable<Uint8Array>, 82 + ): Promise<Uint8Array> => { 83 + const arrays: Uint8Array[] = []; 84 + for await (const chunk of stream) { 85 + arrays.push(chunk); 86 + } 87 + return flattenUint8Arrays(arrays); 88 + }; 89 + 90 + const S32_CHAR = "234567abcdefghijklmnopqrstuvwxyz"; 91 + 92 + export const s32encode = (i: number): string => { 93 + let s = ""; 94 + while (i) { 95 + const c = i % 32; 96 + i = Math.floor(i / 32); 97 + s = S32_CHAR.charAt(c) + s; 98 + } 99 + return s; 100 + }; 101 + 102 + export const s32decode = (s: string): number => { 103 + let i = 0; 104 + for (const c of s) { 105 + i = i * 32 + S32_CHAR.indexOf(c); 106 + } 107 + return i; 108 + }; 109 + 110 + export const asyncFilter = async <T>( 111 + arr: T[], 112 + fn: (t: T) => Promise<boolean>, 113 + ) => { 114 + const results = await Promise.all(arr.map((t) => fn(t))); 115 + return arr.filter((_, i) => results[i]); 116 + }; 117 + 118 + export const errHasMsg = (err: unknown, msg: string): boolean => { 119 + return !!err && typeof err === "object" && "message" in err && 120 + (err as { message: unknown }).message === msg; 121 + }; 122 + 123 + export const chunkArray = <T>(arr: T[], chunkSize: number): T[][] => { 124 + return arr.reduce((acc, cur, i) => { 125 + const chunkI = Math.floor(i / chunkSize); 126 + if (!acc[chunkI]) { 127 + acc[chunkI] = []; 128 + } 129 + acc[chunkI].push(cur); 130 + return acc; 131 + }, [] as T[][]); 132 + }; 133 + 134 + export const range = (num: number): number[] => { 135 + const nums: number[] = []; 136 + for (let i = 0; i < num; i++) { 137 + nums.push(i); 138 + } 139 + return nums; 140 + }; 141 + 142 + export const dedupeStrs = (strs: string[]): string[] => { 143 + return [...new Set(strs)]; 144 + }; 145 + 146 + 
export const parseIntWithFallback = <T>( 147 + value: string | undefined, 148 + fallback: T, 149 + ): number | T => { 150 + const parsed = parseInt(value || "", 10); 151 + return isNaN(parsed) ? fallback : parsed; 152 + }; 153 + 154 + export function ui8ToArrayBuffer(bytes: Uint8Array): ArrayBuffer { 155 + return bytes.buffer.slice( 156 + bytes.byteOffset, 157 + bytes.byteLength + bytes.byteOffset, 158 + ) as ArrayBuffer; 159 + } 160 + 161 + export function toSimplifiedISOSafe(dateStr: string) { 162 + const date = new Date(dateStr); 163 + if (isNaN(date.getTime())) { 164 + return new Date(0).toISOString(); 165 + } 166 + const iso = date.toISOString(); 167 + return iso; // YYYY-MM-DDTHH:mm:ss.sssZ 168 + } 169 + 170 + export type RetryOptions = { 171 + maxRetries?: number; 172 + getWaitMs?: (n: number) => number | null; 173 + }; 174 + 175 + export async function retry<T>( 176 + fn: () => Promise<T>, 177 + opts: RetryOptions & { 178 + retryable?: (err: unknown) => boolean; 179 + } = {}, 180 + ): Promise<T> { 181 + const { maxRetries = 3, retryable = () => true, getWaitMs = backoffMs } = 182 + opts; 183 + let retries = 0; 184 + let doneError: unknown; 185 + while (!doneError) { 186 + try { 187 + return await fn(); 188 + } catch (err) { 189 + const waitMs = getWaitMs(retries); 190 + const willRetry = retries < maxRetries && waitMs !== null && 191 + retryable(err); 192 + if (willRetry) { 193 + retries += 1; 194 + if (waitMs !== 0) { 195 + await wait(waitMs); 196 + } 197 + } else { 198 + doneError = err; 199 + } 200 + } 201 + } 202 + throw doneError; 203 + } 204 + 205 + export function createRetryable(retryable: (err: unknown) => boolean) { 206 + return <T>(fn: () => Promise<T>, opts?: RetryOptions) => 207 + retry(fn, { ...opts, retryable }); 208 + } 209 + 210 + // Waits exponential backoff with max and jitter: ~100, ~200, ~400, ~800, ~1000, ~1000, ... 
211 + export function backoffMs(n: number, multiplier = 100, max = 1000) { 212 + const exponentialMs = Math.pow(2, n) * multiplier; 213 + const ms = Math.min(exponentialMs, max); 214 + return jitter(ms); 215 + } 216 + 217 + export function keyBy<T, K extends keyof T>( 218 + arr: readonly T[], 219 + key: K, 220 + ): Map<T[K], T> { 221 + return arr.reduce((acc, cur) => { 222 + acc.set(cur[key], cur); 223 + return acc; 224 + }, new Map<T[K], T>()); 225 + } 226 + 227 + export const mapDefined = <T, S>( 228 + arr: T[], 229 + fn: (obj: T) => S | undefined, 230 + ): S[] => { 231 + const output: S[] = []; 232 + for (const item of arr) { 233 + const val = fn(item); 234 + if (val !== undefined) { 235 + output.push(val); 236 + } 237 + } 238 + return output; 239 + };
+3
deno.json
··· 1 + { 2 + "workspace": ["xrpc-server", "lex-cli", "common"] 3 + }
+361
deno.lock
··· 1 + { 2 + "version": "5", 3 + "specifiers": { 4 + "jsr:@cliffy/ansi@^1.0.0-rc.8": "1.0.0-rc.8", 5 + "jsr:@cliffy/command@^1.0.0-rc.8": "1.0.0-rc.8", 6 + "jsr:@cliffy/flags@1.0.0-rc.8": "1.0.0-rc.8", 7 + "jsr:@cliffy/internal@1.0.0-rc.8": "1.0.0-rc.8", 8 + "jsr:@cliffy/table@1.0.0-rc.8": "1.0.0-rc.8", 9 + "jsr:@david/code-block-writer@13": "13.0.3", 10 + "jsr:@hono/hono@^4.7.10": "4.9.6", 11 + "jsr:@logtape/file@*": "1.0.4", 12 + "jsr:@logtape/file@^1.0.4": "1.0.4", 13 + "jsr:@logtape/logtape@*": "1.0.4", 14 + "jsr:@logtape/logtape@^1.0.4": "1.0.4", 15 + "jsr:@std/assert@^1.0.14": "1.0.14", 16 + "jsr:@std/bytes@*": "1.0.6", 17 + "jsr:@std/bytes@^1.0.2": "1.0.6", 18 + "jsr:@std/bytes@^1.0.5": "1.0.6", 19 + "jsr:@std/bytes@^1.0.6": "1.0.6", 20 + "jsr:@std/cbor@~0.1.8": "0.1.8", 21 + "jsr:@std/crypto@*": "1.0.5", 22 + "jsr:@std/encoding@^1.0.10": "1.0.10", 23 + "jsr:@std/encoding@~1.0.5": "1.0.10", 24 + "jsr:@std/fmt@~1.0.2": "1.0.8", 25 + "jsr:@std/fs@1": "1.0.19", 26 + "jsr:@std/fs@^1.0.19": "1.0.19", 27 + "jsr:@std/internal@^1.0.10": "1.0.10", 28 + "jsr:@std/internal@^1.0.9": "1.0.10", 29 + "jsr:@std/io@*": "0.224.9", 30 + "jsr:@std/io@~0.224.9": "0.224.9", 31 + "jsr:@std/path@1": "1.1.2", 32 + "jsr:@std/path@^1.1.1": "1.1.2", 33 + "jsr:@std/path@^1.1.2": "1.1.2", 34 + "jsr:@std/streams@^1.0.12": "1.0.12", 35 + "jsr:@std/streams@^1.0.9": "1.0.12", 36 + "jsr:@std/text@~1.0.7": "1.0.16", 37 + "jsr:@ts-morph/common@0.27": "0.27.0", 38 + "jsr:@ts-morph/ts-morph@26": "26.0.0", 39 + "jsr:@zod/zod@^4.0.17": "4.1.5", 40 + "jsr:@zod/zod@^4.1.5": "4.1.5", 41 + "npm:@atproto/crypto@~0.4.4": "0.4.4", 42 + "npm:@atproto/lexicon@~0.4.11": "0.4.14", 43 + "npm:@atproto/lexicon@~0.4.14": "0.4.14", 44 + "npm:@atproto/syntax@~0.4.1": "0.4.1", 45 + "npm:@atproto/xrpc@0.7": "0.7.4", 46 + "npm:@ipld/dag-cbor@^9.2.5": "9.2.5", 47 + "npm:@types/node@*": "24.2.0", 48 + "npm:http-errors@2": "2.0.0", 49 + "npm:jose@*": "6.1.0", 50 + "npm:multiformats@^13.3.6": "13.4.0", 51 + 
"npm:multiformats@^13.4.0": "13.4.0", 52 + "npm:rate-limiter-flexible@^2.4.1": "2.4.2", 53 + "npm:uint8arrays@3.0.0": "3.0.0", 54 + "npm:ws@^8.12.0": "8.18.3" 55 + }, 56 + "jsr": { 57 + "@cliffy/ansi@1.0.0-rc.8": { 58 + "integrity": "ba37f10ce55bbfbdd8ddd987f91f029b17bce88385b98ba3058870f3b007b80c", 59 + "dependencies": [ 60 + "jsr:@cliffy/internal", 61 + "jsr:@std/encoding@~1.0.5", 62 + "jsr:@std/fmt", 63 + "jsr:@std/io@~0.224.9" 64 + ] 65 + }, 66 + "@cliffy/command@1.0.0-rc.8": { 67 + "integrity": "758147790797c74a707e5294cc7285df665422a13d2a483437092ffce40b5557", 68 + "dependencies": [ 69 + "jsr:@cliffy/flags", 70 + "jsr:@cliffy/internal", 71 + "jsr:@cliffy/table", 72 + "jsr:@std/fmt", 73 + "jsr:@std/text" 74 + ] 75 + }, 76 + "@cliffy/flags@1.0.0-rc.8": { 77 + "integrity": "0f1043ce6ef037ba1cb5fe6b1bcecb25dc2f29371a1c17f278ab0f45e4b6f46c", 78 + "dependencies": [ 79 + "jsr:@std/text" 80 + ] 81 + }, 82 + "@cliffy/internal@1.0.0-rc.8": { 83 + "integrity": "34cdf2fad9b084b5aed493b138d573f52d4e988767215f7460daf0b918ff43d8" 84 + }, 85 + "@cliffy/table@1.0.0-rc.8": { 86 + "integrity": "8bbcdc2ba5e0061b4b13810a24e6f5c6ab19c09f0cce9eb691ccd76c7c6c9db5", 87 + "dependencies": [ 88 + "jsr:@std/fmt" 89 + ] 90 + }, 91 + "@david/code-block-writer@13.0.3": { 92 + "integrity": "f98c77d320f5957899a61bfb7a9bead7c6d83ad1515daee92dbacc861e13bb7f" 93 + }, 94 + "@hono/hono@4.9.6": { 95 + "integrity": "b85abb0013d167a290b1808d1d4d542dee269df31d4f47122023259fdd7e184b" 96 + }, 97 + "@logtape/file@1.0.4": { 98 + "integrity": "80f49feb4826fa748ae3ebd1eaf2fa7f23f1820c84d5402a1f7de76f529464e4", 99 + "dependencies": [ 100 + "jsr:@logtape/logtape@^1.0.4" 101 + ] 102 + }, 103 + "@logtape/logtape@1.0.4": { 104 + "integrity": "6ada87764d995b1033c352a17fd9e20b217f3672083bc2d8debe356eac03fe10" 105 + }, 106 + "@std/assert@1.0.14": { 107 + "integrity": "68d0d4a43b365abc927f45a9b85c639ea18a9fab96ad92281e493e4ed84abaa4", 108 + "dependencies": [ 109 + "jsr:@std/internal@^1.0.10" 110 + ] 111 + }, 112 + 
"@std/bytes@1.0.6": { 113 + "integrity": "f6ac6adbd8ccd99314045f5703e23af0a68d7f7e58364b47d2c7f408aeb5820a" 114 + }, 115 + "@std/cbor@0.1.8": { 116 + "integrity": "a0d1c520f8963358cc96defd8cbd1f9e81e40adc2bbfb301f122150f2024d93e", 117 + "dependencies": [ 118 + "jsr:@std/bytes@^1.0.5", 119 + "jsr:@std/streams@^1.0.9" 120 + ] 121 + }, 122 + "@std/crypto@1.0.5": { 123 + "integrity": "0dcfbb319fe0bba1bd3af904ceb4f948cde1b92979ec1614528380ed308a3b40" 124 + }, 125 + "@std/encoding@1.0.10": { 126 + "integrity": "8783c6384a2d13abd5e9e87a7ae0520a30e9f56aeeaa3bdf910a3eaaf5c811a1" 127 + }, 128 + "@std/fmt@1.0.8": { 129 + "integrity": "71e1fc498787e4434d213647a6e43e794af4fd393ef8f52062246e06f7e372b7" 130 + }, 131 + "@std/fs@1.0.19": { 132 + "integrity": "051968c2b1eae4d2ea9f79a08a3845740ef6af10356aff43d3e2ef11ed09fb06", 133 + "dependencies": [ 134 + "jsr:@std/internal@^1.0.9", 135 + "jsr:@std/path@^1.1.1" 136 + ] 137 + }, 138 + "@std/internal@1.0.10": { 139 + "integrity": "e3be62ce42cab0e177c27698e5d9800122f67b766a0bea6ca4867886cbde8cf7" 140 + }, 141 + "@std/io@0.224.9": { 142 + "integrity": "4414664b6926f665102e73c969cfda06d2c4c59bd5d0c603fd4f1b1c840d6ee3", 143 + "dependencies": [ 144 + "jsr:@std/bytes@^1.0.2" 145 + ] 146 + }, 147 + "@std/path@1.1.2": { 148 + "integrity": "c0b13b97dfe06546d5e16bf3966b1cadf92e1cc83e56ba5476ad8b498d9e3038", 149 + "dependencies": [ 150 + "jsr:@std/internal@^1.0.10" 151 + ] 152 + }, 153 + "@std/streams@1.0.10": { 154 + "integrity": "75c0b1431873cd0d8b3d679015220204d36d3c7420d93b60acfc379eb0dc30af" 155 + }, 156 + "@std/streams@1.0.12": { 157 + "integrity": "ae925fa1dc459b1abf5cbaa28cc5c7b0485853af3b2a384b0dc22d86e59dfbf4", 158 + "dependencies": [ 159 + "jsr:@std/bytes@^1.0.6" 160 + ] 161 + }, 162 + "@std/text@1.0.16": { 163 + "integrity": "ddb9853b75119a2473857d691cf1ec02ad90793a2e8b4a4ac49d7354281a0cf8" 164 + }, 165 + "@ts-morph/common@0.27.0": { 166 + "integrity": "c7b73592d78ce8479b356fd4f3d6ec3c460d77753a8680ff196effea7a939052", 167 + 
"dependencies": [ 168 + "jsr:@std/fs@1", 169 + "jsr:@std/path@1" 170 + ] 171 + }, 172 + "@ts-morph/ts-morph@26.0.0": { 173 + "integrity": "f2b1ca67b4d1a6332d00c00dd48496b20879c899a702c1b92bcce1c552a168df", 174 + "dependencies": [ 175 + "jsr:@david/code-block-writer", 176 + "jsr:@ts-morph/common" 177 + ] 178 + }, 179 + "@zod/zod@4.1.5": { 180 + "integrity": "e995ca7d588a835ce333de626c940e242c55b6763c5190e8cbb9fefb7d0fb4ef" 181 + } 182 + }, 183 + "npm": { 184 + "@atproto/common-web@0.4.2": { 185 + "integrity": "sha512-vrXwGNoFGogodjQvJDxAeP3QbGtawgZute2ed1XdRO0wMixLk3qewtikZm06H259QDJVu6voKC5mubml+WgQUw==", 186 + "dependencies": [ 187 + "graphemer", 188 + "multiformats@9.9.0", 189 + "uint8arrays", 190 + "zod" 191 + ] 192 + }, 193 + "@atproto/crypto@0.4.4": { 194 + "integrity": "sha512-Yq9+crJ7WQl7sxStVpHgie5Z51R05etaK9DLWYG/7bR5T4bhdcIgF6IfklLShtZwLYdVVj+K15s0BqW9a8PSDA==", 195 + "dependencies": [ 196 + "@noble/curves", 197 + "@noble/hashes", 198 + "uint8arrays" 199 + ] 200 + }, 201 + "@atproto/lexicon@0.4.14": { 202 + "integrity": "sha512-jiKpmH1QER3Gvc7JVY5brwrfo+etFoe57tKPQX/SmPwjvUsFnJAow5xLIryuBaJgFAhnTZViXKs41t//pahGHQ==", 203 + "dependencies": [ 204 + "@atproto/common-web", 205 + "@atproto/syntax", 206 + "iso-datestring-validator", 207 + "multiformats@9.9.0", 208 + "zod" 209 + ] 210 + }, 211 + "@atproto/lexicon@0.5.0": { 212 + "integrity": "sha512-3aAzEAy9EAPs3CxznzMhEcqDd7m3vz1eze/ya9/ThbB7yleqJIhz5GY2q76tCCwHPhn5qDDMhlA9kKV6fG23gA==", 213 + "dependencies": [ 214 + "@atproto/common-web", 215 + "@atproto/syntax", 216 + "iso-datestring-validator", 217 + "multiformats@9.9.0", 218 + "zod" 219 + ] 220 + }, 221 + "@atproto/syntax@0.4.1": { 222 + "integrity": "sha512-CJdImtLAiFO+0z3BWTtxwk6aY5w4t8orHTMVJgkf++QRJWTxPbIFko/0hrkADB7n2EruDxDSeAgfUGehpH6ngw==" 223 + }, 224 + "@atproto/xrpc@0.7.4": { 225 + "integrity": "sha512-sDi68+QE1XHegTaNAndlX41Gp827pouSzSs8CyAwhrqZdsJUxE3P7TMtrA0z+zAjvxVyvzscRc0TsN/fGUGrhw==", 226 + "dependencies": [ 227 + "@atproto/lexicon@0.5.0", 
228 + "zod" 229 + ] 230 + }, 231 + "@ipld/dag-cbor@9.2.5": { 232 + "integrity": "sha512-84wSr4jv30biui7endhobYhXBQzQE4c/wdoWlFrKcfiwH+ofaPg8fwsM8okX9cOzkkrsAsNdDyH3ou+kiLquwQ==", 233 + "dependencies": [ 234 + "cborg", 235 + "multiformats@13.4.0" 236 + ] 237 + }, 238 + "@noble/curves@1.9.7": { 239 + "integrity": "sha512-gbKGcRUYIjA3/zCCNaWDciTMFI0dCkvou3TL8Zmy5Nc7sJ47a0jtOeZoTaMxkuqRo9cRhjOdZJXegxYE5FN/xw==", 240 + "dependencies": [ 241 + "@noble/hashes" 242 + ] 243 + }, 244 + "@noble/hashes@1.8.0": { 245 + "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==" 246 + }, 247 + "@types/node@24.2.0": { 248 + "integrity": "sha512-3xyG3pMCq3oYCNg7/ZP+E1ooTaGB4cG8JWRsqqOYQdbWNY4zbaV0Ennrd7stjiJEFZCaybcIgpTjJWHRfBSIDw==", 249 + "dependencies": [ 250 + "undici-types" 251 + ] 252 + }, 253 + "cborg@4.2.15": { 254 + "integrity": "sha512-T+YVPemWyXcBVQdp0k61lQp2hJniRNmul0lAwTj2DTS/6dI4eCq/MRMucGqqvFqMBfmnD8tJ9aFtPu5dEGAbgw==", 255 + "bin": true 256 + }, 257 + "depd@2.0.0": { 258 + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" 259 + }, 260 + "graphemer@1.4.0": { 261 + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==" 262 + }, 263 + "http-errors@2.0.0": { 264 + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", 265 + "dependencies": [ 266 + "depd", 267 + "inherits", 268 + "setprototypeof", 269 + "statuses", 270 + "toidentifier" 271 + ] 272 + }, 273 + "inherits@2.0.4": { 274 + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" 275 + }, 276 + "iso-datestring-validator@2.2.2": { 277 + "integrity": "sha512-yLEMkBbLZTlVQqOnQ4FiMujR6T4DEcCb1xizmvXS+OxuhwcbtynoosRzdMA69zZCShCNAbi+gJ71FxZBBXx1SA==" 278 + }, 279 + "jose@6.1.0": { 280 + "integrity": 
"sha512-TTQJyoEoKcC1lscpVDCSsVgYzUDg/0Bt3WE//WiTPK6uOCQC2KZS4MpugbMWt/zyjkopgZoXhZuCi00gLudfUA==" 281 + }, 282 + "multiformats@13.4.0": { 283 + "integrity": "sha512-Mkb/QcclrJxKC+vrcIFl297h52QcKh2Az/9A5vbWytbQt4225UWWWmIuSsKksdww9NkIeYcA7DkfftyLuC/JSg==" 284 + }, 285 + "multiformats@9.9.0": { 286 + "integrity": "sha512-HoMUjhH9T8DDBNT+6xzkrd9ga/XiBI4xLr58LJACwK6G3HTOPeMz4nB4KJs33L2BelrIJa7P0VuNaVF3hMYfjg==" 287 + }, 288 + "rate-limiter-flexible@2.4.2": { 289 + "integrity": "sha512-rMATGGOdO1suFyf/mI5LYhts71g1sbdhmd6YvdiXO2gJnd42Tt6QS4JUKJKSWVVkMtBacm6l40FR7Trjo6Iruw==" 290 + }, 291 + "setprototypeof@1.2.0": { 292 + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" 293 + }, 294 + "statuses@2.0.1": { 295 + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" 296 + }, 297 + "toidentifier@1.0.1": { 298 + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" 299 + }, 300 + "uint8arrays@3.0.0": { 301 + "integrity": "sha512-HRCx0q6O9Bfbp+HHSfQQKD7wU70+lydKVt4EghkdOvlK/NlrF90z+eXV34mUd48rNvVJXwkrMSPpCATkct8fJA==", 302 + "dependencies": [ 303 + "multiformats@9.9.0" 304 + ] 305 + }, 306 + "undici-types@7.10.0": { 307 + "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==" 308 + }, 309 + "ws@8.18.3": { 310 + "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==" 311 + }, 312 + "zod@3.25.76": { 313 + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==" 314 + } 315 + }, 316 + "workspace": { 317 + "members": { 318 + "common": { 319 + "dependencies": [ 320 + "jsr:@logtape/file@^1.0.4", 321 + "jsr:@logtape/logtape@^1.0.4", 322 + "jsr:@std/cbor@~0.1.8", 323 + "jsr:@std/encoding@^1.0.10", 324 + "jsr:@std/fs@^1.0.19", 325 + 
"jsr:@std/streams@^1.0.12", 326 + "jsr:@zod/zod@^4.1.5", 327 + "npm:@ipld/dag-cbor@^9.2.5", 328 + "npm:multiformats@^13.4.0" 329 + ] 330 + }, 331 + "lex-cli": { 332 + "dependencies": [ 333 + "jsr:@cliffy/ansi@^1.0.0-rc.8", 334 + "jsr:@cliffy/command@^1.0.0-rc.8", 335 + "jsr:@std/fs@^1.0.19", 336 + "jsr:@std/path@^1.1.2", 337 + "jsr:@ts-morph/ts-morph@26", 338 + "jsr:@zod/zod@^4.1.5", 339 + "npm:@atproto/lexicon@~0.4.14", 340 + "npm:@atproto/syntax@~0.4.1" 341 + ] 342 + }, 343 + "xrpc-server": { 344 + "dependencies": [ 345 + "jsr:@hono/hono@^4.7.10", 346 + "jsr:@std/assert@^1.0.14", 347 + "jsr:@std/encoding@^1.0.10", 348 + "jsr:@zod/zod@^4.0.17", 349 + "npm:@atproto/crypto@~0.4.4", 350 + "npm:@atproto/lexicon@~0.4.11", 351 + "npm:@atproto/xrpc@0.7", 352 + "npm:http-errors@2", 353 + "npm:multiformats@^13.3.6", 354 + "npm:rate-limiter-flexible@^2.4.1", 355 + "npm:uint8arrays@3.0.0", 356 + "npm:ws@^8.12.0" 357 + ] 358 + } 359 + } 360 + } 361 + }
lex-cli/.DS_Store

This is a binary file and will not be displayed.

+34
lex-cli/cmd/gen-api.ts
import { Command } from "@cliffy/command";
import {
  applyFileDiff,
  genFileDiff,
  printFileDiff,
  readAllLexicons,
} from "../util.ts";
import { genClientApi } from "../codegen/client.ts";
import { formatGeneratedFiles } from "../codegen/util.ts";

// CLI subcommand: read lexicon files, generate a TypeScript client API,
// preview the file diff, write it to the output dir, and format the result.
// NOTE(review): unlike gen-md.ts / gen-ts-obj.ts, this chains
// `.command("gen-api")` onto the fresh Command before attaching options,
// which registers a nested subcommand — confirm this is intentional given
// how the command is mounted by the caller.
const command = new Command()
  .command("gen-api")
  .description("Generate a TS client API")
  .option("--js", "use .js extension for imports instead of .ts")
  .option("-o, --outdir <outdir>", "dir path to write to", { required: true })
  .option("-i, --input <input...>", "paths of lexicon files to include", {
    required: true,
  })
  .action(
    async ({ outdir, input, js }) => {
      // Load all lexicon docs and generate the client API in memory.
      const lexicons = readAllLexicons(input);
      const api = await genClientApi(lexicons, {
        useJsExtension: js,
      });
      // Show the pending changes before applying them to disk.
      const diff = genFileDiff(outdir, api);
      console.log("This will write the following files:");
      printFileDiff(diff);
      applyFileDiff(diff);
      await formatGeneratedFiles(outdir);
      console.log("API generated.");
    },
  );

export default command;
+22
lex-cli/cmd/gen-md.ts
··· 1 + import { Command } from "@cliffy/command"; 2 + import { readAllLexicons } from "../util.ts"; 3 + import * as mdGen from "../mdgen/index.ts"; 4 + 5 + const command = new Command() 6 + .description("Generate markdown documentation") 7 + .option("-o, --output <outfile>", "Output file path", { required: true }) 8 + .option("-i, --input <infile>", "Input file path", { required: true }) 9 + .action( 10 + async ({ output, input }) => { 11 + if (!output.endsWith(".md")) { 12 + console.error( 13 + "Must supply the path to a .md file as the first parameter", 14 + ); 15 + Deno.exit(1); 16 + } 17 + const lexicons = readAllLexicons(input); 18 + await mdGen.process(output, lexicons); 19 + }, 20 + ); 21 + 22 + export default command;
+34
lex-cli/cmd/gen-server.ts
import { Command } from "@cliffy/command";
import {
  applyFileDiff,
  genFileDiff,
  printFileDiff,
  readAllLexicons,
} from "../util.ts";
import { formatGeneratedFiles } from "../codegen/util.ts";
import { genServerApi } from "../codegen/server.ts";

// CLI subcommand: read lexicon files, generate a TypeScript server API,
// preview the file diff, write it to the output dir, and format the result.
// NOTE(review): like gen-api.ts (and unlike gen-md.ts / gen-ts-obj.ts) this
// chains `.command("gen-server")` onto the fresh Command before attaching
// options, which registers a nested subcommand — confirm intentional.
const command = new Command()
  .command("gen-server")
  .description("Generate a TS server API")
  .option("--js", "use .js extension for imports instead of .ts")
  .option("-o, --outdir <outdir>", "dir path to write to", { required: true })
  .option("-i, --input <input...>", "paths of lexicon files to include", {
    required: true,
  })
  .action(
    async ({ outdir, input, js }) => {
      // Load all lexicon docs and generate the server API in memory.
      const lexicons = readAllLexicons(input);
      const api = await genServerApi(lexicons, {
        useJsExtension: js,
      });
      // Show the pending changes before applying them to disk.
      const diff = genFileDiff(outdir, api);
      console.log("This will write the following files:");
      printFileDiff(diff);
      applyFileDiff(diff);
      await formatGeneratedFiles(outdir);
      console.log("API generated.");
    },
  );

export default command;
+14
lex-cli/cmd/gen-ts-obj.ts
··· 1 + import { Command } from "@cliffy/command"; 2 + import { genTsObj, readAllLexicons } from "../util.ts"; 3 + 4 + const command = new Command() 5 + .description("Generate a TS file that exports an array of lexicons") 6 + .option("-i, --input <lexicons>", "paths of the lexicon files to include", { 7 + required: true, 8 + }) 9 + .action(({ input }) => { 10 + const lexicons = readAllLexicons(input); 11 + console.log(genTsObj(lexicons)); 12 + }); 13 + 14 + export default command;
+6
lex-cli/cmd/index.ts
··· 1 + import genMd from "./gen-md.ts"; 2 + import genApi from "./gen-api.ts"; 3 + import genServer from "./gen-server.ts"; 4 + import genTsObj from "./gen-ts-obj.ts"; 5 + 6 + export { genApi, genMd, genServer, genTsObj };
+612
lex-cli/codegen/client.ts
import {
  IndentationText,
  Project,
  type SourceFile,
  VariableDeclarationKind,
} from "ts-morph";
import { type LexiconDoc, Lexicons, type LexRecord } from "@atproto/lexicon";
import { NSID } from "@atproto/syntax";
import type { GeneratedAPI } from "../types.ts";
import { gen, lexiconsTs, utilTs } from "./common.ts";
import {
  genCommonImports,
  genImports,
  genRecord,
  genUserType,
  genXrpcInput,
  genXrpcOutput,
  genXrpcParams,
} from "./lex-gen.ts";
import {
  type CodeGenOptions,
  type DefTreeNode,
  lexiconsToDefTree,
  schemasToNsidTokens,
  toCamelCase,
  toScreamingSnakeCase,
  toTitleCase,
} from "./util.ts";

// NSIDs of the core atproto repo methods that the generated record classes
// delegate to (list/get/create/put/delete on a repo collection).
const ATP_METHODS = {
  list: "com.atproto.repo.listRecords",
  get: "com.atproto.repo.getRecord",
  create: "com.atproto.repo.createRecord",
  put: "com.atproto.repo.putRecord",
  delete: "com.atproto.repo.deleteRecord",
};

/**
 * Generate the full TypeScript client API for the given lexicon documents.
 *
 * Produces one `/types/...` module per lexicon plus the shared `/util.ts`,
 * `/lexicons.ts` and `/index.ts` modules, returned as in-memory files.
 *
 * @param lexiconDocs parsed lexicon documents to generate code for
 * @param options codegen options (e.g. `.js` vs `.ts` import extensions)
 * @returns the set of generated files (path + content)
 *
 * NOTE(review): `options` is not forwarded to `lexiconTs` below, so the
 * per-type modules always use ".ts" import extensions even when
 * `useJsExtension` is set — confirm this is intended.
 */
export async function genClientApi(
  lexiconDocs: LexiconDoc[],
  options?: CodeGenOptions,
): Promise<GeneratedAPI> {
  // In-memory ts-morph project: nothing is written to disk here.
  const project = new Project({
    useInMemoryFileSystem: true,
    manipulationSettings: { indentationText: IndentationText.TwoSpaces },
  });
  const api: GeneratedAPI = { files: [] };
  const lexicons = new Lexicons(lexiconDocs);
  const nsidTree = lexiconsToDefTree(lexiconDocs);
  const nsidTokens = schemasToNsidTokens(lexiconDocs);
  for (const lexiconDoc of lexiconDocs) {
    api.files.push(await lexiconTs(project, lexicons, lexiconDoc));
  }
  api.files.push(await utilTs(project));
  api.files.push(await lexiconsTs(project, lexiconDocs, options));
  api.files.push(
    await indexTs(project, lexiconDocs, nsidTree, nsidTokens, options),
  );
  return api;
}

/**
 * Generate `/index.ts`: imports/re-exports of all per-lexicon type modules,
 * token constants, the `AtpBaseClient` entry point, and one namespace class
 * per NSID tree node. The `//=` comments throughout show the code each
 * section emits into the generated file.
 */
const indexTs = (
  project: Project,
  lexiconDocs: LexiconDoc[],
  nsidTree: DefTreeNode[],
  nsidTokens: Record<string, string[]>,
  options?: CodeGenOptions,
) =>
  gen(project, "/index.ts", (file) => {
    const extension = options?.useJsExtension ? ".js" : ".ts";
    //= import { XrpcClient, type FetchHandler, type FetchHandlerOptions } from '@atproto/xrpc'
    const xrpcImport = file.addImportDeclaration({
      moduleSpecifier: "@atproto/xrpc",
    });
    xrpcImport.addNamedImports([
      { name: "XrpcClient" },
      { name: "FetchHandler", isTypeOnly: true },
      { name: "FetchHandlerOptions", isTypeOnly: true },
    ]);
    //= import {schemas} from './lexicons.ts'
    file
      .addImportDeclaration({ moduleSpecifier: `./lexicons${extension}` })
      .addNamedImports([{ name: "schemas" }]);

    // Check if any lexicon docs use cid-link types
    // (scans top-level defs, object properties, array items, and record
    // properties — nested levels beyond that are not inspected).
    const needsCID = lexiconDocs.some((lexiconDoc) =>
      Object.values(lexiconDoc.defs).some((def) =>
        def.type === "cid-link" ||
        (def.type === "object" &&
          Object.values(def.properties || {}).some((prop) =>
            "type" in prop && prop.type === "cid-link"
          )) ||
        (def.type === "array" && def.items.type === "cid-link") ||
        (def.type === "record" &&
          Object.values(def.record.properties || {}).some((prop) =>
            "type" in prop && (prop.type === "cid-link" ||
              (prop.type === "array" && "items" in prop &&
                prop.items.type === "cid-link"))
          ))
      )
    );

    //= import {CID} from 'multiformats/cid'
    // NOTE(review): nothing emitted into index.ts below appears to reference
    // CID directly; confirm this import is needed in the generated file (it
    // may trip noUnusedLocals/lint in consumers).
    if (needsCID) {
      file
        .addImportDeclaration({
          moduleSpecifier: "multiformats/cid",
        })
        .addNamedImports([{ name: "CID" }]);
    }

    //= import { type OmitKey, type Un$Typed } from './util.ts'
    file
      .addImportDeclaration({ moduleSpecifier: `./util${extension}` })
      .addNamedImports([
        { name: "OmitKey", isTypeOnly: true },
        { name: "Un$Typed", isTypeOnly: true },
      ]);

    // generate type imports and re-exports
    for (const lexicon of lexiconDocs) {
      const moduleSpecifier = `./types/${
        lexicon.id.split(".").join("/")
      }${extension}`;
      file
        .addImportDeclaration({ moduleSpecifier })
        .setNamespaceImport(toTitleCase(lexicon.id));
      file
        .addExportDeclaration({ moduleSpecifier })
        .setNamespaceExport(toTitleCase(lexicon.id));
    }

    // generate token enums
    for (const nsidAuthority in nsidTokens) {
      // export const {THE_AUTHORITY} = {
      //   {Name}: "{authority.the.name}"
      // }
      file.addVariableStatement({
        isExported: true,
        declarationKind: VariableDeclarationKind.Const,
        declarations: [
          {
            name: toScreamingSnakeCase(nsidAuthority),
            initializer: [
              "{",
              ...nsidTokens[nsidAuthority].map(
                (nsidName) =>
                  `${toTitleCase(nsidName)}: "${nsidAuthority}.${nsidName}",`,
              ),
              "}",
            ].join("\n"),
          },
        ],
      });
    }

    //= export class AtpBaseClient {...}
    const clientCls = file.addClass({
      name: "AtpBaseClient",
      isExported: true,
      extends: "XrpcClient",
    });

    for (const ns of nsidTree) {
      //= ns: NS
      clientCls.addProperty({
        name: ns.propName,
        type: ns.className,
      });
    }

    //= constructor (options: FetchHandler | FetchHandlerOptions) {
    //=   super(options, schemas)
    //=   {namespace declarations}
    //= }
    clientCls.addConstructor({
      parameters: [
        { name: "options", type: "FetchHandler | FetchHandlerOptions" },
      ],
      statements: [
        "super(options, schemas)",
        ...nsidTree.map(
          (ns) => `this.${ns.propName} = new ${ns.className}(this)`,
        ),
      ],
    });

    //= /** @deprecated use `this` instead */
    //= get xrpc(): XrpcClient {
    //=   return this
    //= }
    clientCls
      .addGetAccessor({
        name: "xrpc",
        returnType: "XrpcClient",
        statements: ["return this"],
      })
      .addJsDoc("@deprecated use `this` instead");

    // generate classes for the schemas
    for (const ns of nsidTree) {
      genNamespaceCls(file, ns);
    }
  });

/**
 * Emit one `{Ns}NS` class for a node of the NSID tree: properties for child
 * namespaces and record types, one method per query/procedure, and (via
 * genRecordCls) one `{Type}Record` class per record type. Recurses into
 * child nodes.
 */
function genNamespaceCls(file: SourceFile, ns: DefTreeNode) {
  //= export class {ns}NS {...}
  const cls = file.addClass({
    name: ns.className,
    isExported: true,
  });
  //= _client: XrpcClient
  cls.addProperty({
    name: "_client",
    type: "XrpcClient",
  });

  for (const userType of ns.userTypes) {
    if (userType.def.type !== "record") {
      continue;
    }
    //= type: TypeRecord
    const name = NSID.parse(userType.nsid).name || "";
    cls.addProperty({
      name: toCamelCase(name),
      type: `${toTitleCase(userType.nsid)}Record`,
    });
  }

  for (const child of ns.children) {
    //= child: ChildNS
    cls.addProperty({
      name: child.propName,
      type: child.className,
    });

    // recurse
    genNamespaceCls(file, child);
  }

  //= constructor(public client: XrpcClient) {
  //=   this._client = client
  //=   {child namespace prop declarations}
  //=   {record prop declarations}
  //= }
  cls.addConstructor({
    parameters: [
      {
        name: "client",
        type: "XrpcClient",
      },
    ],
    statements: [
      `this._client = client`,
      ...ns.children.map(
        (ns) => `this.${ns.propName} = new ${ns.className}(client)`,
      ),
      ...ns.userTypes
        .filter((ut) => ut.def.type === "record")
        .map((ut) => {
          const name = NSID.parse(ut.nsid).name || "";
          return `this.${toCamelCase(name)} = new ${
            toTitleCase(
              ut.nsid,
            )
          }Record(client)`;
        }),
    ],
  });

  // methods
  // One method per query/procedure: queries take (params?, opts?), procedures
  // take (data?, opts?); both return the module's `Response` type.
  for (const userType of ns.userTypes) {
    if (userType.def.type !== "query" && userType.def.type !== "procedure") {
      continue;
    }
    const isGetReq = userType.def.type === "query";
    const moduleName = toTitleCase(userType.nsid);
    const name = toCamelCase(NSID.parse(userType.nsid).name || "");
    const method = cls.addMethod({
      name,
      returnType: `Promise<${moduleName}.Response>`,
    });
    if (isGetReq) {
      method.addParameter({
        name: "params?",
        type: `${moduleName}.QueryParams`,
      });
    } else if (userType.def.type === "procedure") {
      method.addParameter({
        name: "data?",
        type: `${moduleName}.InputSchema`,
      });
    }
    method.addParameter({
      name: "opts?",
      type: `${moduleName}.CallOptions`,
    });
    method.setBodyText(
      [
        `return this._client`,
        isGetReq
          ? `.call('${userType.nsid}', params, undefined, opts)`
          : `.call('${userType.nsid}', opts?.qp, data, opts)`,
        userType.def.errors?.length
          // Only add a catch block if there are custom errors
          ? `  .catch((e) => { throw ${moduleName}.toKnownErr(e) })`
          : "",
      ].join("\n"),
    );
  }

  // record api classes
  for (const userType of ns.userTypes) {
    if (userType.def.type !== "record") {
      continue;
    }
    genRecordCls(file, userType.nsid, userType.def);
  }
}

/**
 * Emit a `{Type}Record` class exposing list/get/create/delete helpers for a
 * record collection, each delegating to the corresponding com.atproto.repo
 * method with the collection NSID pre-filled. The put() helper is currently
 * commented out (kept below for reference).
 */
function genRecordCls(file: SourceFile, nsid: string, lexRecord: LexRecord) {
  //= export class {type}Record {...}
  const cls = file.addClass({
    name: `${toTitleCase(nsid)}Record`,
    isExported: true,
  });
  //= _client: XrpcClient
  cls.addProperty({
    name: "_client",
    type: "XrpcClient",
  });

  //= constructor(client: XrpcClient) {
  //=   this._client = client
  //= }
  const cons = cls.addConstructor();
  cons.addParameter({
    name: "client",
    type: "XrpcClient",
  });
  cons.setBodyText(`this._client = client`);

  // methods
  const typeModule = toTitleCase(nsid);
  {
    //= list()
    const method = cls.addMethod({
      isAsync: true,
      name: "list",
      returnType:
        `Promise<{cursor?: string, records: ({uri: string, value: ${typeModule}.Record})[]}>`,
    });
    method.addParameter({
      name: "params",
      type: `OmitKey<${
        toTitleCase(ATP_METHODS.list)
      }.QueryParams, "collection">`,
    });
    method.setBodyText(
      [
        `const res = await this._client.call('${ATP_METHODS.list}', { collection: '${nsid}', ...params })`,
        `return res.data`,
      ].join("\n"),
    );
  }
  {
    //= get()
    const method = cls.addMethod({
      isAsync: true,
      name: "get",
      returnType:
        `Promise<{uri: string, cid: string, value: ${typeModule}.Record}>`,
    });
    method.addParameter({
      name: "params",
      type: `OmitKey<${
        toTitleCase(ATP_METHODS.get)
      }.QueryParams, "collection">`,
    });
    method.setBodyText(
      [
        `const res = await this._client.call('${ATP_METHODS.get}', { collection: '${nsid}', ...params })`,
        `return res.data`,
      ].join("\n"),
    );
  }
  {
    //= create()
    const method = cls.addMethod({
      isAsync: true,
      name: "create",
      returnType: "Promise<{uri: string, cid: string}>",
    });
    method.addParameter({
      name: "params",
      type: `OmitKey<${
        toTitleCase(
          ATP_METHODS.create,
        )
      }.InputSchema, "collection" | "record">`,
    });
    method.addParameter({
      name: "record",
      type: `Un$Typed<${typeModule}.Record>`,
    });
    method.addParameter({
      name: "headers?",
      type: `Record<string, string>`,
    });
    // Records with a fixed key (e.g. "literal:self") get their rkey
    // pre-filled so callers cannot pick a different one by accident.
    const maybeRkeyPart = lexRecord.key?.startsWith("literal:")
      ? `rkey: '${lexRecord.key.replace("literal:", "")}', `
      : "";
    method.setBodyText(
      [
        `const collection = '${nsid}'`,
        `const res = await this._client.call('${ATP_METHODS.create}', undefined, { collection, ${maybeRkeyPart}...params, record: { ...record, $type: collection} }, {encoding: 'application/json', headers })`,
        `return res.data`,
      ].join("\n"),
    );
  }
  // {
  //   //= put()
  //   const method = cls.addMethod({
  //     isAsync: true,
  //     name: 'put',
  //     returnType: 'Promise<{uri: string, cid: string}>',
  //   })
  //   method.addParameter({
  //     name: 'params',
  //     type: `OmitKey<${toTitleCase(ATP_METHODS.put)}.InputSchema, "collection" | "record">`,
  //   })
  //   method.addParameter({
  //     name: 'record',
  //     type: `${typeModule}.Record`,
  //   })
  //   method.addParameter({
  //     name: 'headers?',
  //     type: `Record<string, string>`,
  //   })
  //   method.setBodyText(
  //     [
  //       `record.$type = '${userType.nsid}'`,
  //       `const res = await this._client.call('${ATP_METHODS.put}', undefined, { collection: '${userType.nsid}', record, ...params }, {encoding: 'application/json', headers})`,
  //       `return res.data`,
  //     ].join('\n'),
  //   )
  // }
  {
    //= delete()
    const method = cls.addMethod({
      isAsync: true,
      name: "delete",
      returnType: "Promise<void>",
    });
    method.addParameter({
      name: "params",
      type: `OmitKey<${
        toTitleCase(
          ATP_METHODS.delete,
        )
      }.InputSchema, "collection">`,
    });
    method.addParameter({
      name: "headers?",
      type: `Record<string, string>`,
    });

    method.setBodyText(
      [
        `await this._client.call('${ATP_METHODS.delete}', undefined, { collection: '${nsid}', ...params }, { headers })`,
      ].join("\n"),
    );
  }
}

/**
 * Generate the `/types/{nsid path}.ts` module for a single lexicon document:
 * xrpc param/input/output types for the main def, record/object/user types
 * for the rest, plus the shared client-side helpers (CallOptions, Response,
 * error classes, toKnownErr).
 *
 * NOTE(review): no `options` parameter is accepted or forwarded to
 * genCommonImports/genImports, so these modules always emit ".ts" import
 * extensions regardless of the --js flag — confirm intended.
 */
const lexiconTs = (
  project: Project,
  lexicons: Lexicons,
  lexiconDoc: LexiconDoc,
) =>
  gen(
    project,
    `/types/${lexiconDoc.id.split(".").join("/")}.ts`,
    (file) => {
      const main = lexiconDoc.defs.main;
      if (
        main?.type === "query" ||
        main?.type === "subscription" ||
        main?.type === "procedure"
      ) {
        //= import {HeadersMap, XRPCError} from '@atproto/xrpc'
        const xrpcImport = file.addImportDeclaration({
          moduleSpecifier: "@atproto/xrpc",
        });
        xrpcImport.addNamedImports([
          { name: "HeadersMap" },
          { name: "XRPCError" },
        ]);
      }

      genCommonImports(file, lexiconDoc.id, lexiconDoc);

      // `imports` collects cross-lexicon references discovered while
      // generating types; they are turned into import declarations at the end.
      const imports: Set<string> = new Set();
      for (const defId in lexiconDoc.defs) {
        const def = lexiconDoc.defs[defId];
        const lexUri = `${lexiconDoc.id}#${defId}`;
        if (defId === "main") {
          if (def.type === "query" || def.type === "procedure") {
            genXrpcParams(file, lexicons, lexUri, false);
            genXrpcInput(file, imports, lexicons, lexUri, false);
            genXrpcOutput(file, imports, lexicons, lexUri);
            genClientXrpcCommon(file, lexicons, lexUri);
          } else if (def.type === "subscription") {
            // Subscriptions get no client-side codegen here.
            continue;
          } else if (def.type === "record") {
            genRecord(file, imports, lexicons, lexUri);
          } else {
            genUserType(file, imports, lexicons, lexUri);
          }
        } else {
          genUserType(file, imports, lexicons, lexUri);
        }
      }
      genImports(file, imports, lexiconDoc.id);
      return Promise.resolve();
    },
  );

/**
 * Emit the client-side boilerplate shared by every query/procedure module:
 * the `CallOptions` and `Response` interfaces, one `XRPCError` subclass per
 * declared lexicon error, and the `toKnownErr` mapper that converts a raw
 * XRPCError into the matching subclass.
 */
function genClientXrpcCommon(
  file: SourceFile,
  lexicons: Lexicons,
  lexUri: string,
) {
  const def = lexicons.getDefOrThrow(lexUri, ["query", "procedure"]);

  //= export interface CallOptions {...}
  const opts = file.addInterface({
    name: "CallOptions",
    isExported: true,
  });
  opts.addProperty({ name: "signal?", type: "AbortSignal" });
  opts.addProperty({ name: "headers?", type: "HeadersMap" });
  if (def.type === "procedure") {
    opts.addProperty({ name: "qp?", type: "QueryParams" });
  }
  if (def.type === "procedure" && def.input) {
    // "*/*" means any encoding; otherwise build a union of the accepted
    // MIME-type literals from the comma-separated encoding list.
    let encodingType = "string";
    if (def.input.encoding !== "*/*") {
      encodingType = def.input.encoding
        .split(",")
        .map((v) => `'${v.trim()}'`)
        .join(" | ");
    }
    opts.addProperty({
      name: "encoding?",
      type: encodingType,
    });
  }

  // export interface Response {...}
  const res = file.addInterface({
    name: "Response",
    isExported: true,
  });
  res.addProperty({ name: "success", type: "boolean" });
  res.addProperty({ name: "headers", type: "HeadersMap" });
  if (def.output?.schema) {
    if (def.output.encoding?.includes(",")) {
      // Multiple output encodings: data may be parsed JSON or raw bytes.
      res.addProperty({ name: "data", type: "OutputSchema | Uint8Array" });
    } else {
      res.addProperty({ name: "data", type: "OutputSchema" });
    }
  } else if (def.output?.encoding) {
    res.addProperty({ name: "data", type: "Uint8Array" });
  }

  // export class {errcode}Error {...}
  const customErrors: { name: string; cls: string }[] = [];
  for (const error of def.errors || []) {
    let name = toTitleCase(error.name);
    if (!name.endsWith("Error")) name += "Error";
    const errCls = file.addClass({
      name,
      extends: "XRPCError",
      isExported: true,
    });
    errCls.addConstructor({
      parameters: [{ name: "src", type: "XRPCError" }],
      statements: [
        "super(src.status, src.error, src.message, src.headers, { cause: src })",
      ],
    });

    customErrors.push({ name: error.name, cls: name });
  }

  // export function toKnownErr(err: any) {...}
  file.addFunction({
    name: "toKnownErr",
    isExported: true,
    parameters: [{ name: "e", type: "any" }],
    statements: customErrors.length
      ? [
        "if (e instanceof XRPCError) {",
        ...customErrors.map(
          (err) => `if (e.error === '${err.name}') return new ${err.cls}(e)`,
        ),
        "}",
        "return e",
      ]
      : ["return e"],
  });
}
+286
lex-cli/codegen/common.ts
··· 1 + import { 2 + type Project, 3 + type SourceFile, 4 + VariableDeclarationKind, 5 + } from "ts-morph"; 6 + import type { LexiconDoc } from "@atproto/lexicon"; 7 + import type { GeneratedFile } from "../types.ts"; 8 + import type { CodeGenOptions } from "./util.ts"; 9 + 10 + export const utilTs = ( 11 + project: Project, 12 + ) => 13 + gen(project, "/util.ts", (file) => { 14 + file.replaceWithText(` 15 + import { type ValidationResult } from '@atproto/lexicon' 16 + 17 + export type OmitKey<T, K extends keyof T> = { 18 + [K2 in keyof T as K2 extends K ? never : K2]: T[K2] 19 + } 20 + 21 + export type $Typed<V, T extends string = string> = V & { $type: T } 22 + export type Un$Typed<V extends { $type?: string }> = OmitKey<V, '$type'> 23 + 24 + export type $Type<Id extends string, Hash extends string> = Hash extends 'main' 25 + ? Id 26 + : \`\${Id}#\${Hash}\` 27 + 28 + function isObject<V>(v: V): v is V & object { 29 + return v != null && typeof v === 'object' 30 + } 31 + 32 + function is$type<Id extends string, Hash extends string>( 33 + $type: unknown, 34 + id: Id, 35 + hash: Hash, 36 + ): $type is $Type<Id, Hash> { 37 + return hash === 'main' 38 + ? $type === id 39 + : // $type === \`\${id}#\${hash}\` 40 + typeof $type === 'string' && 41 + $type.length === id.length + 1 + hash.length && 42 + $type.charCodeAt(id.length) === 35 /* '#' */ && 43 + $type.startsWith(id) && 44 + $type.endsWith(hash) 45 + } 46 + ${ 47 + /** 48 + * The construct below allows to properly distinguish open unions. Consider 49 + * the following example: 50 + * 51 + * ```ts 52 + * type Foo = { $type?: $Type<'foo', 'main'>; foo: string } 53 + * type Bar = { $type?: $Type<'bar', 'main'>; bar: string } 54 + * type OpenFooBarUnion = $Typed<Foo> | $Typed<Bar> | { $type: string } 55 + * ``` 56 + * 57 + * In the context of lexicons, when there is a open union as shown above, the 58 + * if `$type` if either `foo` or `bar`, then the object IS of type `Foo` or 59 + * `Bar`. 
60 + * 61 + * ```ts 62 + * declare const obj1: OpenFooBarUnion 63 + * if (is$typed(obj1, 'foo', 'main')) { 64 + * obj1.$type // $Type<'foo', 'main'> 65 + * obj1.foo // string 66 + * } 67 + * ``` 68 + * 69 + * Similarly, if an object is of type `unknown`, then the `is$typed` function 70 + * should only return assurance about the `$type` property, which is what it 71 + * actually checks: 72 + * 73 + * ```ts 74 + * declare const obj2: unknown 75 + * if (is$typed(obj2, 'foo', 'main')) { 76 + * obj2.$type // $Type<'foo', 'main'> 77 + * // @ts-expect-error 78 + * obj2.foo 79 + * } 80 + * ``` 81 + * 82 + * The construct bellow is what makes these two scenarios possible. 83 + */ 84 + ""} 85 + export type $TypedObject<V, Id extends string, Hash extends string> = V extends { 86 + $type: $Type<Id, Hash> 87 + } 88 + ? V 89 + : V extends { $type?: string } 90 + ? V extends { $type?: infer T extends $Type<Id, Hash> } 91 + ? V & { $type: T } 92 + : never 93 + : V & { $type: $Type<Id, Hash> } 94 + 95 + export function is$typed<V, Id extends string, Hash extends string>( 96 + v: V, 97 + id: Id, 98 + hash: Hash, 99 + ): v is $TypedObject<V, Id, Hash> { 100 + return isObject(v) && '$type' in v && is$type(v.$type, id, hash) 101 + } 102 + 103 + export function maybe$typed<V, Id extends string, Hash extends string>( 104 + v: V, 105 + id: Id, 106 + hash: Hash, 107 + ): v is V & object & { $type?: $Type<Id, Hash> } { 108 + return ( 109 + isObject(v) && 110 + ('$type' in v 111 + ? v.$type === undefined || is$type(v.$type, id, hash) 112 + : true) 113 + ) 114 + } 115 + 116 + export type Validator<R = unknown> = (v: unknown) => ValidationResult<R> 117 + export type ValidatorParam<V extends Validator> = 118 + V extends Validator<infer R> ? R : never 119 + 120 + /** 121 + * Utility function that allows to convert a "validate*" utility function into a 122 + * type predicate. 
123 + */ 124 + export function asPredicate<V extends Validator>(validate: V) { 125 + return function <T>(v: T): v is T & ValidatorParam<V> { 126 + return validate(v).success 127 + } 128 + } 129 + `); 130 + }); 131 + 132 + export const lexiconsTs = ( 133 + project: Project, 134 + lexiconDocs: LexiconDoc[], 135 + options?: CodeGenOptions, 136 + ) => 137 + gen(project, "/lexicons.ts", (file) => { 138 + const extension = options?.useJsExtension ? ".js" : ".ts"; 139 + const nsidToEnum = (nsid: string): string => { 140 + return nsid 141 + .split(".") 142 + .map((word) => word[0].toUpperCase() + word.slice(1)) 143 + .join(""); 144 + }; 145 + 146 + //= import { type LexiconDoc, Lexicons } from '@atproto/lexicon' 147 + file 148 + .addImportDeclaration({ 149 + moduleSpecifier: "@atproto/lexicon", 150 + }) 151 + .addNamedImports([ 152 + { name: "LexiconDoc", isTypeOnly: true }, 153 + { name: "Lexicons" }, 154 + { name: "ValidationError" }, 155 + { name: "ValidationResult", isTypeOnly: true }, 156 + ]); 157 + 158 + //= import { is$typed, maybe$typed, type $Typed } from "./util${extension}" 159 + file 160 + .addImportDeclaration({ moduleSpecifier: `./util${extension}` }) 161 + .addNamedImports([ 162 + { name: "is$typed" }, 163 + { name: "maybe$typed" }, 164 + ]); 165 + 166 + //= export const schemaDict = {...} as const satisfies Record<string, LexiconDoc> 167 + file.addVariableStatement({ 168 + isExported: true, 169 + declarationKind: VariableDeclarationKind.Const, 170 + declarations: [ 171 + { 172 + name: "schemaDict", 173 + initializer: JSON.stringify( 174 + lexiconDocs.reduce( 175 + (acc, cur) => ({ 176 + ...acc, 177 + [nsidToEnum(cur.id)]: cur, 178 + }), 179 + {}, 180 + ), 181 + null, 182 + 2, 183 + ) + " as Record<string, LexiconDoc>", 184 + }, 185 + ], 186 + }); 187 + 188 + //= export const schemas = Object.values(schemaDict) satisfies LexiconDoc[] 189 + file.addVariableStatement({ 190 + isExported: true, 191 + declarationKind: VariableDeclarationKind.Const, 192 + 
declarations: [ 193 + { 194 + name: "schemas", 195 + initializer: "Object.values(schemaDict) satisfies LexiconDoc[]", 196 + }, 197 + ], 198 + }); 199 + 200 + //= export const lexicons: Lexicons = new Lexicons(schemas) 201 + file.addVariableStatement({ 202 + isExported: true, 203 + declarationKind: VariableDeclarationKind.Const, 204 + declarations: [ 205 + { 206 + name: "lexicons", 207 + type: "Lexicons", 208 + initializer: "new Lexicons(schemas)", 209 + }, 210 + ], 211 + }); 212 + 213 + file.addFunction({ 214 + isExported: true, 215 + name: "validate", 216 + overloads: [ 217 + { 218 + typeParameters: ["T extends { $type: string }"], 219 + parameters: [ 220 + { name: "v", type: "unknown" }, 221 + { name: "id", type: "string" }, 222 + { name: "hash", type: "string" }, 223 + { name: "requiredType", type: "true" }, 224 + ], 225 + returnType: "ValidationResult<T>", 226 + }, 227 + { 228 + typeParameters: ["T extends { $type?: string }"], 229 + parameters: [ 230 + { name: "v", type: "unknown" }, 231 + { name: "id", type: "string" }, 232 + { name: "hash", type: "string" }, 233 + { name: "requiredType", type: "false", hasQuestionToken: true }, 234 + ], 235 + returnType: "ValidationResult<T>", 236 + }, 237 + ], 238 + parameters: [ 239 + { name: "v", type: "unknown" }, 240 + { name: "id", type: "string" }, 241 + { name: "hash", type: "string" }, 242 + { name: "requiredType", type: "boolean", hasQuestionToken: true }, 243 + ], 244 + statements: [ 245 + // If $type is present, make sure it is valid before validating the rest of the object 246 + "return (requiredType ? is$typed : maybe$typed)(v, id, hash) ? lexicons.validate(`${id}#${hash}`, v) : { success: false, error: new ValidationError(`Must be an object with \"${hash === 'main' ? 
id : `${id}#${hash}`}\" $type property`) }", 247 + ], 248 + returnType: "ValidationResult", 249 + }); 250 + 251 + //= export const ids = {...} 252 + file.addVariableStatement({ 253 + isExported: true, 254 + declarationKind: VariableDeclarationKind.Const, 255 + declarations: [ 256 + { 257 + name: "ids", 258 + initializer: `{${ 259 + lexiconDocs 260 + .map( 261 + (lex) => 262 + `\n ${nsidToEnum(lex.id)}: ${JSON.stringify(lex.id)},`, 263 + ) 264 + .join("") 265 + }\n} as const`, 266 + }, 267 + ], 268 + }); 269 + }); 270 + 271 + export async function gen( 272 + project: Project, 273 + path: string, 274 + gen: (file: SourceFile) => void | Promise<void>, 275 + ): Promise<GeneratedFile> { 276 + const file = project.createSourceFile(path); 277 + gen(file); 278 + await file.save(); // Save in the "in memory" file system 279 + const content = `${banner()}${file.getFullText()}`; 280 + 281 + return { path, content }; 282 + } 283 + 284 + function banner() { 285 + return `/**\n * GENERATED CODE - DO NOT MODIFY\n */\n`; 286 + }
+902
lex-cli/codegen/lex-gen.ts
··· 1 + import { relative as getRelativePath } from "@std/path"; 2 + import { type JSDoc, type SourceFile, VariableDeclarationKind } from "ts-morph"; 3 + import type { 4 + LexArray, 5 + LexBlob, 6 + LexBytes, 7 + LexCidLink, 8 + Lexicons, 9 + LexIpldType, 10 + LexObject, 11 + LexPrimitive, 12 + LexToken, 13 + } from "@atproto/lexicon"; 14 + import { 15 + type CodeGenOptions, 16 + toCamelCase, 17 + toScreamingSnakeCase, 18 + toTitleCase, 19 + } from "./util.ts"; 20 + import type { LexiconDoc } from "@atproto/lexicon"; 21 + import type { LexUserType } from "@atproto/lexicon"; 22 + 23 + interface Commentable { 24 + addJsDoc: ({ description }: { description: string }) => JSDoc; 25 + } 26 + export function genComment<T extends Commentable>( 27 + commentable: T, 28 + def: { description?: string }, 29 + ): T { 30 + if (def.description) { 31 + commentable.addJsDoc({ description: def.description }); 32 + } 33 + return commentable; 34 + } 35 + 36 + export function genCommonImports( 37 + file: SourceFile, 38 + baseNsid: string, 39 + lexiconDoc: LexiconDoc, 40 + options?: CodeGenOptions, 41 + ) { 42 + const extension = options?.useJsExtension ? 
".js" : ".ts"; 43 + const needsBlobRef = Object.values(lexiconDoc.defs).some((def: LexUserType) => 44 + def.type === "blob" || 45 + (def.type === "object" && 46 + Object.values((def as LexObject).properties || {}).some((prop) => 47 + "type" in prop && (prop.type === "blob" || 48 + (prop.type === "array" && "items" in prop && 49 + prop.items.type === "blob")) 50 + )) || 51 + (def.type === "array" && def.items.type === "blob") || 52 + // Check record schema for blobs 53 + (def.type === "record" && 54 + Object.values(def.record.properties || {}).some((prop) => 55 + "type" in prop && (prop.type === "blob" || 56 + (prop.type === "array" && "items" in prop && 57 + prop.items.type === "blob")) 58 + )) || 59 + // Check output schema for blobs 60 + (def.type === "query" || def.type === "procedure") && 61 + def.output?.schema?.type === "object" && 62 + Object.values(def.output.schema.properties || {}).some((prop) => 63 + "type" in prop && (prop.type === "blob" || 64 + (prop.type === "array" && "items" in prop && 65 + prop.items.type === "blob")) 66 + ) 67 + ); 68 + 69 + const needsCID = Object.values(lexiconDoc.defs).some((def: LexUserType) => 70 + def.type === "cid-link" || 71 + (def.type === "object" && 72 + Object.values((def as LexObject).properties || {}).some((prop) => 73 + "type" in prop && prop.type === "cid-link" 74 + )) || 75 + (def.type === "array" && def.items.type === "cid-link") || 76 + // Check record schema for cid-links 77 + (def.type === "record" && 78 + Object.values(def.record.properties || {}).some((prop) => 79 + "type" in prop && (prop.type === "cid-link" || 80 + (prop.type === "array" && "items" in prop && 81 + prop.items.type === "cid-link")) 82 + )) || 83 + // Check output schema for cid-links 84 + (def.type === "query" || def.type === "procedure") && 85 + def.output?.schema?.type === "object" && 86 + Object.values(def.output.schema.properties || {}).some((prop) => 87 + "type" in prop && (prop.type === "cid-link" || 88 + (prop.type === "array" && 
"items" in prop && 89 + prop.items.type === "cid-link")) 90 + ) 91 + ); 92 + 93 + const needsTypedValidation = Object.values(lexiconDoc.defs).some(( 94 + def: LexUserType, 95 + ) => def.type === "record" || def.type === "object"); 96 + 97 + const needsUnionType = Object.values(lexiconDoc.defs).some( 98 + (def: LexUserType) => { 99 + // Check direct array unions 100 + if (def.type === "array" && def.items.type === "union") return true; 101 + 102 + // Check object property unions 103 + if (def.type === "object") { 104 + return Object.values((def as LexObject).properties || {}).some((prop) => 105 + prop.type === "union" || 106 + (prop.type === "array" && prop.items?.type === "union") 107 + ); 108 + } 109 + 110 + // Check record property unions 111 + if (def.type === "record") { 112 + return Object.values(def.record.properties || {}).some((prop) => 113 + "type" in prop && ( 114 + prop.type === "union" || 115 + (prop.type === "array" && "items" in prop && 116 + prop.items.type === "union") 117 + ) 118 + ); 119 + } 120 + 121 + // Check procedure input/output schemas 122 + if (def.type === "procedure") { 123 + // Check input schema 124 + if (def.input?.schema?.type === "union") return true; 125 + if (def.input?.schema?.type === "object") { 126 + return Object.values(def.input.schema.properties || {}).some((prop) => 127 + "type" in prop && ( 128 + prop.type === "union" || 129 + (prop.type === "array" && "items" in prop && 130 + prop.items.type === "union") 131 + ) 132 + ); 133 + } 134 + // Check output schema 135 + if (def.output?.schema?.type === "union") return true; 136 + if (def.output?.schema?.type === "object") { 137 + return Object.values(def.output.schema.properties || {}).some(( 138 + prop, 139 + ) => 140 + "type" in prop && ( 141 + prop.type === "union" || 142 + (prop.type === "array" && "items" in prop && 143 + prop.items.type === "union") 144 + ) 145 + ); 146 + } 147 + } 148 + 149 + // Check query output schemas 150 + if (def.type === "query") { 151 + if 
(def.output?.schema?.type === "union") return true; 152 + if (def.output?.schema?.type === "object") { 153 + return Object.values(def.output.schema.properties || {}).some(( 154 + prop, 155 + ) => 156 + "type" in prop && ( 157 + prop.type === "union" || 158 + (prop.type === "array" && "items" in prop && 159 + prop.items.type === "union") 160 + ) 161 + ); 162 + } 163 + } 164 + 165 + // Check subscription message schemas 166 + if (def.type === "subscription") { 167 + if (def.message?.schema?.type === "union") return true; 168 + if (def.message?.schema?.type === "object") { 169 + return Object.values(def.message.schema.properties || {}).some(( 170 + prop, 171 + ) => 172 + "type" in prop && ( 173 + prop.type === "union" || 174 + (prop.type === "array" && "items" in prop && 175 + prop.items.type === "union") 176 + ) 177 + ); 178 + } 179 + } 180 + 181 + return false; 182 + }, 183 + ); 184 + 185 + const needsIdConstant = Object.values(lexiconDoc.defs).some(( 186 + def: LexUserType, 187 + ) => 188 + (def.type === "string" && 189 + (def.enum?.length || def.const || def.knownValues?.length)) || 190 + def.type === "record" || 191 + def.type === "object" 192 + ); 193 + 194 + //= import {BlobRef} from '@atproto/lexicon' 195 + if (needsBlobRef) { 196 + file 197 + .addImportDeclaration({ 198 + moduleSpecifier: "@atproto/lexicon", 199 + }) 200 + .addNamedImports([{ name: "BlobRef" }]); 201 + } 202 + 203 + //= import {CID} from 'multiformats/cid' 204 + if (needsCID) { 205 + file 206 + .addImportDeclaration({ 207 + moduleSpecifier: "multiformats/cid", 208 + }) 209 + .addNamedImports([{ name: "CID" }]); 210 + } 211 + 212 + if (needsTypedValidation) { 213 + //= import { validate as _validate } from '../../lexicons.ts' 214 + file 215 + .addImportDeclaration({ 216 + moduleSpecifier: `${ 217 + baseNsid 218 + .split(".") 219 + .map((_str) => "..") 220 + .join("/") 221 + }/lexicons${extension}`, 222 + }) 223 + .addNamedImports([{ name: "validate", alias: "_validate" }]); 224 + 225 + //= 
import { is$typed as _is$typed } from '../[...]/util.ts' 226 + file 227 + .addImportDeclaration({ 228 + moduleSpecifier: `${ 229 + baseNsid 230 + .split(".") 231 + .map((_str) => "..") 232 + .join("/") 233 + }/util${extension}`, 234 + }) 235 + .addNamedImports([ 236 + { name: "is$typed", alias: "_is$typed" }, 237 + ]); 238 + 239 + // tsc adds protection against circular imports, which hurts bundle size. 240 + // Since we know that lexicon.ts and util.ts do not depend on the file being 241 + // generated, we can safely bypass this protection. 242 + // Note that we are not using `import * as util from '../../util'` because 243 + // typescript will emit is own helpers for the import, which we want to avoid. 244 + file.addVariableStatement({ 245 + isExported: false, 246 + declarationKind: VariableDeclarationKind.Const, 247 + declarations: [ 248 + { name: "is$typed", initializer: "_is$typed" }, 249 + { name: "validate", initializer: "_validate" }, 250 + ], 251 + }); 252 + } 253 + 254 + if (needsIdConstant) { 255 + //= const id = "{baseNsid}" 256 + file.addVariableStatement({ 257 + isExported: false, // Do not export to allow tree-shaking 258 + declarationKind: VariableDeclarationKind.Const, 259 + declarations: [{ name: "id", initializer: JSON.stringify(baseNsid) }], 260 + }); 261 + } 262 + 263 + if (needsUnionType) { 264 + //= import { type $Typed } from '../[...]/util.ts' 265 + file 266 + .addImportDeclaration({ 267 + moduleSpecifier: `${ 268 + baseNsid 269 + .split(".") 270 + .map((_str) => "..") 271 + .join("/") 272 + }/util${extension}`, 273 + }) 274 + .addNamedImports([ 275 + { name: "$Typed", isTypeOnly: true }, 276 + ]); 277 + } 278 + } 279 + 280 + export function genImports( 281 + file: SourceFile, 282 + imports: Set<string>, 283 + baseNsid: string, 284 + options?: CodeGenOptions, 285 + ) { 286 + const startPath = "/" + baseNsid.split(".").slice(0, -1).join("/"); 287 + const extension = options?.useJsExtension ? 
".js" : ".ts"; 288 + 289 + for (const nsid of imports) { 290 + const targetPath = "/" + nsid.split(".").join("/") + extension; 291 + let resolvedPath = getRelativePath(startPath, targetPath); 292 + if (!resolvedPath.startsWith(".")) { 293 + resolvedPath = `./${resolvedPath}`; 294 + } 295 + file.addImportDeclaration({ 296 + isTypeOnly: true, 297 + moduleSpecifier: resolvedPath, 298 + namespaceImport: toTitleCase(nsid), 299 + }); 300 + } 301 + } 302 + 303 + export function genUserType( 304 + file: SourceFile, 305 + imports: Set<string>, 306 + lexicons: Lexicons, 307 + lexUri: string, 308 + ) { 309 + const def = lexicons.getDefOrThrow(lexUri); 310 + switch (def.type) { 311 + case "array": 312 + genArray(file, imports, lexUri, def); 313 + break; 314 + case "token": 315 + genToken(file, lexUri, def); 316 + break; 317 + case "object": { 318 + const ifaceName: string = toTitleCase(getHash(lexUri)); 319 + genObject(file, imports, lexUri, def, ifaceName, { 320 + typeProperty: true, 321 + }); 322 + genObjHelpers(file, lexUri, ifaceName, { 323 + requireTypeProperty: false, 324 + }); 325 + break; 326 + } 327 + 328 + case "blob": 329 + case "bytes": 330 + case "cid-link": 331 + case "boolean": 332 + case "integer": 333 + case "string": 334 + case "unknown": 335 + genPrimitiveOrBlob(file, lexUri, def); 336 + break; 337 + 338 + default: 339 + throw new Error( 340 + `genLexUserType() called with wrong definition type (${def.type}) in ${lexUri}`, 341 + ); 342 + } 343 + } 344 + 345 + function genObject( 346 + file: SourceFile, 347 + imports: Set<string>, 348 + lexUri: string, 349 + def: LexObject, 350 + ifaceName: string, 351 + { 352 + defaultsArePresent = true, 353 + allowUnknownProperties = false, 354 + typeProperty = false, 355 + }: { 356 + defaultsArePresent?: boolean; 357 + allowUnknownProperties?: boolean; 358 + typeProperty?: boolean | "required"; 359 + } = {}, 360 + ) { 361 + const iface = file.addInterface({ 362 + name: ifaceName, 363 + isExported: true, 364 + }); 365 + 
genComment(iface, def); 366 + 367 + if (typeProperty) { 368 + const hash = getHash(lexUri); 369 + const baseNsid = stripScheme(stripHash(lexUri)); 370 + 371 + //= $type?: <uri> 372 + iface.addProperty({ 373 + name: typeProperty === "required" ? `$type` : `$type?`, 374 + type: 375 + // Not using $Type here because it is less readable than a plain string 376 + // `$Type<${JSON.stringify(baseNsid)}, ${JSON.stringify(hash)}>` 377 + hash === "main" 378 + ? JSON.stringify(`${baseNsid}`) 379 + : JSON.stringify(`${baseNsid}#${hash}`), 380 + }); 381 + } 382 + 383 + const nullableProps = new Set(def.nullable); 384 + if (def.properties) { 385 + for (const propKey in def.properties) { 386 + const propDef = def.properties[propKey]; 387 + const propNullable = nullableProps.has(propKey); 388 + const req = def.required?.includes(propKey) || 389 + (defaultsArePresent && 390 + "default" in propDef && 391 + propDef.default !== undefined); 392 + if (propDef.type === "ref" || propDef.type === "union") { 393 + //= propName: External|External 394 + const types = propDef.type === "union" 395 + ? propDef.refs.map((ref) => refToUnionType(ref, lexUri, imports)) 396 + : [refToType(propDef.ref, stripScheme(stripHash(lexUri)), imports)]; 397 + if (propDef.type === "union" && !propDef.closed) { 398 + types.push("{ $type: string }"); 399 + } 400 + iface.addProperty({ 401 + name: `${propKey}${req ? "" : "?"}`, 402 + type: makeType(types, { nullable: propNullable }), 403 + }); 404 + continue; 405 + } else { 406 + if (propDef.type === "array") { 407 + //= propName: type[] 408 + let propAst; 409 + if (propDef.items.type === "ref") { 410 + propAst = iface.addProperty({ 411 + name: `${propKey}${req ? 
"" : "?"}`, 412 + type: makeType( 413 + refToType( 414 + propDef.items.ref, 415 + stripScheme(stripHash(lexUri)), 416 + imports, 417 + ), 418 + { 419 + nullable: propNullable, 420 + array: true, 421 + }, 422 + ), 423 + }); 424 + } else if (propDef.items.type === "union") { 425 + const types = propDef.items.refs.map((ref) => 426 + refToUnionType(ref, lexUri, imports) 427 + ); 428 + if (!propDef.items.closed) { 429 + types.push("{ $type: string }"); 430 + } 431 + propAst = iface.addProperty({ 432 + name: `${propKey}${req ? "" : "?"}`, 433 + type: makeType(types, { 434 + nullable: propNullable, 435 + array: true, 436 + }), 437 + }); 438 + } else { 439 + propAst = iface.addProperty({ 440 + name: `${propKey}${req ? "" : "?"}`, 441 + type: makeType(primitiveOrBlobToType(propDef.items), { 442 + nullable: propNullable, 443 + array: true, 444 + }), 445 + }); 446 + } 447 + genComment(propAst, propDef); 448 + } else { 449 + //= propName: type 450 + genComment( 451 + iface.addProperty({ 452 + name: `${propKey}${req ? 
"" : "?"}`, 453 + type: makeType(primitiveOrBlobToType(propDef), { 454 + nullable: propNullable, 455 + }), 456 + }), 457 + propDef, 458 + ); 459 + } 460 + } 461 + } 462 + 463 + if (allowUnknownProperties) { 464 + //= [k: string]: unknown 465 + iface.addIndexSignature({ 466 + keyName: "k", 467 + keyType: "string", 468 + returnType: "unknown", 469 + }); 470 + } 471 + } 472 + } 473 + 474 + export function genToken(file: SourceFile, lexUri: string, def: LexToken) { 475 + //= /** <comment> */ 476 + //= export const <TOKEN> = `${id}#<token>` 477 + genComment( 478 + file.addVariableStatement({ 479 + isExported: true, 480 + declarationKind: VariableDeclarationKind.Const, 481 + declarations: [ 482 + { 483 + name: toScreamingSnakeCase(getHash(lexUri)), 484 + initializer: `\`\${id}#${getHash(lexUri)}\``, 485 + }, 486 + ], 487 + }), 488 + def, 489 + ); 490 + } 491 + 492 + export function genArray( 493 + file: SourceFile, 494 + imports: Set<string>, 495 + lexUri: string, 496 + def: LexArray, 497 + ) { 498 + if (def.items.type === "ref") { 499 + file.addTypeAlias({ 500 + name: toTitleCase(getHash(lexUri)), 501 + type: `${ 502 + refToType( 503 + def.items.ref, 504 + stripScheme(stripHash(lexUri)), 505 + imports, 506 + ) 507 + }[]`, 508 + isExported: true, 509 + }); 510 + } else if (def.items.type === "union") { 511 + const types = def.items.refs.map((ref) => 512 + refToUnionType(ref, lexUri, imports) 513 + ); 514 + if (!def.items.closed) { 515 + types.push("{ $type: string }"); 516 + } 517 + file.addTypeAlias({ 518 + name: toTitleCase(getHash(lexUri)), 519 + type: `(${types.join("|")})[]`, 520 + isExported: true, 521 + }); 522 + } else { 523 + genComment( 524 + file.addTypeAlias({ 525 + name: toTitleCase(getHash(lexUri)), 526 + type: `${primitiveOrBlobToType(def.items)}[]`, 527 + isExported: true, 528 + }), 529 + def, 530 + ); 531 + } 532 + } 533 + 534 + export function genPrimitiveOrBlob( 535 + file: SourceFile, 536 + lexUri: string, 537 + def: LexPrimitive | LexBlob | 
LexIpldType, 538 + ) { 539 + genComment( 540 + file.addTypeAlias({ 541 + name: toTitleCase(getHash(lexUri)), 542 + type: primitiveOrBlobToType(def), 543 + isExported: true, 544 + }), 545 + def, 546 + ); 547 + } 548 + 549 + export function genXrpcParams( 550 + file: SourceFile, 551 + lexicons: Lexicons, 552 + lexUri: string, 553 + defaultsArePresent = true, 554 + ) { 555 + const def = lexicons.getDefOrThrow(lexUri, [ 556 + "query", 557 + "subscription", 558 + "procedure", 559 + ]); 560 + 561 + // @NOTE We need to use a `type` here instead of an `interface` because we 562 + // need the generated type to be used as generic type parameter like this: 563 + // 564 + // type QueryParams = {} // Generated by this function 565 + // 566 + // type MyUtil<P extends xrpcServer.QueryParam> = (...) 567 + // type NsType = MyUtil<NS.QueryParams> // ERROR if `NS.QueryParams` is an `interface` 568 + // 569 + // Second line will fail if `NS.QueryParams` is an `interface` that does 570 + // not explicitly extend `xrpcServer.QueryParam`, or have a string index 571 + // signature that encompasses `xrpcServer.QueryParam`. 572 + 573 + //= export type QueryParams = {...} 574 + if ( 575 + def.parameters && def.parameters.properties && 576 + Object.keys(def.parameters.properties).length > 0 577 + ) { 578 + genComment( 579 + file.addTypeAlias({ 580 + name: "QueryParams", 581 + isExported: true, 582 + type: `{ 583 + ${ 584 + Object.entries(def.parameters.properties) 585 + .map(([paramKey, paramDef]) => { 586 + const req = def.parameters!.required?.includes(paramKey) || 587 + (defaultsArePresent && 588 + "default" in paramDef && 589 + paramDef.default !== undefined); 590 + const jsDoc = paramDef.description 591 + ? `/** ${paramDef.description} */\n` 592 + : ""; 593 + return `${jsDoc}${paramKey}${req ? "" : "?"}: ${ 594 + paramDef.type === "array" 595 + ? 
primitiveToType(paramDef.items) + "[]" 596 + : primitiveToType(paramDef) 597 + }`; 598 + }) 599 + .join("\n") 600 + } 601 + }`, 602 + }), 603 + def.parameters, 604 + ); 605 + } else { 606 + file.addTypeAlias({ 607 + name: "QueryParams", 608 + isExported: true, 609 + type: "globalThis.Record<PropertyKey, never>", 610 + }); 611 + } 612 + } 613 + 614 + export function genXrpcInput( 615 + file: SourceFile, 616 + imports: Set<string>, 617 + lexicons: Lexicons, 618 + lexUri: string, 619 + defaultsArePresent = true, 620 + ) { 621 + const def = lexicons.getDefOrThrow(lexUri, ["query", "procedure"]); 622 + 623 + if (def.type === "procedure" && def.input?.schema) { 624 + if (def.input.schema.type === "ref" || def.input.schema.type === "union") { 625 + //= export type InputSchema = ... 626 + 627 + const types = def.input.schema.type === "union" 628 + ? def.input.schema.refs.map((ref) => 629 + refToUnionType(ref, lexUri, imports) 630 + ) 631 + : [ 632 + refToType( 633 + def.input.schema.ref, 634 + stripScheme(stripHash(lexUri)), 635 + imports, 636 + ), 637 + ]; 638 + 639 + if (def.input.schema.type === "union" && !def.input.schema.closed) { 640 + types.push("{ $type: string }"); 641 + } 642 + file.addTypeAlias({ 643 + name: "InputSchema", 644 + type: types.join("|"), 645 + isExported: true, 646 + }); 647 + } else { 648 + //= export interface InputSchema {...} 649 + genObject(file, imports, lexUri, def.input.schema, `InputSchema`, { 650 + defaultsArePresent, 651 + }); 652 + } 653 + } else if (def.type === "procedure" && def.input?.encoding) { 654 + //= export type InputSchema = string | Uint8Array | Blob 655 + file.addTypeAlias({ 656 + isExported: true, 657 + name: "InputSchema", 658 + type: "string | Uint8Array | Blob", 659 + }); 660 + } else { 661 + //= export type InputSchema = undefined 662 + file.addTypeAlias({ 663 + isExported: true, 664 + name: "InputSchema", 665 + type: "undefined", 666 + }); 667 + } 668 + } 669 + 670 + export function genXrpcOutput( 671 + file: 
SourceFile, 672 + imports: Set<string>, 673 + lexicons: Lexicons, 674 + lexUri: string, 675 + defaultsArePresent = true, 676 + ) { 677 + const def = lexicons.getDefOrThrow(lexUri, [ 678 + "query", 679 + "subscription", 680 + "procedure", 681 + ]); 682 + 683 + const schema = def.type === "subscription" 684 + ? def.message?.schema 685 + : def.output?.schema; 686 + if (schema) { 687 + if (schema.type === "ref" || schema.type === "union") { 688 + //= export type OutputSchema = ... 689 + const types = schema.type === "union" 690 + ? schema.refs.map((ref) => refToUnionType(ref, lexUri, imports)) 691 + : [refToType(schema.ref, stripScheme(stripHash(lexUri)), imports)]; 692 + if (schema.type === "union" && !schema.closed) { 693 + types.push("{ $type: string }"); 694 + } 695 + file.addTypeAlias({ 696 + name: "OutputSchema", 697 + type: types.join("|"), 698 + isExported: true, 699 + }); 700 + } else { 701 + // Check if schema is empty (no properties) 702 + const isEmpty = !schema.properties || 703 + Object.keys(schema.properties).length === 0; 704 + if (isEmpty) { 705 + //= export type OutputSchema = Record<PropertyKey, never> 706 + file.addTypeAlias({ 707 + name: "OutputSchema", 708 + type: "globalThis.Record<PropertyKey, never>", 709 + isExported: true, 710 + }); 711 + } else { 712 + //= export interface OutputSchema {...} 713 + genObject(file, imports, lexUri, schema, `OutputSchema`, { 714 + defaultsArePresent, 715 + }); 716 + } 717 + } 718 + } 719 + } 720 + 721 + export function genRecord( 722 + file: SourceFile, 723 + imports: Set<string>, 724 + lexicons: Lexicons, 725 + lexUri: string, 726 + ) { 727 + const def = lexicons.getDefOrThrow(lexUri, ["record"]); 728 + 729 + //= export interface Record {...} 730 + genObject(file, imports, lexUri, def.record, "Record", { 731 + defaultsArePresent: true, 732 + allowUnknownProperties: true, 733 + typeProperty: "required", 734 + }); 735 + 736 + //= export function isRecord(v: unknown): v is Record {...} 737 + genObjHelpers(file, 
lexUri, "Record", { 738 + requireTypeProperty: true, 739 + }); 740 + } 741 + 742 + function genObjHelpers( 743 + file: SourceFile, 744 + lexUri: string, 745 + ifaceName: string, 746 + { 747 + requireTypeProperty, 748 + }: { 749 + requireTypeProperty: boolean; 750 + }, 751 + ) { 752 + const hash = getHash(lexUri); 753 + 754 + const hashVar = `hash${ifaceName}`; 755 + 756 + file.addVariableStatement({ 757 + isExported: false, 758 + declarationKind: VariableDeclarationKind.Const, 759 + declarations: [{ name: hashVar, initializer: JSON.stringify(hash) }], 760 + }); 761 + 762 + const isX = toCamelCase(`is-${ifaceName}`); 763 + 764 + //= export function is{X}<V>(v: V) {...} 765 + file 766 + .addFunction({ 767 + name: isX, 768 + typeParameters: [{ name: `V` }], 769 + parameters: [{ name: `v`, type: `V` }], 770 + isExported: true, 771 + }) 772 + .setBodyText(`return is$typed(v, id, ${hashVar})`); 773 + 774 + const validateX = toCamelCase(`validate-${ifaceName}`); 775 + 776 + //= export function validate{X}(v: unknown) {...} 777 + file 778 + .addFunction({ 779 + name: validateX, 780 + typeParameters: [{ name: `V` }], 781 + parameters: [{ name: `v`, type: `V` }], 782 + isExported: true, 783 + }) 784 + .setBodyText( 785 + `return validate<${ifaceName} & V>(v, id, ${hashVar}${ 786 + requireTypeProperty ? 
", true" : "" 787 + })`, 788 + ); 789 + } 790 + 791 + export function stripScheme(uri: string): string { 792 + if (uri.startsWith("lex:")) return uri.slice(4); 793 + return uri; 794 + } 795 + 796 + export function stripHash(uri: string): string { 797 + return uri.split("#")[0] || ""; 798 + } 799 + 800 + export function getHash(uri: string): string { 801 + return uri.split("#").pop() || ""; 802 + } 803 + 804 + export function ipldToType(def: LexCidLink | LexBytes) { 805 + if (def.type === "bytes") { 806 + return "Uint8Array"; 807 + } 808 + return "CID"; 809 + } 810 + 811 + function refToUnionType( 812 + ref: string, 813 + lexUri: string, 814 + imports: Set<string>, 815 + ): string { 816 + const baseNsid = stripScheme(stripHash(lexUri)); 817 + return `$Typed<${refToType(ref, baseNsid, imports)}>`; 818 + } 819 + 820 + function refToType( 821 + ref: string, 822 + baseNsid: string, 823 + imports: Set<string>, 824 + ): string { 825 + // TODO: import external types! 826 + let [refBase, refHash] = ref.split("#"); 827 + refBase = stripScheme(refBase); 828 + if (!refHash) refHash = "main"; 829 + 830 + // internal 831 + if (!refBase || baseNsid === refBase) { 832 + return toTitleCase(refHash); 833 + } 834 + 835 + // external 836 + imports.add(refBase); 837 + return `${toTitleCase(refBase)}.${toTitleCase(refHash)}`; 838 + } 839 + 840 + export function primitiveOrBlobToType( 841 + def: LexBlob | LexPrimitive | LexIpldType, 842 + ): string { 843 + switch (def.type) { 844 + case "blob": 845 + return "BlobRef"; 846 + case "bytes": 847 + return "Uint8Array"; 848 + case "cid-link": 849 + return "CID"; 850 + default: 851 + return primitiveToType(def); 852 + } 853 + } 854 + 855 + export function primitiveToType(def: LexPrimitive): string { 856 + switch (def.type) { 857 + case "string": 858 + if (def.knownValues?.length) { 859 + return `${ 860 + def.knownValues 861 + .map((v) => JSON.stringify(v)) 862 + .join(" | ") 863 + } | (string & globalThis.Record<PropertyKey, never>)`; 864 + } 
else if (def.enum) { 865 + return def.enum.map((v) => JSON.stringify(v)).join(" | "); 866 + } else if (def.const) { 867 + return JSON.stringify(def.const); 868 + } 869 + return "string"; 870 + case "integer": 871 + if (def.enum) { 872 + return def.enum.map((v) => JSON.stringify(v)).join(" | "); 873 + } else if (def.const) { 874 + return JSON.stringify(def.const); 875 + } 876 + return "number"; 877 + case "boolean": 878 + if (def.const) { 879 + return JSON.stringify(def.const); 880 + } 881 + return "boolean"; 882 + case "unknown": 883 + // @TODO Should we use "object" here ? 884 + // the "Record" identifier from typescript get overwritten by the Record 885 + // interface created by lex-cli. 886 + return "{ [_ in string]: unknown }"; // Record<string, unknown> 887 + default: 888 + throw new Error(`Unexpected primitive type: ${JSON.stringify(def)}`); 889 + } 890 + } 891 + 892 + function makeType( 893 + _types: string | string[], 894 + opts?: { array?: boolean; nullable?: boolean }, 895 + ) { 896 + const types = ([] as string[]).concat(_types); 897 + if (opts?.nullable) types.push("null"); 898 + const arr = opts?.array ? "[]" : ""; 899 + if (types.length === 1) return `(${types[0]})${arr}`; 900 + if (arr) return `(${types.join(" | ")})${arr}`; 901 + return types.join(" | "); 902 + }
+457
lex-cli/codegen/server.ts
··· 1 + import { 2 + IndentationText, 3 + Project, 4 + type SourceFile, 5 + VariableDeclarationKind, 6 + } from "ts-morph"; 7 + import { type LexiconDoc, Lexicons } from "@atproto/lexicon"; 8 + import { NSID } from "@atproto/syntax"; 9 + import type { GeneratedAPI } from "../types.ts"; 10 + import { gen, lexiconsTs, utilTs } from "./common.ts"; 11 + import { 12 + genCommonImports, 13 + genImports, 14 + genRecord, 15 + genUserType, 16 + genXrpcInput, 17 + genXrpcOutput, 18 + genXrpcParams, 19 + } from "./lex-gen.ts"; 20 + import { 21 + type CodeGenOptions, 22 + type DefTreeNode, 23 + lexiconsToDefTree, 24 + schemasToNsidTokens, 25 + toCamelCase, 26 + toScreamingSnakeCase, 27 + toTitleCase, 28 + } from "./util.ts"; 29 + 30 + export async function genServerApi( 31 + lexiconDocs: LexiconDoc[], 32 + options?: CodeGenOptions, 33 + ): Promise<GeneratedAPI> { 34 + const project = new Project({ 35 + useInMemoryFileSystem: true, 36 + manipulationSettings: { indentationText: IndentationText.TwoSpaces }, 37 + }); 38 + const api: GeneratedAPI = { files: [] }; 39 + const lexicons = new Lexicons(lexiconDocs); 40 + const nsidTree = lexiconsToDefTree(lexiconDocs); 41 + const nsidTokens = schemasToNsidTokens(lexiconDocs); 42 + for (const lexiconDoc of lexiconDocs) { 43 + api.files.push(await lexiconTs(project, lexicons, lexiconDoc, options)); 44 + } 45 + api.files.push(await utilTs(project)); 46 + api.files.push(await lexiconsTs(project, lexiconDocs)); 47 + api.files.push( 48 + await indexTs(project, lexiconDocs, nsidTree, nsidTokens, options), 49 + ); 50 + return api; 51 + } 52 + 53 + const indexTs = ( 54 + project: Project, 55 + lexiconDocs: LexiconDoc[], 56 + nsidTree: DefTreeNode[], 57 + nsidTokens: Record<string, string[]>, 58 + options?: CodeGenOptions, 59 + ) => 60 + gen(project, "/index.ts", (file) => { 61 + const extension = options?.useJsExtension ? 
".js" : ".ts"; 62 + //= import {createServer as createXrpcServer, Server as XrpcServer} from '@sprk/xrpc-server' 63 + file.addImportDeclaration({ 64 + moduleSpecifier: "@sprk/xrpc-server", 65 + namedImports: [ 66 + { name: "Auth", isTypeOnly: true }, 67 + { name: "Options", alias: "XrpcOptions", isTypeOnly: true }, 68 + { name: "Server", alias: "XrpcServer" }, 69 + { name: "StreamConfigOrHandler", isTypeOnly: true }, 70 + { name: "MethodConfigOrHandler", isTypeOnly: true }, 71 + { name: "createServer", alias: "createXrpcServer" }, 72 + ], 73 + }); 74 + //= import {schemas} from './lexicons.ts' 75 + file 76 + .addImportDeclaration({ 77 + moduleSpecifier: "./lexicons.ts", 78 + }) 79 + .addNamedImport({ 80 + name: "schemas", 81 + }); 82 + 83 + // generate type imports 84 + for (const lexiconDoc of lexiconDocs) { 85 + if ( 86 + lexiconDoc.defs.main?.type !== "query" && 87 + lexiconDoc.defs.main?.type !== "subscription" && 88 + lexiconDoc.defs.main?.type !== "procedure" 89 + ) { 90 + continue; 91 + } 92 + file 93 + .addImportDeclaration({ 94 + moduleSpecifier: `./types/${ 95 + lexiconDoc.id.split(".").join("/") 96 + }${extension}`, 97 + }) 98 + .setNamespaceImport(toTitleCase(lexiconDoc.id)); 99 + } 100 + 101 + // generate token enums 102 + for (const nsidAuthority in nsidTokens) { 103 + // export const {THE_AUTHORITY} = { 104 + // {Name}: "{authority.the.name}" 105 + // } 106 + file.addVariableStatement({ 107 + isExported: true, 108 + declarationKind: VariableDeclarationKind.Const, 109 + declarations: [ 110 + { 111 + name: toScreamingSnakeCase(nsidAuthority), 112 + initializer: [ 113 + "{", 114 + ...nsidTokens[nsidAuthority].map( 115 + (nsidName) => 116 + `${toTitleCase(nsidName)}: "${nsidAuthority}.${nsidName}",`, 117 + ), 118 + "}", 119 + ].join("\n"), 120 + }, 121 + ], 122 + }); 123 + } 124 + 125 + //= export function createServer(options?: XrpcOptions) { ... 
} 126 + const createServerFn = file.addFunction({ 127 + name: "createServer", 128 + returnType: "Server", 129 + parameters: [ 130 + { name: "options", type: "XrpcOptions", hasQuestionToken: true }, 131 + ], 132 + isExported: true, 133 + }); 134 + createServerFn.setBodyText(`return new Server(options)`); 135 + 136 + //= export class Server {...} 137 + const serverCls = file.addClass({ 138 + name: "Server", 139 + isExported: true, 140 + }); 141 + //= xrpc: XrpcServer = createXrpcServer(methodSchemas) 142 + serverCls.addProperty({ 143 + name: "xrpc", 144 + type: "XrpcServer", 145 + }); 146 + 147 + // generate classes for the schemas 148 + for (const ns of nsidTree) { 149 + //= ns: NS 150 + serverCls.addProperty({ 151 + name: ns.propName, 152 + type: ns.className, 153 + }); 154 + 155 + // class... 156 + genNamespaceCls(file, ns); 157 + } 158 + 159 + //= constructor (options?: XrpcOptions) { 160 + //= this.xrpc = createXrpcServer(schemas, options) 161 + //= {namespace declarations} 162 + //= } 163 + serverCls 164 + .addConstructor({ 165 + parameters: [ 166 + { name: "options", type: "XrpcOptions", hasQuestionToken: true }, 167 + ], 168 + }) 169 + .setBodyText( 170 + [ 171 + "this.xrpc = createXrpcServer(schemas, options)", 172 + ...nsidTree.map( 173 + (ns) => `this.${ns.propName} = new ${ns.className}(this)`, 174 + ), 175 + ].join("\n"), 176 + ); 177 + }); 178 + 179 + function genNamespaceCls(file: SourceFile, ns: DefTreeNode) { 180 + //= export class {ns}NS {...} 181 + const cls = file.addClass({ 182 + name: ns.className, 183 + isExported: true, 184 + }); 185 + //= _server: Server 186 + cls.addProperty({ 187 + name: "_server", 188 + type: "Server", 189 + }); 190 + 191 + for (const child of ns.children) { 192 + //= child: ChildNS 193 + cls.addProperty({ 194 + name: child.propName, 195 + type: child.className, 196 + }); 197 + 198 + // recurse 199 + genNamespaceCls(file, child); 200 + } 201 + 202 + //= constructor(server: Server) { 203 + //= this._server = server 204 + 
//= {child namespace declarations} 205 + //= } 206 + const cons = cls.addConstructor(); 207 + cons.addParameter({ 208 + name: "server", 209 + type: "Server", 210 + }); 211 + cons.setBodyText( 212 + [ 213 + `this._server = server`, 214 + ...ns.children.map( 215 + (ns) => `this.${ns.propName} = new ${ns.className}(server)`, 216 + ), 217 + ].join("\n"), 218 + ); 219 + 220 + // methods 221 + for (const userType of ns.userTypes) { 222 + if ( 223 + userType.def.type !== "query" && 224 + userType.def.type !== "subscription" && 225 + userType.def.type !== "procedure" 226 + ) { 227 + continue; 228 + } 229 + const moduleName = toTitleCase(userType.nsid); 230 + const name = toCamelCase(NSID.parse(userType.nsid).name || ""); 231 + const isSubscription = userType.def.type === "subscription"; 232 + const method = cls.addMethod({ 233 + name, 234 + typeParameters: [ 235 + { 236 + name: "A", 237 + constraint: "Auth", 238 + default: "void", 239 + }, 240 + ], 241 + }); 242 + method.addParameter({ 243 + name: "cfg", 244 + type: isSubscription 245 + ? `StreamConfigOrHandler< 246 + A, 247 + ${moduleName}.QueryParams, 248 + ${moduleName}.HandlerOutput, 249 + >` 250 + : `MethodConfigOrHandler< 251 + A, 252 + ${moduleName}.QueryParams, 253 + ${moduleName}.HandlerInput, 254 + ${moduleName}.HandlerOutput, 255 + >`, 256 + }); 257 + const methodType = isSubscription ? "streamMethod" : "method"; 258 + method.setBodyText( 259 + [ 260 + // Placing schema on separate line, since the following one was being formatted 261 + // into multiple lines and causing the ts-ignore to ignore the wrong line. 
262 + `const nsid = '${userType.nsid}' // @ts-ignore - userType.nsid is dynamically generated and TypeScript can't infer its type`, 263 + `return this._server.xrpc.${methodType}(nsid, cfg)`, 264 + ].join("\n"), 265 + ); 266 + } 267 + } 268 + 269 + const lexiconTs = ( 270 + project: Project, 271 + lexicons: Lexicons, 272 + lexiconDoc: LexiconDoc, 273 + options?: CodeGenOptions, 274 + ) => 275 + gen( 276 + project, 277 + `/types/${lexiconDoc.id.split(".").join("/")}${ 278 + options?.useJsExtension ? ".js" : ".ts" 279 + }`, 280 + (file) => { 281 + const main = lexiconDoc.defs.main; 282 + if (main?.type === "query" || main?.type === "procedure") { 283 + const streamingInput = main?.type === "procedure" && 284 + main.input?.encoding && 285 + !main.input.schema; 286 + const streamingOutput = main.output?.encoding && !main.output.schema; 287 + if (streamingInput || streamingOutput) { 288 + //= import stream from 'node:stream' 289 + file.addImportDeclaration({ 290 + moduleSpecifier: "node:stream", 291 + defaultImport: "stream", 292 + }); 293 + } 294 + } 295 + 296 + genCommonImports(file, lexiconDoc.id, lexiconDoc); 297 + 298 + const imports: Set<string> = new Set(); 299 + for (const defId in lexiconDoc.defs) { 300 + const def = lexiconDoc.defs[defId]; 301 + const lexUri = `${lexiconDoc.id}#${defId}`; 302 + if (defId === "main") { 303 + if (def.type === "query" || def.type === "procedure") { 304 + genXrpcParams(file, lexicons, lexUri); 305 + genXrpcInput(file, imports, lexicons, lexUri); 306 + genXrpcOutput(file, imports, lexicons, lexUri, false); 307 + genServerXrpcMethod(file, lexicons, lexUri); 308 + } else if (def.type === "subscription") { 309 + genXrpcParams(file, lexicons, lexUri); 310 + genXrpcOutput(file, imports, lexicons, lexUri, false); 311 + genServerXrpcStreaming(file, lexicons, lexUri); 312 + } else if (def.type === "record") { 313 + genRecord(file, imports, lexicons, lexUri); 314 + } else { 315 + genUserType(file, imports, lexicons, lexUri); 316 + } 317 + } 
else { 318 + genUserType(file, imports, lexicons, lexUri); 319 + } 320 + } 321 + genImports(file, imports, lexiconDoc.id, options); 322 + }, 323 + ); 324 + 325 + function genServerXrpcMethod( 326 + file: SourceFile, 327 + lexicons: Lexicons, 328 + lexUri: string, 329 + ) { 330 + const def = lexicons.getDefOrThrow(lexUri, ["query", "procedure"]); 331 + 332 + //= export interface HandlerInput {...} 333 + if (def.type === "procedure" && def.input?.encoding) { 334 + const handlerInput = file.addInterface({ 335 + name: "HandlerInput", 336 + isExported: true, 337 + }); 338 + 339 + handlerInput.addProperty({ 340 + name: "encoding", 341 + type: def.input.encoding 342 + .split(",") 343 + .map((v) => `'${v.trim()}'`) 344 + .join(" | "), 345 + }); 346 + handlerInput.addProperty({ 347 + name: "body", 348 + type: def.input.schema 349 + ? def.input.encoding.includes(",") 350 + ? "InputSchema | stream.Readable" 351 + : "InputSchema" 352 + : "stream.Readable", 353 + }); 354 + } else { 355 + file.addTypeAlias({ 356 + isExported: true, 357 + name: "HandlerInput", 358 + type: "void", 359 + }); 360 + } 361 + 362 + // export interface HandlerSuccess {...} 363 + let hasHandlerSuccess = false; 364 + if (def.output?.schema || def.output?.encoding) { 365 + hasHandlerSuccess = true; 366 + const handlerSuccess = file.addInterface({ 367 + name: "HandlerSuccess", 368 + isExported: true, 369 + }); 370 + 371 + if (def.output.encoding) { 372 + handlerSuccess.addProperty({ 373 + name: "encoding", 374 + type: def.output.encoding 375 + .split(",") 376 + .map((v) => `'${v.trim()}'`) 377 + .join(" | "), 378 + }); 379 + } 380 + if (def.output?.schema) { 381 + if (def.output.encoding.includes(",")) { 382 + handlerSuccess.addProperty({ 383 + name: "body", 384 + type: "OutputSchema | Uint8Array | stream.Readable", 385 + }); 386 + } else { 387 + handlerSuccess.addProperty({ name: "body", type: "OutputSchema" }); 388 + } 389 + } else if (def.output?.encoding) { 390 + handlerSuccess.addProperty({ 391 + name: 
"body", 392 + type: "Uint8Array | stream.Readable", 393 + }); 394 + } 395 + handlerSuccess.addProperty({ 396 + name: "headers?", 397 + type: "{ [key: string]: string }", 398 + }); 399 + } 400 + 401 + // export interface HandlerError {...} 402 + const handlerError = file.addInterface({ 403 + name: "HandlerError", 404 + isExported: true, 405 + }); 406 + handlerError.addProperties([ 407 + { name: "status", type: "number" }, 408 + { name: "message?", type: "string" }, 409 + ]); 410 + if (def.errors?.length) { 411 + handlerError.addProperty({ 412 + name: "error?", 413 + type: def.errors.map((err) => `'${err.name}'`).join(" | "), 414 + }); 415 + } 416 + 417 + // export type HandlerOutput = ... 418 + file.addTypeAlias({ 419 + isExported: true, 420 + name: "HandlerOutput", 421 + type: `HandlerError | ${hasHandlerSuccess ? "HandlerSuccess" : "void"}`, 422 + }); 423 + } 424 + 425 + function genServerXrpcStreaming( 426 + file: SourceFile, 427 + lexicons: Lexicons, 428 + lexUri: string, 429 + ) { 430 + const def = lexicons.getDefOrThrow(lexUri, ["subscription"]); 431 + 432 + file.addImportDeclaration({ 433 + moduleSpecifier: "@sprk/xrpc-server", 434 + namedImports: [{ name: "ErrorFrame" }], 435 + }); 436 + 437 + // export type HandlerError = ... 438 + file.addTypeAlias({ 439 + name: "HandlerError", 440 + isExported: true, 441 + type: `ErrorFrame<${arrayToUnion(def.errors?.map((e) => e.name))}>`, 442 + }); 443 + 444 + // export type HandlerOutput = ... 445 + file.addTypeAlias({ 446 + isExported: true, 447 + name: "HandlerOutput", 448 + type: `HandlerError | ${def.message?.schema ? "OutputSchema" : "void"}`, 449 + }); 450 + } 451 + 452 + function arrayToUnion(arr?: string[]) { 453 + if (!arr?.length) { 454 + return "never"; 455 + } 456 + return arr.map((item) => `'${item}'`).join(" | "); 457 + }
+105
lex-cli/codegen/util.ts
··· 1 + import type { LexiconDoc, LexUserType } from "@atproto/lexicon"; 2 + import { NSID } from "@atproto/syntax"; 3 + 4 + export interface CodeGenOptions { 5 + useJsExtension?: boolean; 6 + } 7 + 8 + export interface DefTreeNodeUserType { 9 + nsid: string; 10 + def: LexUserType; 11 + } 12 + 13 + export interface DefTreeNode { 14 + name: string; 15 + className: string; 16 + propName: string; 17 + children: DefTreeNode[]; 18 + userTypes: DefTreeNodeUserType[]; 19 + } 20 + 21 + export function lexiconsToDefTree(lexicons: LexiconDoc[]): DefTreeNode[] { 22 + const tree: DefTreeNode[] = []; 23 + for (const lexicon of lexicons) { 24 + if (!lexicon.defs.main) { 25 + continue; 26 + } 27 + const node = getOrCreateNode(tree, lexicon.id.split(".").slice(0, -1)); 28 + node.userTypes.push({ nsid: lexicon.id, def: lexicon.defs.main }); 29 + } 30 + return tree; 31 + } 32 + 33 + function getOrCreateNode(tree: DefTreeNode[], path: string[]): DefTreeNode { 34 + let node: DefTreeNode | undefined; 35 + for (let i = 0; i < path.length; i++) { 36 + const segment = path[i]; 37 + node = tree.find((v) => v.name === segment); 38 + if (!node) { 39 + node = { 40 + name: segment, 41 + className: `${toTitleCase(path.slice(0, i + 1).join("-"))}NS`, 42 + propName: toCamelCase(segment), 43 + children: [], 44 + userTypes: [], 45 + } as DefTreeNode; 46 + tree.push(node); 47 + } 48 + tree = node.children; 49 + } 50 + if (!node) throw new Error(`Invalid schema path: ${path.join(".")}`); 51 + return node; 52 + } 53 + 54 + export function schemasToNsidTokens( 55 + lexiconDocs: LexiconDoc[], 56 + ): Record<string, string[]> { 57 + const nsidTokens: Record<string, string[]> = {}; 58 + for (const lexiconDoc of lexiconDocs) { 59 + const nsidp = NSID.parse(lexiconDoc.id); 60 + if (!nsidp.name) continue; 61 + for (const defId in lexiconDoc.defs) { 62 + const def = lexiconDoc.defs[defId]; 63 + if (def.type !== "token") continue; 64 + const authority = nsidp.segments.slice(0, -1).join("."); 65 + 
nsidTokens[authority] ??= []; 66 + nsidTokens[authority].push( 67 + nsidp.name + (defId === "main" ? "" : `#${defId}`), 68 + ); 69 + } 70 + } 71 + return nsidTokens; 72 + } 73 + 74 + export function toTitleCase(v: string): string { 75 + v = v.replace(/^([a-z])/gi, (_, g) => g.toUpperCase()); // upper-case first letter 76 + v = v.replace(/[.#-]([a-z])/gi, (_, g) => g.toUpperCase()); // uppercase any dash, dot, or hash segments 77 + return v.replace(/[.-]/g, ""); // remove lefover dashes or dots 78 + } 79 + 80 + export function toCamelCase(v: string): string { 81 + v = v.replace(/[.#-]([a-z])/gi, (_, g) => g.toUpperCase()); // uppercase any dash, dot, or hash segments 82 + return v.replace(/[.-]/g, ""); // remove lefover dashes or dots 83 + } 84 + 85 + export function toScreamingSnakeCase(v: string): string { 86 + v = v.replace(/[.#-]+/gi, "_"); // convert dashes, dots, and hashes into underscores 87 + return v.toUpperCase(); // and scream! 88 + } 89 + 90 + export async function formatGeneratedFiles(outDir: string) { 91 + console.log("Formatting generated files..."); 92 + const cmd = new Deno.Command("deno", { 93 + args: ["fmt", outDir], 94 + cwd: Deno.cwd(), 95 + }); 96 + 97 + const { code, stderr } = await cmd.output(); 98 + 99 + if (code !== 0) { 100 + const errorMsg = new TextDecoder().decode(stderr); 101 + console.warn(`Warning: deno fmt failed: ${errorMsg}`); 102 + } else { 103 + console.log("Files formatted successfully."); 104 + } 105 + }
+16
lex-cli/deno.json
··· 1 + { 2 + "name": "@atp/lex-cli", 3 + "version": "0.1.0-alpha.1", 4 + "exports": "./mod.ts", 5 + "license": "MIT", 6 + "imports": { 7 + "@cliffy/ansi": "jsr:@cliffy/ansi@^1.0.0-rc.8", 8 + "@cliffy/command": "jsr:@cliffy/command@^1.0.0-rc.8", 9 + "@std/fs": "jsr:@std/fs@^1.0.19", 10 + "@std/path": "jsr:@std/path@^1.1.2", 11 + "@atproto/lexicon": "npm:@atproto/lexicon@^0.4.14", 12 + "@atproto/syntax": "npm:@atproto/syntax@^0.4.1", 13 + "ts-morph": "jsr:@ts-morph/ts-morph@^26.0.0", 14 + "zod": "jsr:@zod/zod@^4.1.5" 15 + } 16 + }
+78
lex-cli/mdgen/index.ts
··· 1 + import { readFileSync } from "@std/fs/unstable-read-file"; 2 + import { writeFileSync } from "@std/fs/unstable-write-file"; 3 + import type { LexiconDoc } from "@atproto/lexicon"; 4 + 5 + const INSERT_START = [ 6 + "<!-- START lex generated content. Please keep comment here to allow auto update -->", 7 + "<!-- DON'T EDIT THIS SECTION! INSTEAD RE-RUN lex TO UPDATE -->", 8 + ]; 9 + const INSERT_END = [ 10 + "<!-- END lex generated TOC please keep comment here to allow auto update -->", 11 + ]; 12 + 13 + export async function process(outFilePath: string, lexicons: LexiconDoc[]) { 14 + let existingContent = ""; 15 + try { 16 + existingContent = new TextDecoder().decode(readFileSync(outFilePath)); 17 + } catch { 18 + // ignore - no existing content 19 + } 20 + const fileLines: StringTree = existingContent.split("\n"); 21 + 22 + // find previously generated content 23 + let startIndex = fileLines.findIndex((line) => matchesStart(line as string)); 24 + let endIndex = fileLines.findIndex((line) => matchesEnd(line as string)); 25 + if (startIndex === -1) { 26 + startIndex = fileLines.length; 27 + } 28 + if (endIndex === -1) { 29 + endIndex = fileLines.length; 30 + } 31 + 32 + // generate & insert content 33 + fileLines.splice(startIndex, endIndex - startIndex + 1, [ 34 + INSERT_START, 35 + await genMdLines(lexicons), 36 + INSERT_END, 37 + ]); 38 + 39 + writeFileSync(outFilePath, new TextEncoder().encode(merge(fileLines))); 40 + } 41 + 42 + function genMdLines(lexicons: LexiconDoc[]): StringTree { 43 + const doc: StringTree = []; 44 + for (const lexicon of lexicons) { 45 + console.log(lexicon.id); 46 + const desc: StringTree = []; 47 + if (lexicon.description) { 48 + desc.push(lexicon.description, ``); 49 + } 50 + doc.push([ 51 + `---`, 52 + ``, 53 + `## ${lexicon.id}`, 54 + "", 55 + desc, 56 + "```json", 57 + JSON.stringify(lexicon, null, 2), 58 + "```", 59 + ]); 60 + } 61 + return doc; 62 + } 63 + 64 + type StringTree = (StringTree | string | undefined)[]; 65 + 
function merge(arr: StringTree): string { 66 + return arr 67 + .flat(10) 68 + .filter((v) => typeof v === "string") 69 + .join("\n"); 70 + } 71 + 72 + function matchesStart(line: string) { 73 + return /<!-- START lex /.test(line); 74 + } 75 + 76 + function matchesEnd(line: string) { 77 + return /<!-- END lex /.test(line); 78 + }
+12
lex-cli/mod.ts
··· 1 + #!/usr/bin/env node 2 + 3 + import { Command } from "@cliffy/command"; 4 + import { genApi, genMd, genServer, genTsObj } from "./cmd/index.ts"; 5 + 6 + new Command() 7 + .name("lex-cli") 8 + .description("Lexicon CLI") 9 + .command("gen-api", genApi) 10 + .command("gen-md", genMd) 11 + .command("gen-server", genServer) 12 + .command("gen-ts-obj", genTsObj);
+14
lex-cli/types.ts
··· 1 + export interface GeneratedFile { 2 + path: string; 3 + content: string; 4 + } 5 + 6 + export interface GeneratedAPI { 7 + files: GeneratedFile[]; 8 + } 9 + 10 + export interface FileDiff { 11 + act: "add" | "mod" | "del"; 12 + path: string; 13 + content?: string; 14 + }
+172
lex-cli/util.ts
··· 1 + import { readFileSync } from "@std/fs/unstable-read-file"; 2 + import { statSync } from "@std/fs/unstable-stat"; 3 + import { mkdirSync } from "@std/fs/unstable-mkdir"; 4 + import { writeFileSync } from "@std/fs/unstable-write-file"; 5 + import { existsSync } from "@std/fs"; 6 + import { join } from "@std/path"; 7 + import { removeSync } from "@std/fs/unstable-remove"; 8 + import { readDirSync } from "@std/fs/unstable-read-dir"; 9 + import { colors } from "@cliffy/ansi/colors"; 10 + import { ZodError } from "zod"; 11 + import { type LexiconDoc, parseLexiconDoc } from "@atproto/lexicon"; 12 + import type { FileDiff, GeneratedAPI } from "./types.ts"; 13 + 14 + type RecursiveZodError = { 15 + _errors?: string[]; 16 + [k: string]: RecursiveZodError | string[] | undefined; 17 + }; 18 + 19 + export function readAllLexicons(paths: string[] | string): LexiconDoc[] { 20 + const docs: LexiconDoc[] = []; 21 + for (const path of Array.isArray(paths) ? paths : [paths]) { 22 + if (statSync(path).isDirectory) { 23 + // If it's a directory, recursively read all .json files in it 24 + const entries = Array.from(readDirSync(path)); 25 + const subPaths = entries.map((entry) => join(path, entry.name)); 26 + docs.push(...readAllLexicons(subPaths)); 27 + } else if (path.endsWith(".json") && statSync(path).isFile) { 28 + try { 29 + docs.push(readLexicon(path)); 30 + } catch { 31 + // skip 32 + } 33 + } 34 + } 35 + return docs; 36 + } 37 + 38 + export function readLexicon(path: string): LexiconDoc { 39 + let str: string; 40 + let obj: unknown; 41 + try { 42 + str = new TextDecoder().decode(readFileSync(path)); 43 + } catch (e) { 44 + console.error(`Failed to read file`, path); 45 + throw e; 46 + } 47 + try { 48 + obj = JSON.parse(str); 49 + } catch (e) { 50 + console.error(`Failed to parse JSON in file`, path); 51 + throw e; 52 + } 53 + if ( 54 + obj && 55 + typeof obj === "object" && 56 + typeof (obj as LexiconDoc).lexicon === "number" 57 + ) { 58 + try { 59 + return 
parseLexiconDoc(obj); 60 + } catch (e) { 61 + console.error(`Invalid lexicon`, path); 62 + if (e instanceof ZodError) { 63 + printZodError(e.format()); 64 + } 65 + throw e; 66 + } 67 + } else { 68 + console.error(`Not lexicon schema`, path); 69 + throw new Error(`Not lexicon schema`); 70 + } 71 + } 72 + 73 + export function genTsObj(lexicons: LexiconDoc[]): string { 74 + return `export const lexicons = ${JSON.stringify(lexicons, null, 2)}`; 75 + } 76 + 77 + export function genFileDiff(outDir: string, api: GeneratedAPI) { 78 + const diffs: FileDiff[] = []; 79 + const existingFiles = readdirRecursiveSync(outDir); 80 + 81 + for (const file of api.files) { 82 + file.path = join(outDir, file.path); 83 + if (existingFiles.includes(file.path)) { 84 + diffs.push({ act: "mod", path: file.path, content: file.content }); 85 + } else { 86 + diffs.push({ act: "add", path: file.path, content: file.content }); 87 + } 88 + } 89 + for (const filepath of existingFiles) { 90 + if (api.files.find((f) => f.path === filepath)) { 91 + // do nothing 92 + } else { 93 + diffs.push({ act: "del", path: filepath }); 94 + } 95 + } 96 + 97 + return diffs; 98 + } 99 + 100 + export function printFileDiff(diff: FileDiff[]) { 101 + for (const d of diff) { 102 + switch (d.act) { 103 + case "add": 104 + console.log(`${colors.bold.green("[+ add]")} ${d.path}`); 105 + break; 106 + case "mod": 107 + console.log(`${colors.bold.yellow("[* mod]")} ${d.path}`); 108 + break; 109 + case "del": 110 + console.log(`${colors.bold.green("[- del]")} ${d.path}`); 111 + break; 112 + } 113 + } 114 + } 115 + 116 + export function applyFileDiff(diff: FileDiff[]) { 117 + for (const d of diff) { 118 + switch (d.act) { 119 + case "add": 120 + case "mod": 121 + mkdirSync(join(d.path, ".."), { recursive: true }); // lazy way to make sure the parent dir exists 122 + writeFileSync(d.path, new TextEncoder().encode(d.content || "")); 123 + break; 124 + case "del": 125 + removeSync(d.path); 126 + break; 127 + } 128 + } 129 + } 130 
+ 131 + function isRecursiveZodError(value: unknown): value is RecursiveZodError { 132 + return value !== null && typeof value === "object"; 133 + } 134 + 135 + function printZodError(node: RecursiveZodError, path = ""): boolean { 136 + if (node._errors?.length) { 137 + console.log(colors.red(`Issues at ${path}:`)); 138 + for (const err of dedup(node._errors)) { 139 + console.log(colors.red(` - ${err}`)); 140 + } 141 + return true; 142 + } else { 143 + for (const k in node) { 144 + if (k === "_errors") { 145 + continue; 146 + } 147 + const value = node[k]; 148 + if (isRecursiveZodError(value)) { 149 + printZodError(value, `${path}/${k}`); 150 + } 151 + } 152 + } 153 + return false; 154 + } 155 + 156 + function readdirRecursiveSync(root: string, files: string[] = [], prefix = "") { 157 + const dir = join(root, prefix); 158 + if (!existsSync(dir)) return files; 159 + if (statSync(dir).isDirectory) { 160 + Array.from(readDirSync(dir)).forEach(function (entry) { 161 + readdirRecursiveSync(root, files, join(prefix, entry.name)); 162 + }); 163 + } else if (prefix.endsWith(".ts")) { 164 + files.push(join(root, prefix)); 165 + } 166 + 167 + return files; 168 + } 169 + 170 + function dedup(arr: string[]): string[] { 171 + return Array.from(new Set(arr)); 172 + }
xrpc-server/.DS_Store

This is a binary file and will not be displayed.

+324
xrpc-server/auth.ts
··· 1 + import * as ui8 from "uint8arrays"; 2 + import * as common from "@atproto/common"; 3 + import { MINUTE } from "@atproto/common"; 4 + import * as crypto from "@atproto/crypto"; 5 + import { AuthRequiredError } from "./errors.ts"; 6 + 7 + /** 8 + * Parameters for creating a service JWT. 9 + * Used for service-to-service authentication in XRPC systems. 10 + */ 11 + type ServiceJwtParams = { 12 + iss: string; 13 + aud: string; 14 + iat?: number; 15 + exp?: number; 16 + lxm: string | null; 17 + keypair: crypto.Keypair; 18 + }; 19 + 20 + /** 21 + * JWT header structure containing algorithm and additional fields. 22 + */ 23 + type ServiceJwtHeaders = { 24 + alg: string; 25 + } & Record<string, unknown>; 26 + 27 + /** 28 + * JWT payload structure containing standard and XRPC-specific claims. 29 + */ 30 + type ServiceJwtPayload = { 31 + iss: string; 32 + aud: string; 33 + exp: number; 34 + lxm?: string; 35 + jti?: string; 36 + nonce?: string; 37 + }; 38 + 39 + /** 40 + * Creates a signed JWT for service-to-service authentication. 41 + * The JWT includes standard claims (iss, aud, exp) and optional claims (lxm). 42 + * The token is signed using the provided keypair. 43 + * 44 + * @param params - Parameters for creating the JWT 45 + * @returns A signed JWT string in the format: header.payload.signature 46 + * 47 + * @example 48 + * ```typescript 49 + * const jwt = await createServiceJwt({ 50 + * iss: 'did:example:issuer', 51 + * aud: 'did:example:audience', 52 + * lxm: 'com.example.method', 53 + * keypair: myKeypair 54 + * }); 55 + * ``` 56 + */ 57 + export const createServiceJwt = async ( 58 + params: ServiceJwtParams, 59 + ): Promise<string> => { 60 + const { iss, aud, keypair } = params; 61 + const iat = params.iat ?? Math.floor(Date.now() / 1e3); 62 + const exp = params.exp ?? iat + MINUTE / 1e3; 63 + const lxm = params.lxm ?? 
undefined; 64 + const jti = crypto.randomStr(16, "hex"); 65 + const header = { 66 + typ: "JWT", 67 + alg: keypair.jwtAlg, 68 + }; 69 + const payload = common.noUndefinedVals({ 70 + iat, 71 + iss, 72 + aud, 73 + exp, 74 + lxm, 75 + jti, 76 + }); 77 + const toSignStr = `${jsonToB64Url(header)}.${jsonToB64Url(payload)}`; 78 + const toSign = ui8.fromString(toSignStr, "utf8"); 79 + const sig = await keypair.sign(toSign); 80 + return `${toSignStr}.${ui8.toString(sig, "base64url")}`; 81 + }; 82 + 83 + /** 84 + * Creates authorization headers containing a service JWT. 85 + * Useful for making authenticated HTTP requests to other services. 86 + * 87 + * @param params - Parameters for creating the JWT 88 + * @returns Object containing authorization header with Bearer token 89 + * 90 + * @example 91 + * ```typescript 92 + * const auth = await createServiceAuthHeaders({ 93 + * iss: 'did:example:issuer', 94 + * aud: 'did:example:audience', 95 + * keypair: myKeypair 96 + * }); 97 + * fetch(url, { headers: auth.headers }); 98 + * ``` 99 + */ 100 + export const createServiceAuthHeaders = async ( 101 + params: ServiceJwtParams, 102 + ): Promise<{ headers: { authorization: string } }> => { 103 + const jwt = await createServiceJwt(params); 104 + return { 105 + headers: { authorization: `Bearer ${jwt}` }, 106 + }; 107 + }; 108 + 109 + /** 110 + * Converts a JSON object to a base64url-encoded string. 111 + * @param json - The JSON object to encode 112 + * @returns The base64url-encoded string 113 + * @private 114 + */ 115 + const jsonToB64Url = (json: Record<string, unknown>): string => { 116 + return common.utf8ToB64Url(JSON.stringify(json)); 117 + }; 118 + 119 + /** 120 + * Function type for verifying JWT signatures with a given key. 
121 + * @param key The public key to verify against 122 + * @param msgBytes The message bytes to verify 123 + * @param sigBytes The signature bytes to verify 124 + * @param alg The algorithm used for signing 125 + * @returns Whether the signature is valid 126 + */ 127 + export type VerifySignatureWithKeyFn = ( 128 + key: string, 129 + msgBytes: Uint8Array, 130 + sigBytes: Uint8Array, 131 + alg: string, 132 + ) => Promise<boolean> | boolean; 133 + 134 + /** 135 + * Verifies a JWT's authenticity and claims. 136 + * Performs comprehensive validation including: 137 + * - JWT format and signature 138 + * - Token expiration 139 + * - Audience validation 140 + * - Lexicon method validation 141 + * - Signature verification with key rotation support 142 + * 143 + * @param jwtStr - The JWT to verify 144 + * @param ownDid - The expected audience (null to skip check) 145 + * @param lxm - The expected lexicon method (null to skip check) 146 + * @param getSigningKey - Function to get the issuer's signing key 147 + * @param verifySignatureWithKey - Function to verify signatures 148 + * @returns The verified JWT payload 149 + * @throws {AuthRequiredError} If verification fails 150 + */ 151 + export const verifyJwt = async ( 152 + jwtStr: string, 153 + ownDid: string | null, 154 + lxm: string | null, 155 + getSigningKey: (iss: string, forceRefresh: boolean) => Promise<string>, 156 + verifySignatureWithKey: VerifySignatureWithKeyFn = 157 + cryptoVerifySignatureWithKey, 158 + ): Promise<ServiceJwtPayload> => { 159 + const parts = jwtStr.split("."); 160 + if (parts.length !== 3) { 161 + throw new AuthRequiredError("poorly formatted jwt", "BadJwt"); 162 + } 163 + 164 + const header = parseHeader(parts[0]); 165 + 166 + // The spec does not describe what to do with the "typ" claim. We can, 167 + // however, forbid some values that are not compatible with our use case. 
168 + if ( 169 + // service tokens are not OAuth 2.0 access tokens 170 + // https://datatracker.ietf.org/doc/html/rfc9068 171 + header["typ"] === "at+jwt" || 172 + // "refresh+jwt" is a non-standard type used by the @atproto packages 173 + header["typ"] === "refresh+jwt" || 174 + // "DPoP" proofs are not meant to be used as service tokens 175 + // https://datatracker.ietf.org/doc/html/rfc9449 176 + header["typ"] === "dpop+jwt" 177 + ) { 178 + throw new AuthRequiredError( 179 + `Invalid jwt type "${header["typ"]}"`, 180 + "BadJwtType", 181 + ); 182 + } 183 + 184 + const payload = parsePayload(parts[1]); 185 + const sig = parts[2]; 186 + 187 + if (Date.now() / 1000 > payload.exp) { 188 + throw new AuthRequiredError("jwt expired", "JwtExpired"); 189 + } 190 + if (ownDid !== null && payload.aud !== ownDid) { 191 + throw new AuthRequiredError( 192 + "jwt audience does not match service did", 193 + "BadJwtAudience", 194 + ); 195 + } 196 + if (lxm !== null && payload.lxm !== lxm) { 197 + throw new AuthRequiredError( 198 + payload.lxm !== undefined 199 + ? `bad jwt lexicon method ("lxm"). must match: ${lxm}` 200 + : `missing jwt lexicon method ("lxm"). 
must match: ${lxm}`, 201 + "BadJwtLexiconMethod", 202 + ); 203 + } 204 + 205 + const msgBytes = ui8.fromString(parts.slice(0, 2).join("."), "utf8"); 206 + const sigBytes = ui8.fromString(sig, "base64url"); 207 + 208 + const signingKey = await getSigningKey(payload.iss, false); 209 + const { alg } = header; 210 + 211 + let validSig: boolean; 212 + try { 213 + validSig = await verifySignatureWithKey( 214 + signingKey, 215 + msgBytes, 216 + sigBytes, 217 + alg, 218 + ); 219 + } catch { 220 + throw new AuthRequiredError( 221 + "could not verify jwt signature", 222 + "BadJwtSignature", 223 + ); 224 + } 225 + 226 + if (!validSig) { 227 + // get fresh signing key in case it failed due to a recent rotation 228 + const freshSigningKey = await getSigningKey(payload.iss, true); 229 + try { 230 + validSig = freshSigningKey !== signingKey 231 + ? await verifySignatureWithKey( 232 + freshSigningKey, 233 + msgBytes, 234 + sigBytes, 235 + alg, 236 + ) 237 + : false; 238 + } catch { 239 + throw new AuthRequiredError( 240 + "could not verify jwt signature", 241 + "BadJwtSignature", 242 + ); 243 + } 244 + } 245 + 246 + if (!validSig) { 247 + throw new AuthRequiredError( 248 + "jwt signature does not match jwt issuer", 249 + "BadJwtSignature", 250 + ); 251 + } 252 + 253 + return payload; 254 + }; 255 + 256 + /** 257 + * Default implementation of signature verification using @atproto/crypto. 258 + * Supports malleable signatures for compatibility. 
259 + * 260 + * @param key - The public key to verify against 261 + * @param msgBytes - The message bytes to verify 262 + * @param sigBytes - The signature bytes to verify 263 + * @param alg - The algorithm used for signing 264 + * @returns Whether the signature is valid 265 + */ 266 + export const cryptoVerifySignatureWithKey: VerifySignatureWithKeyFn = ( 267 + key: string, 268 + msgBytes: Uint8Array, 269 + sigBytes: Uint8Array, 270 + alg: string, 271 + ): Promise<boolean> => { 272 + return crypto.verifySignature(key, msgBytes, sigBytes, { 273 + jwtAlg: alg, 274 + allowMalleableSig: true, 275 + }); 276 + }; 277 + 278 + /** 279 + * Parses a base64url-encoded string into a JSON object. 280 + * @param b64 - The base64url-encoded string 281 + * @returns The parsed JSON object 282 + * @private 283 + */ 284 + const parseB64UrlToJson = (b64: string): unknown => { 285 + return JSON.parse(common.b64UrlToUtf8(b64)); 286 + }; 287 + 288 + /** 289 + * Parses and validates a JWT header. 290 + * @param b64 - The base64url-encoded header 291 + * @returns The parsed and validated header 292 + * @throws {AuthRequiredError} If the header is invalid 293 + * @private 294 + */ 295 + const parseHeader = (b64: string): ServiceJwtHeaders => { 296 + const header = parseB64UrlToJson(b64) as ServiceJwtHeaders; 297 + if (!header || typeof header !== "object" || typeof header.alg !== "string") { 298 + throw new AuthRequiredError("poorly formatted jwt", "BadJwt"); 299 + } 300 + return header; 301 + }; 302 + 303 + /** 304 + * Parses and validates a JWT payload. 
305 + * @param b64 - The base64url-encoded payload 306 + * @returns The parsed and validated payload 307 + * @throws {AuthRequiredError} If the payload is invalid 308 + * @private 309 + */ 310 + const parsePayload = (b64: string): ServiceJwtPayload => { 311 + const payload = parseB64UrlToJson(b64) as ServiceJwtPayload; 312 + if ( 313 + !payload || 314 + typeof payload !== "object" || 315 + typeof payload.iss !== "string" || 316 + typeof payload.aud !== "string" || 317 + typeof payload.exp !== "number" || 318 + (payload.lxm && typeof payload.lxm !== "string") || 319 + (payload.nonce && typeof payload.nonce !== "string") 320 + ) { 321 + throw new AuthRequiredError("poorly formatted jwt", "BadJwt"); 322 + } 323 + return payload; 324 + };
+20
xrpc-server/deno.json
··· 1 + { 2 + "name": "@atp/xrpc-server", 3 + "version": "0.1.0-alpha.1", 4 + "exports": "./mod.ts", 5 + "license": "MIT", 6 + "imports": { 7 + "@atproto/crypto": "npm:@atproto/crypto@^0.4.4", 8 + "@atproto/lexicon": "npm:@atproto/lexicon@^0.4.11", 9 + "@atproto/xrpc": "npm:@atproto/xrpc@^0.7.0", 10 + "@std/assert": "jsr:@std/assert@^1.0.14", 11 + "@std/encoding": "jsr:@std/encoding@^1.0.10", 12 + "zod": "jsr:@zod/zod@^4.0.17", 13 + "hono": "jsr:@hono/hono@^4.7.10", 14 + "http-errors": "npm:http-errors@^2.0.0", 15 + "multiformats": "npm:multiformats@^13.3.6", 16 + "rate-limiter-flexible": "npm:rate-limiter-flexible@^2.4.1", 17 + "uint8arrays": "npm:uint8arrays@3.0.0", 18 + "ws": "npm:ws@^8.12.0" 19 + } 20 + }
+469
xrpc-server/errors.ts
··· 1 + import { z } from "zod"; 2 + import { 3 + httpResponseCodeToName, 4 + httpResponseCodeToString, 5 + ResponseType, 6 + ResponseTypeStrings, 7 + XRPCError as XRPCClientError, 8 + } from "@atproto/xrpc"; 9 + 10 + // @NOTE Do not depend (directly or indirectly) on "./types" here, as it would 11 + // create a circular dependency. 12 + 13 + /** 14 + * Zod schema for error result objects. 15 + * Defines the structure of error responses with status code and optional error/message fields. 16 + */ 17 + export const errorResult: z.ZodObject<{ 18 + status: z.ZodNumber; 19 + error: z.ZodOptional<z.ZodString>; 20 + message: z.ZodOptional<z.ZodString>; 21 + }> = z.object({ 22 + status: z.number(), 23 + error: z.string().optional(), 24 + message: z.string().optional(), 25 + }); 26 + 27 + /** 28 + * Type representing an error result object. 29 + * Contains HTTP status code and optional error identifier and message. 30 + */ 31 + export type ErrorResult = z.infer<typeof errorResult>; 32 + 33 + /** 34 + * Type guard to check if a value is an ErrorResult. 35 + * @param v - The value to check 36 + * @returns True if the value matches the ErrorResult schema 37 + */ 38 + export function isErrorResult(v: unknown): v is ErrorResult { 39 + return errorResult.safeParse(v).success; 40 + } 41 + 42 + /** 43 + * Type guard to check if a value is an HTTP error with status, message, and name properties. 
44 + * @param v - The value to check 45 + * @returns True if the value has the expected HTTP error structure 46 + */ 47 + function isHttpErrorLike(v: unknown): v is { 48 + status: number; 49 + message: string; 50 + name: string; 51 + } { 52 + return ( 53 + typeof v === "object" && 54 + v !== null && 55 + "status" in v && 56 + "message" in v && 57 + "name" in v && 58 + typeof (v as { status: unknown }).status === "number" && 59 + typeof (v as { message: unknown }).message === "string" && 60 + typeof (v as { name: unknown }).name === "string" 61 + ); 62 + } 63 + 64 + /** 65 + * Excludes ErrorResult from a value type and throws if the value is an ErrorResult. 66 + * @template V - The value type 67 + * @param v - The value to check and exclude 68 + * @returns The value if it's not an ErrorResult 69 + * @throws {XRPCError} If the value is an ErrorResult 70 + */ 71 + export function excludeErrorResult<V>(v: V): Exclude<V, ErrorResult> { 72 + if (isErrorResult(v)) throw XRPCError.fromErrorResult(v); 73 + return v as Exclude<V, ErrorResult>; 74 + } 75 + 76 + export { ResponseType }; 77 + 78 + /** 79 + * Base class for all XRPC errors. 80 + * Extends the standard Error class with XRPC-specific properties and methods. 81 + */ 82 + export class XRPCError extends Error { 83 + /** 84 + * Creates a new XRPCError instance. 85 + * @param type - The HTTP response type/status code 86 + * @param errorMessage - Optional error message 87 + * @param customErrorName - Optional custom error name 88 + * @param options - Optional error options (including cause) 89 + */ 90 + constructor( 91 + public type: ResponseType, 92 + public errorMessage?: string, 93 + public customErrorName?: string, 94 + options?: ErrorOptions, 95 + ) { 96 + super(errorMessage, options); 97 + } 98 + 99 + /** 100 + * Gets the HTTP status code for this error. 101 + * Validates that the type is a valid HTTP error status code (400-599). 
102 + * @returns The HTTP status code, or 500 if the type is invalid 103 + */ 104 + get statusCode(): number { 105 + const { type } = this; 106 + 107 + // Fool-proofing. `new XRPCError(123.5 as number, '')` does not generate a TypeScript error. 108 + // Because of this, we can end-up with any numeric value instead of an actual `ResponseType`. 109 + // For legacy reasons, the `type` argument is not checked in the constructor, so we check it here. 110 + if (type < 400 || type >= 600 || !Number.isFinite(type)) { 111 + return 500; 112 + } 113 + 114 + return type; 115 + } 116 + 117 + /** 118 + * Gets the error payload for HTTP responses. 119 + * For internal server errors (500), returns generic message instead of error details. 120 + * @returns Object containing error name and message for the response 121 + */ 122 + get payload(): { 123 + error: string | undefined; 124 + message: string | undefined; 125 + } { 126 + return { 127 + error: this.customErrorName ?? this.typeName, 128 + message: this.type === ResponseType.InternalServerError 129 + ? this.typeStr // Do not respond with error details for 500s 130 + : this.errorMessage || this.typeStr, 131 + }; 132 + } 133 + 134 + /** 135 + * Gets the string name of the response type. 136 + * @returns The response type name (e.g., "BadRequest", "NotFound") 137 + */ 138 + get typeName(): string | undefined { 139 + return ResponseType[this.type]; 140 + } 141 + 142 + /** 143 + * Gets the human-readable string description of the response type. 144 + * @returns The response type description (e.g., "Bad Request", "Not Found") 145 + */ 146 + get typeStr(): string | undefined { 147 + return ResponseTypeStrings[this.type]; 148 + } 149 + 150 + /** 151 + * Converts any error-like value into an XRPCError. 152 + * Handles various error types including XRPCError, XRPCClientError, HTTP errors, and generic errors. 
153 + * @param cause - The error or error-like value to convert 154 + * @returns An XRPCError instance 155 + */ 156 + static fromError(cause: unknown): XRPCError { 157 + if (cause instanceof XRPCError) { 158 + return cause; 159 + } 160 + 161 + if (cause instanceof XRPCClientError) { 162 + const { error, message, type } = mapFromClientError(cause); 163 + return new XRPCError(type, message, error, { cause }); 164 + } 165 + 166 + if (isHttpErrorLike(cause)) { 167 + return new XRPCError( 168 + cause.status, 169 + cause.message, 170 + cause.name, 171 + { cause }, 172 + ); 173 + } 174 + 175 + if (isErrorResult(cause)) { 176 + return this.fromErrorResult(cause); 177 + } 178 + 179 + if (cause instanceof Error) { 180 + return new InternalServerError(cause.message, undefined, { cause }); 181 + } 182 + 183 + return new InternalServerError( 184 + "Unexpected internal server error", 185 + undefined, 186 + { cause }, 187 + ); 188 + } 189 + 190 + /** 191 + * Creates an XRPCError from an ErrorResult object. 192 + * @param err - The ErrorResult to convert 193 + * @returns An XRPCError instance 194 + */ 195 + static fromErrorResult(err: ErrorResult): XRPCError { 196 + return new XRPCError(err.status, err.message, err.error, { cause: err }); 197 + } 198 + } 199 + 200 + /** 201 + * Error class for invalid request errors (HTTP 400). 202 + * Used when the client request is malformed or invalid. 203 + */ 204 + export class InvalidRequestError extends XRPCError { 205 + /** 206 + * Creates a new InvalidRequestError. 
207 + * @param errorMessage - Optional error message 208 + * @param customErrorName - Optional custom error name 209 + * @param options - Optional error options 210 + */ 211 + constructor( 212 + errorMessage?: string, 213 + customErrorName?: string, 214 + options?: ErrorOptions, 215 + ) { 216 + super(ResponseType.InvalidRequest, errorMessage, customErrorName, options); 217 + } 218 + 219 + [Symbol.hasInstance](instance: unknown): boolean { 220 + return ( 221 + instance instanceof XRPCError && 222 + instance.type === ResponseType.InvalidRequest 223 + ); 224 + } 225 + } 226 + 227 + /** 228 + * Error class for authentication required errors (HTTP 401). 229 + * Used when the request requires authentication but none was provided or it was invalid. 230 + */ 231 + export class AuthRequiredError extends XRPCError { 232 + /** 233 + * Creates a new AuthRequiredError. 234 + * @param errorMessage - Optional error message 235 + * @param customErrorName - Optional custom error name 236 + * @param options - Optional error options 237 + */ 238 + constructor( 239 + errorMessage?: string, 240 + customErrorName?: string, 241 + options?: ErrorOptions, 242 + ) { 243 + super( 244 + ResponseType.AuthenticationRequired, 245 + errorMessage, 246 + customErrorName, 247 + options, 248 + ); 249 + } 250 + 251 + [Symbol.hasInstance](instance: unknown): boolean { 252 + return ( 253 + instance instanceof XRPCError && 254 + instance.type === ResponseType.AuthenticationRequired 255 + ); 256 + } 257 + } 258 + 259 + /** 260 + * Error class for forbidden errors (HTTP 403). 261 + * Used when the client is authenticated but doesn't have permission to access the resource. 262 + */ 263 + export class ForbiddenError extends XRPCError { 264 + /** 265 + * Creates a new ForbiddenError. 
266 + * @param errorMessage - Optional error message 267 + * @param customErrorName - Optional custom error name 268 + * @param options - Optional error options 269 + */ 270 + constructor( 271 + errorMessage?: string, 272 + customErrorName?: string, 273 + options?: ErrorOptions, 274 + ) { 275 + super(ResponseType.Forbidden, errorMessage, customErrorName, options); 276 + } 277 + 278 + [Symbol.hasInstance](instance: unknown): boolean { 279 + return ( 280 + instance instanceof XRPCError && instance.type === ResponseType.Forbidden 281 + ); 282 + } 283 + } 284 + 285 + /** 286 + * Error class for internal server errors (HTTP 500). 287 + * Used when an unexpected error occurs on the server side. 288 + */ 289 + export class InternalServerError extends XRPCError { 290 + /** 291 + * Creates a new InternalServerError. 292 + * @param errorMessage - Optional error message 293 + * @param customErrorName - Optional custom error name 294 + * @param options - Optional error options 295 + */ 296 + constructor( 297 + errorMessage?: string, 298 + customErrorName?: string, 299 + options?: ErrorOptions, 300 + ) { 301 + super( 302 + ResponseType.InternalServerError, 303 + errorMessage, 304 + customErrorName, 305 + options, 306 + ); 307 + } 308 + 309 + [Symbol.hasInstance](instance: unknown): boolean { 310 + return ( 311 + instance instanceof XRPCError && 312 + instance.type === ResponseType.InternalServerError 313 + ); 314 + } 315 + } 316 + 317 + /** 318 + * Error class for upstream failure errors (HTTP 502). 319 + * Used when a dependent service fails or returns an invalid response. 320 + */ 321 + export class UpstreamFailureError extends XRPCError { 322 + /** 323 + * Creates a new UpstreamFailureError. 
324 + * @param errorMessage - Optional error message 325 + * @param customErrorName - Optional custom error name 326 + * @param options - Optional error options 327 + */ 328 + constructor( 329 + errorMessage?: string, 330 + customErrorName?: string, 331 + options?: ErrorOptions, 332 + ) { 333 + super(ResponseType.UpstreamFailure, errorMessage, customErrorName, options); 334 + } 335 + 336 + [Symbol.hasInstance](instance: unknown): boolean { 337 + return ( 338 + instance instanceof XRPCError && 339 + instance.type === ResponseType.UpstreamFailure 340 + ); 341 + } 342 + } 343 + 344 + /** 345 + * Error class for not enough resources errors (HTTP 507). 346 + * Used when the server temporarily cannot handle the request due to resource constraints. 347 + */ 348 + export class NotEnoughResourcesError extends XRPCError { 349 + /** 350 + * Creates a new NotEnoughResourcesError. 351 + * @param errorMessage - Optional error message 352 + * @param customErrorName - Optional custom error name 353 + * @param options - Optional error options 354 + */ 355 + constructor( 356 + errorMessage?: string, 357 + customErrorName?: string, 358 + options?: ErrorOptions, 359 + ) { 360 + super( 361 + ResponseType.NotEnoughResources, 362 + errorMessage, 363 + customErrorName, 364 + options, 365 + ); 366 + } 367 + 368 + [Symbol.hasInstance](instance: unknown): boolean { 369 + return ( 370 + instance instanceof XRPCError && 371 + instance.type === ResponseType.NotEnoughResources 372 + ); 373 + } 374 + } 375 + 376 + /** 377 + * Error class for upstream timeout errors (HTTP 504). 378 + * Used when a dependent service times out or takes too long to respond. 379 + */ 380 + export class UpstreamTimeoutError extends XRPCError { 381 + /** 382 + * Creates a new UpstreamTimeoutError. 
383 + * @param errorMessage - Optional error message 384 + * @param customErrorName - Optional custom error name 385 + * @param options - Optional error options 386 + */ 387 + constructor( 388 + errorMessage?: string, 389 + customErrorName?: string, 390 + options?: ErrorOptions, 391 + ) { 392 + super(ResponseType.UpstreamTimeout, errorMessage, customErrorName, options); 393 + } 394 + 395 + [Symbol.hasInstance](instance: unknown): boolean { 396 + return ( 397 + instance instanceof XRPCError && 398 + instance.type === ResponseType.UpstreamTimeout 399 + ); 400 + } 401 + } 402 + 403 + /** 404 + * Error class for method not implemented errors (HTTP 501). 405 + * Used when the requested XRPC method is not implemented by the server. 406 + */ 407 + export class MethodNotImplementedError extends XRPCError { 408 + /** 409 + * Creates a new MethodNotImplementedError. 410 + * @param errorMessage - Optional error message 411 + * @param customErrorName - Optional custom error name 412 + * @param options - Optional error options 413 + */ 414 + constructor( 415 + errorMessage?: string, 416 + customErrorName?: string, 417 + options?: ErrorOptions, 418 + ) { 419 + super( 420 + ResponseType.MethodNotImplemented, 421 + errorMessage, 422 + customErrorName, 423 + options, 424 + ); 425 + } 426 + 427 + [Symbol.hasInstance](instance: unknown): boolean { 428 + return ( 429 + instance instanceof XRPCError && 430 + instance.type === ResponseType.MethodNotImplemented 431 + ); 432 + } 433 + } 434 + 435 + /** 436 + * Converts an upstream XRPC client error into a downstream ResponseType. 437 + * Maps client error status codes to appropriate server response types. 
438 + * @param error The upstream XRPC client error 439 + * @returns Object containing error details and mapped response type 440 + */ 441 + function mapFromClientError(error: XRPCClientError): { 442 + error: string; 443 + message: string; 444 + type: ResponseType; 445 + } { 446 + switch (error.status) { 447 + case ResponseType.InvalidResponse: 448 + // Upstream server returned an XRPC response that is not compatible with our internal lexicon definitions for that XRPC method. 449 + // @NOTE This could be reflected as both a 500 ("we" are at fault) and 502 ("they" are at fault). Let's be gents about it. 450 + return { 451 + error: httpResponseCodeToName(ResponseType.InternalServerError), 452 + message: httpResponseCodeToString(ResponseType.InternalServerError), 453 + type: ResponseType.InternalServerError, 454 + }; 455 + case ResponseType.Unknown: 456 + // Typically a network error / unknown host 457 + return { 458 + error: httpResponseCodeToName(ResponseType.InternalServerError), 459 + message: httpResponseCodeToString(ResponseType.InternalServerError), 460 + type: ResponseType.InternalServerError, 461 + }; 462 + default: 463 + return { 464 + error: error.error, 465 + message: error.message, 466 + type: error.status, 467 + }; 468 + } 469 + }
+45
xrpc-server/lexicon/index.ts
··· 1 + import { Hono } from "hono"; 2 + import { Lexicons } from "@atproto/lexicon"; 3 + import type { Context, Next } from "hono"; 4 + import type { LexiconDoc } from "@atproto/lexicon"; 5 + 6 + export function createServer(lexicons?: LexiconDoc[]) { 7 + const routes = new Hono(); 8 + const lex = new Lexicons(); 9 + 10 + if (lexicons) { 11 + for (const doc of lexicons) { 12 + lex.add(doc); 13 + } 14 + } 15 + 16 + routes.all("/xrpc/:methodId", async (c: Context, next: Next) => { 17 + const methodId = c.req.param("methodId"); 18 + const def = lex.getDef(methodId); 19 + 20 + if (!def) { 21 + return c.json({ error: "Method Not Found" }, 404); 22 + } 23 + 24 + // Validate method 25 + if (def.type === "query" && c.req.method !== "GET") { 26 + return c.json({ error: "Invalid Method", message: "Expected GET" }, 405); 27 + } else if (def.type === "procedure" && c.req.method !== "POST") { 28 + return c.json({ error: "Invalid Method", message: "Expected POST" }, 405); 29 + } 30 + 31 + // Let the router handle it 32 + await next(); 33 + }); 34 + 35 + return { 36 + routes, 37 + method(nsid: string, handler: (c: Context) => Promise<Response>) { 38 + const def = lex.getDef(nsid); 39 + if (!def) throw new Error(`Unknown lexicon: ${nsid}`); 40 + 41 + const method = def.type === "procedure" ? "post" : "get"; 42 + routes[method](`/xrpc/${nsid}`, handler); 43 + }, 44 + }; 45 + }
+9
xrpc-server/logger.ts
··· 1 + import { subsystemLogger } from "@atp/common"; 2 + 3 + export const LOGGER_NAME = "xrpc-server"; 4 + 5 + export const logger: ReturnType<typeof subsystemLogger> = subsystemLogger( 6 + LOGGER_NAME, 7 + ); 8 + 9 + export default logger;
+198
xrpc-server/mod.ts
/**
 * XRPC Server implementation for atproto services.
 *
 * This module provides a Hono-based server implementation for atproto's XRPC
 * protocol, with support for Lexicon schema validation, authentication, rate
 * limiting, and streaming. Written in TypeScript with full type safety and
 * designed to work across JavaScript runtimes.
 *
 * ## Features
 * - Full Lexicon schema validation
 * - Built on Hono for high performance and runtime compatibility
 * - Authentication (Basic Auth, Bearer tokens, JWT verification)
 * - Rate limiting (global, shared, and per-route)
 * - WebSocket streaming support
 * - Server timing utilities for performance monitoring
 * - Comprehensive error handling with XRPC error types
 * - TypeScript-first with complete type definitions
 *
 * ## Runtime Compatibility
 * Works with Deno, Node.js, Bun, Cloudflare Workers, and other JavaScript
 * runtimes supported by Hono.
 *
 * @example Basic server setup with a simple endpoint
 * ```ts
 * import { createServer } from "jsr:@atp/xrpc-server";
 * import type { LexiconDoc } from "@atproto/lexicon";
 *
 * const lexicons: LexiconDoc[] = [{
 *   lexicon: 1,
 *   id: "com.example.ping",
 *   defs: {
 *     main: {
 *       type: "query",
 *       parameters: {
 *         type: "params",
 *         properties: { message: { type: "string" } },
 *       },
 *       output: {
 *         encoding: "application/json",
 *       },
 *     },
 *   },
 * }];
 *
 * const server = createServer(lexicons);
 * server.method("com.example.ping", {
 *   handler: ({ params }) => ({
 *     encoding: "application/json",
 *     body: { message: params.message || "Hello World!" },
 *   }),
 * });
 *
 * // Deno
 * Deno.serve(server.handler.fetch);
 * ```
 *
 * @example Authentication with custom auth verifiers
 * ```ts
 * import { createServer, AuthRequiredError } from "jsr:@atp/xrpc-server";
 *
 * const server = createServer(lexicons);
 *
 * // Basic Auth verification
 * server.method("com.example.protected", {
 *   auth: async ({ req }) => {
 *     const auth = req.headers.get("Authorization");
 *     if (!auth?.startsWith("Basic ")) {
 *       throw new AuthRequiredError("Basic auth required");
 *     }
 *     const [username, password] = atob(auth.slice(6)).split(":");
 *     if (username !== "admin" || password !== "secret") {
 *       throw new AuthRequiredError("Invalid credentials");
 *     }
 *     return { credentials: { username } };
 *   },
 *   handler: ({ auth }) => ({
 *     encoding: "application/json",
 *     body: { user: auth?.credentials?.username },
 *   }),
 * });
 *
 * // Bearer token verification
 * server.method("com.example.tokenProtected", {
 *   auth: async ({ req }) => {
 *     const token = req.headers.get("Authorization")?.replace("Bearer ", "");
 *     if (!token) throw new AuthRequiredError("Bearer token required");
 *
 *     // Verify token (implement your own logic)
 *     const user = await verifyToken(token);
 *     return { credentials: user };
 *   },
 *   handler: ({ auth }) => ({
 *     encoding: "application/json",
 *     body: { userId: auth?.credentials?.id },
 *   }),
 * });
 * ```
 *
 * @example Rate limiting configuration
 * ```ts
 * import { createServer } from "jsr:@atp/xrpc-server";
 * import { MemoryRateLimiter } from "@atp/xrpc-server";
 *
 * const server = createServer(lexicons, {
 *   rateLimits: {
 *     creator: (opts) => new MemoryRateLimiter(opts),
 *     global: [{
 *       name: "global",
 *       durationMs: 60000, // 1 minute
 *       points: 100,       // 100 requests per minute
 *     }],
 *     shared: [{
 *       name: "auth-heavy",
 *       durationMs: 300000, // 5 minutes
 *       points: 20,         // 20 requests per 5 minutes
 *     }],
 *     bypass: (ctx) => ctx.auth?.credentials?.isAdmin === true,
 *   },
 * });
 *
 * // Per-route rate limiting
 * server.method("com.example.limited", {
 *   rateLimit: [
 *     { name: "auth-heavy" },               // Use shared rate limiter
 *     { durationMs: 60000, points: 10 },    // Additional route-specific limit
 *   ],
 *   handler: () => ({
 *     encoding: "application/json",
 *     body: { status: "ok" },
 *   }),
 * });
 * ```
 *
 * @example Streaming endpoint with proper error handling
 * ```ts
 * import { createServer, ErrorFrame } from "jsr:@atp/xrpc-server";
 *
 * const server = createServer(lexicons);
 *
 * server.streamMethod("com.example.events", {
 *   auth: async ({ req }) => {
 *     // Authenticate streaming connections
 *     const token = req.headers.get("Authorization")?.replace("Bearer ", "");
 *     if (!token) throw new AuthRequiredError("Authentication required");
 *     return { credentials: await verifyToken(token) };
 *   },
 *   handler: async function* ({ auth, signal }) {
 *     try {
 *       const eventStream = subscribeToEvents(auth.credentials.userId);
 *
 *       while (!signal.aborted) {
 *         const event = await eventStream.next();
 *         if (event.done) break;
 *
 *         yield {
 *           timestamp: new Date().toISOString(),
 *           event: event.value,
 *         };
 *       }
 *     } catch (error) {
 *       yield new ErrorFrame({
 *         error: "StreamError",
 *         message: error.message,
 *       });
 *     }
 *   },
 * });
 * ```
 *
 * @example Error handling and custom error conversion
 * ```ts
 * import { createServer, XRPCError, InvalidRequestError } from "jsr:@atp/xrpc-server";
 *
 * const server = createServer(lexicons, {
 *   errorParser: (err) => {
 *     if (err instanceof MyCustomError) {
 *       return new InvalidRequestError(err.message, "CustomError");
 *     }
 *     return XRPCError.fromError(err);
 *   },
 * });
 * ```
 *
 * @module
 */

export * from "./types.ts";
export * from "./auth.ts";
export * from "./server.ts";
export * from "./errors.ts";

export * from "./stream/index.ts";
export * from "./rate-limiter.ts";
export {
  parseReqNsid,
  ServerTimer,
  type ServerTiming,
  serverTimingHeader,
} from "./util.ts";
+376
xrpc-server/rate-limiter.ts
··· 1 + import { 2 + type RateLimiterAbstract, 3 + RateLimiterMemory, 4 + RateLimiterRedis, 5 + RateLimiterRes, 6 + } from "rate-limiter-flexible"; 7 + import { ResponseType, XRPCError } from "./errors.ts"; 8 + import { logger } from "./logger.ts"; 9 + 10 + // @NOTE Do not depend (directly or indirectly) on "./types" here, as it would 11 + // create a circular dependency. 12 + 13 + export interface RateLimiterContext { 14 + req: Request; 15 + res?: Response; 16 + } 17 + 18 + export type CalcKeyFn<C extends RateLimiterContext = RateLimiterContext> = ( 19 + ctx: C, 20 + ) => string | null; 21 + export type CalcPointsFn<C extends RateLimiterContext = RateLimiterContext> = ( 22 + ctx: C, 23 + ) => number; 24 + 25 + export interface RateLimiterI< 26 + C extends RateLimiterContext = RateLimiterContext, 27 + > { 28 + consume: RateLimiterConsume<C>; 29 + reset: RateLimiterReset<C>; 30 + } 31 + 32 + export type RateLimiterConsumeOptions< 33 + C extends RateLimiterContext = RateLimiterContext, 34 + > = { 35 + calcKey?: CalcKeyFn<C>; 36 + calcPoints?: CalcPointsFn<C>; 37 + }; 38 + 39 + export type RateLimiterConsume< 40 + C extends RateLimiterContext = RateLimiterContext, 41 + > = ( 42 + ctx: C, 43 + opts?: RateLimiterConsumeOptions<C>, 44 + ) => Promise<RateLimiterStatus | RateLimitExceededError | null>; 45 + 46 + export type RateLimiterStatus = { 47 + limit: number; 48 + duration: number; 49 + remainingPoints: number; 50 + msBeforeNext: number; 51 + consumedPoints: number; 52 + isFirstInDuration: boolean; 53 + }; 54 + 55 + export type RateLimiterResetOptions< 56 + C extends RateLimiterContext = RateLimiterContext, 57 + > = { 58 + calcKey?: CalcKeyFn<C>; 59 + }; 60 + 61 + export type RateLimiterReset< 62 + C extends RateLimiterContext = RateLimiterContext, 63 + > = (ctx: C, opts?: RateLimiterResetOptions<C>) => Promise<void>; 64 + 65 + export type RateLimiterOptions< 66 + C extends RateLimiterContext = RateLimiterContext, 67 + > = { 68 + keyPrefix: string; 69 + durationMs: 
number; 70 + points: number; 71 + calcKey: CalcKeyFn<C>; 72 + calcPoints: CalcPointsFn<C>; 73 + failClosed?: boolean; 74 + }; 75 + 76 + export class RateLimiter<C extends RateLimiterContext = RateLimiterContext> 77 + implements RateLimiterI<C> { 78 + private readonly failClosed?: boolean; 79 + private readonly calcKey: CalcKeyFn<C>; 80 + private readonly calcPoints: CalcPointsFn<C>; 81 + 82 + constructor( 83 + public limiter: RateLimiterAbstract, 84 + options: RateLimiterOptions<C>, 85 + ) { 86 + this.limiter = limiter; 87 + this.failClosed = options.failClosed ?? false; 88 + this.calcKey = options.calcKey; 89 + this.calcPoints = options.calcPoints; 90 + } 91 + 92 + async consume( 93 + ctx: C, 94 + opts?: RateLimiterConsumeOptions<C>, 95 + ): Promise<RateLimiterStatus | RateLimitExceededError | null> { 96 + const calcKey = opts?.calcKey ?? this.calcKey; 97 + const key = calcKey(ctx); 98 + if (key === null) { 99 + return null; 100 + } 101 + const calcPoints = opts?.calcPoints ?? this.calcPoints; 102 + const points = calcPoints(ctx); 103 + if (points < 1) { 104 + return null; 105 + } 106 + try { 107 + const res = await this.limiter.consume(key, points); 108 + return formatLimiterStatus(this.limiter, res); 109 + } catch (err) { 110 + // yes this library rejects with a res not an error 111 + if (err instanceof RateLimiterRes) { 112 + const status = formatLimiterStatus(this.limiter, err); 113 + return new RateLimitExceededError(status); 114 + } else { 115 + if (this.failClosed) { 116 + throw err; 117 + } 118 + logger.error( 119 + "rate limiter failed to consume points", 120 + { 121 + err, 122 + keyPrefix: this.limiter.keyPrefix, 123 + points: this.limiter.points, 124 + duration: this.limiter.duration, 125 + }, 126 + ); 127 + return null; 128 + } 129 + } 130 + } 131 + 132 + async reset(ctx: C, opts?: RateLimiterResetOptions<C>): Promise<void> { 133 + const key = opts?.calcKey ? 
opts.calcKey(ctx) : this.calcKey(ctx); 134 + if (key === null) { 135 + return; 136 + } 137 + 138 + try { 139 + await this.limiter.delete(key); 140 + } catch (cause) { 141 + throw new Error(`rate limiter failed to reset key: ${key}`, { cause }); 142 + } 143 + } 144 + } 145 + 146 + export class MemoryRateLimiter< 147 + C extends RateLimiterContext = RateLimiterContext, 148 + > extends RateLimiter<C> { 149 + constructor(options: RateLimiterOptions<C>) { 150 + const limiter = new RateLimiterMemory({ 151 + keyPrefix: options.keyPrefix, 152 + duration: Math.floor(options.durationMs / 1000), 153 + points: options.points, 154 + }); 155 + super(limiter, options); 156 + } 157 + } 158 + 159 + export class RedisRateLimiter< 160 + C extends RateLimiterContext = RateLimiterContext, 161 + > extends RateLimiter<C> { 162 + constructor(storeClient: unknown, options: RateLimiterOptions<C>) { 163 + const limiter = new RateLimiterRedis({ 164 + storeClient, 165 + keyPrefix: options.keyPrefix, 166 + duration: Math.floor(options.durationMs / 1000), 167 + points: options.points, 168 + }); 169 + super(limiter, options); 170 + } 171 + } 172 + 173 + export const formatLimiterStatus = ( 174 + limiter: RateLimiterAbstract, 175 + res: RateLimiterRes, 176 + ): RateLimiterStatus => { 177 + return { 178 + limit: limiter.points, 179 + duration: limiter.duration, 180 + remainingPoints: res.remainingPoints, 181 + msBeforeNext: res.msBeforeNext, 182 + consumedPoints: res.consumedPoints, 183 + isFirstInDuration: res.isFirstInDuration, 184 + }; 185 + }; 186 + 187 + export type WrappedRateLimiterOptions< 188 + C extends RateLimiterContext = RateLimiterContext, 189 + > = { 190 + calcKey?: CalcKeyFn<C>; 191 + calcPoints?: CalcPointsFn<C>; 192 + }; 193 + 194 + /** 195 + * Wraps a {@link RateLimiterI} instance with custom key and points calculation 196 + * functions. 
197 + */ 198 + export class WrappedRateLimiter< 199 + C extends RateLimiterContext = RateLimiterContext, 200 + > implements RateLimiterI<C> { 201 + private constructor( 202 + private readonly rateLimiter: RateLimiterI<C>, 203 + private readonly options: Readonly<WrappedRateLimiterOptions<C>>, 204 + ) {} 205 + 206 + consume( 207 + ctx: C, 208 + opts?: RateLimiterConsumeOptions<C>, 209 + ): Promise<RateLimiterStatus | RateLimitExceededError | null> { 210 + return this.rateLimiter.consume(ctx, { 211 + calcKey: opts?.calcKey ?? this.options.calcKey, 212 + calcPoints: opts?.calcPoints ?? this.options.calcPoints, 213 + }); 214 + } 215 + 216 + reset(ctx: C, opts?: RateLimiterResetOptions<C>): Promise<void> { 217 + return this.rateLimiter.reset(ctx, { 218 + calcKey: opts?.calcKey ?? this.options.calcKey, 219 + }); 220 + } 221 + 222 + static from<C extends RateLimiterContext = RateLimiterContext>( 223 + rateLimiter: RateLimiterI<C>, 224 + { calcKey, calcPoints }: WrappedRateLimiterOptions<C> = {}, 225 + ): RateLimiterI<C> { 226 + if (!calcKey && !calcPoints) return rateLimiter; 227 + return new WrappedRateLimiter<C>(rateLimiter, { calcKey, calcPoints }); 228 + } 229 + } 230 + 231 + /** 232 + * Combines multiple rate limiters into one. 233 + * 234 + * The combined rate limiter will return the tightest (most restrictive) of all 235 + * the provided rate limiters. 
236 + */ 237 + export class CombinedRateLimiter< 238 + C extends RateLimiterContext = RateLimiterContext, 239 + > implements RateLimiterI<C> { 240 + private constructor( 241 + private readonly rateLimiters: readonly RateLimiterI<C>[], 242 + ) {} 243 + 244 + async consume( 245 + ctx: C, 246 + opts?: RateLimiterConsumeOptions<C>, 247 + ): Promise<RateLimiterStatus | RateLimitExceededError | null> { 248 + const promises: ReturnType<RateLimiterConsume>[] = []; 249 + for (const rl of this.rateLimiters) promises.push(rl.consume(ctx, opts)); 250 + return await Promise.all(promises).then(getTightestLimit); 251 + } 252 + 253 + async reset(ctx: C, opts?: RateLimiterResetOptions<C>) { 254 + const promises: ReturnType<RateLimiterReset>[] = []; 255 + for (const rl of this.rateLimiters) promises.push(rl.reset(ctx, opts)); 256 + await Promise.all(promises); 257 + } 258 + 259 + static from<C extends RateLimiterContext = RateLimiterContext>( 260 + rateLimiters: readonly RateLimiterI<C>[], 261 + ): RateLimiterI<C> | undefined { 262 + if (rateLimiters.length === 0) return undefined; 263 + if (rateLimiters.length === 1) return rateLimiters[0]; 264 + return new CombinedRateLimiter(rateLimiters); 265 + } 266 + } 267 + 268 + const getTightestLimit = ( 269 + resps: (RateLimiterStatus | RateLimitExceededError | null)[], 270 + ): RateLimiterStatus | RateLimitExceededError | null => { 271 + let lowest: RateLimiterStatus | null = null; 272 + for (const resp of resps) { 273 + if (resp === null) continue; 274 + if (resp instanceof RateLimitExceededError) return resp; 275 + if (lowest === null || resp.remainingPoints < lowest.remainingPoints) { 276 + lowest = resp; 277 + } 278 + } 279 + return lowest; 280 + }; 281 + 282 + export type RouteRateLimiterOptions< 283 + C extends RateLimiterContext = RateLimiterContext, 284 + > = { 285 + bypass?: (ctx: C) => boolean; 286 + }; 287 + 288 + /** 289 + * Wraps a {@link RateLimiterI} interface into a class that will apply the 290 + * appropriate headers to 
the response if a limit is exceeded. 291 + */ 292 + export class RouteRateLimiter<C extends RateLimiterContext = RateLimiterContext> 293 + implements RateLimiterI<C> { 294 + constructor( 295 + private readonly rateLimiter: RateLimiterI<C>, 296 + private readonly options: Readonly<RouteRateLimiterOptions<C>> = {}, 297 + ) {} 298 + 299 + async handle(ctx: C): Promise<RateLimiterStatus | null> { 300 + const { bypass } = this.options; 301 + if (bypass && bypass(ctx)) { 302 + return null; 303 + } 304 + 305 + const result = await this.consume(ctx); 306 + if (result instanceof RateLimitExceededError) { 307 + setStatusHeaders(ctx, result.status); 308 + throw result; 309 + } else if (result != null) { 310 + setStatusHeaders(ctx, result); 311 + } 312 + 313 + return result; 314 + } 315 + 316 + consume( 317 + ...args: Parameters<RateLimiterConsume<C>> 318 + ): Promise<RateLimiterStatus | RateLimitExceededError | null> { 319 + return this.rateLimiter.consume(...args); 320 + } 321 + 322 + reset(...args: Parameters<RateLimiterReset<C>>): Promise<void> { 323 + return this.rateLimiter.reset(...args); 324 + } 325 + 326 + static from<C extends RateLimiterContext = RateLimiterContext>( 327 + rateLimiters: readonly RateLimiterI<C>[], 328 + { bypass }: RouteRateLimiterOptions<C> = {}, 329 + ): RouteRateLimiter<C> | undefined { 330 + const rateLimiter = CombinedRateLimiter.from(rateLimiters); 331 + if (!rateLimiter) return undefined; 332 + 333 + return new RouteRateLimiter(rateLimiter, { bypass }); 334 + } 335 + } 336 + 337 + function setStatusHeaders<C extends RateLimiterContext = RateLimiterContext>( 338 + ctx: C, 339 + status: RateLimiterStatus, 340 + ) { 341 + const resetAt = Math.floor((Date.now() + status.msBeforeNext) / 1e3); 342 + 343 + ctx.res?.headers.set("RateLimit-Limit", status.limit.toString()); 344 + ctx.res?.headers.set("RateLimit-Reset", resetAt.toString()); 345 + ctx.res?.headers.set( 346 + "RateLimit-Remaining", 347 + status.remainingPoints.toString(), 348 + ); 349 + 
ctx.res?.headers.set( 350 + "RateLimit-Policy", 351 + `${status.limit};w=${status.duration}`, 352 + ); 353 + } 354 + 355 + export class RateLimitExceededError extends XRPCError { 356 + constructor( 357 + public status: RateLimiterStatus, 358 + errorMessage?: string, 359 + customErrorName?: string, 360 + options?: ErrorOptions, 361 + ) { 362 + super( 363 + ResponseType.RateLimitExceeded, 364 + errorMessage, 365 + customErrorName, 366 + options, 367 + ); 368 + } 369 + 370 + [Symbol.hasInstance](instance: unknown): boolean { 371 + return ( 372 + instance instanceof XRPCError && 373 + instance.type === ResponseType.RateLimitExceeded 374 + ); 375 + } 376 + }
+654
xrpc-server/server.ts
··· 1 + import type { Context, Handler } from "hono"; 2 + import { Hono } from "hono"; 3 + import { 4 + type LexiconDoc, 5 + Lexicons, 6 + type LexXrpcProcedure, 7 + type LexXrpcQuery, 8 + type LexXrpcSubscription, 9 + } from "@atproto/lexicon"; 10 + import { 11 + excludeErrorResult, 12 + InternalServerError, 13 + InvalidRequestError, 14 + isErrorResult, 15 + MethodNotImplementedError, 16 + XRPCError, 17 + } from "./errors.ts"; 18 + import { 19 + type RateLimiterI, 20 + RateLimitExceededError, 21 + RouteRateLimiter, 22 + } from "./rate-limiter.ts"; 23 + import { ErrorFrame, XrpcStreamServer } from "./stream/index.ts"; 24 + import { 25 + type Auth, 26 + type HandlerContext, 27 + type HandlerSuccess, 28 + type Input, 29 + isHandlerPipeThroughBuffer, 30 + isHandlerPipeThroughStream, 31 + isSharedRateLimitOpts, 32 + type MethodAuthVerifier, 33 + type MethodConfig, 34 + type MethodConfigOrHandler, 35 + type Options, 36 + type Params, 37 + type ServerRateLimitDescription, 38 + type StreamConfig, 39 + type StreamConfigOrHandler, 40 + } from "./types.ts"; 41 + import { 42 + asArray, 43 + createInputVerifier, 44 + decodeUrlQueryParams, 45 + getQueryParams, 46 + parseUrlNsid, 47 + setHeaders, 48 + validateOutput, 49 + } from "./util.ts"; 50 + import { 51 + type CalcKeyFn, 52 + type CalcPointsFn, 53 + type RateLimiterOptions, 54 + WrappedRateLimiter, 55 + type WrappedRateLimiterOptions, 56 + } from "./rate-limiter.ts"; 57 + import type { CatchallHandler, HandlerInput } from "./types.ts"; 58 + import { assert } from "@std/assert"; 59 + 60 + /** 61 + * Creates a new XRPC server instance. 
62 + * @param lexicons - Optional array of lexicon documents to initialize the server with 63 + * @param options - Optional server configuration options 64 + * @returns A new Server instance 65 + */ 66 + export function createServer( 67 + lexicons?: LexiconDoc[], 68 + options?: Options, 69 + ): Server { 70 + return new Server(lexicons, options); 71 + } 72 + 73 + /** 74 + * XRPC server implementation that handles HTTP and WebSocket requests. 75 + * Manages method registration, authentication, rate limiting, and streaming. 76 + */ 77 + export class Server { 78 + /** The underlying Hono HTTP server instance */ 79 + app: Hono; 80 + /** Map of NSID to WebSocket streaming servers for subscriptions */ 81 + subscriptions: Map<string, XrpcStreamServer> = new Map< 82 + string, 83 + XrpcStreamServer 84 + >(); 85 + /** Lexicon registry for schema validation and method definitions */ 86 + lex: Lexicons = new Lexicons(); 87 + /** Server configuration options */ 88 + options: Options; 89 + /** Global rate limiter applied to all routes */ 90 + globalRateLimiter?: RouteRateLimiter<HandlerContext>; 91 + /** Map of named shared rate limiters */ 92 + sharedRateLimiters?: Map<string, RateLimiterI<HandlerContext>>; 93 + 94 + /** 95 + * Creates a new XRPC server instance. 
96 + * @param lexicons - Optional array of lexicon documents to register 97 + * @param opts - Server configuration options 98 + */ 99 + constructor(lexicons?: LexiconDoc[], opts: Options = {}) { 100 + this.app = new Hono(); 101 + this.options = opts; 102 + 103 + if (lexicons) { 104 + this.addLexicons(lexicons); 105 + } 106 + 107 + // Add global middleware 108 + this.app.use("*", this.catchall); 109 + this.app.onError(createErrorHandler(opts)); 110 + 111 + if (opts.rateLimits) { 112 + const { global, shared, creator, bypass } = opts.rateLimits; 113 + 114 + if (global) { 115 + this.globalRateLimiter = RouteRateLimiter.from( 116 + global.map((options) => creator(buildRateLimiterOptions(options))), 117 + { bypass }, 118 + ); 119 + } 120 + 121 + if (shared) { 122 + this.sharedRateLimiters = new Map( 123 + shared.map((options) => [ 124 + options.name, 125 + creator(buildRateLimiterOptions(options)), 126 + ]), 127 + ); 128 + } 129 + } 130 + } 131 + 132 + // handlers 133 + // = 134 + 135 + /** 136 + * Registers a method handler for the specified NSID. 137 + * @param nsid - The namespace identifier for the method 138 + * @param configOrFn - Either a handler function or full method configuration 139 + */ 140 + method( 141 + nsid: string, 142 + configOrFn: MethodConfigOrHandler, 143 + ) { 144 + this.addMethod(nsid, configOrFn); 145 + } 146 + 147 + /** 148 + * Adds a method handler for the specified NSID. 149 + * @param nsid - The namespace identifier for the method 150 + * @param configOrFn - Either a handler function or full method configuration 151 + * @throws {Error} If the method is not found in the lexicon or is not a query/procedure 152 + */ 153 + addMethod( 154 + nsid: string, 155 + configOrFn: MethodConfigOrHandler, 156 + ) { 157 + const config = typeof configOrFn === "function" 158 + ? 
{ handler: configOrFn } 159 + : configOrFn; 160 + const def = this.lex.getDef(nsid); 161 + if (!def || (def.type !== "query" && def.type !== "procedure")) { 162 + throw new Error(`Method not found in lexicon: ${nsid}`); 163 + } 164 + this.addRoute(nsid, def, config); 165 + } 166 + 167 + /** 168 + * Registers a streaming method handler for the specified NSID. 169 + * @param nsid - The namespace identifier for the streaming method 170 + * @param configOrFn - Either a stream handler function or full stream configuration 171 + */ 172 + streamMethod( 173 + nsid: string, 174 + configOrFn: StreamConfigOrHandler, 175 + ) { 176 + this.addStreamMethod(nsid, configOrFn); 177 + } 178 + 179 + /** 180 + * Adds a streaming method handler for the specified NSID. 181 + * @param nsid - The namespace identifier for the streaming method 182 + * @param configOrFn - Either a stream handler function or full stream configuration 183 + * @throws {Error} If the subscription is not found in the lexicon 184 + */ 185 + addStreamMethod( 186 + nsid: string, 187 + configOrFn: StreamConfigOrHandler, 188 + ) { 189 + const config = typeof configOrFn === "function" 190 + ? { handler: configOrFn } 191 + : configOrFn; 192 + const def = this.lex.getDef(nsid); 193 + if (!def || def.type !== "subscription") { 194 + throw new Error(`Subscription not found in lexicon: ${nsid}`); 195 + } 196 + this.addSubscription(nsid, def, config); 197 + } 198 + 199 + // lexicon 200 + // = 201 + 202 + /** 203 + * Adds a lexicon document to the server's schema registry. 204 + * @param doc - The lexicon document to add 205 + */ 206 + addLexicon(doc: LexiconDoc) { 207 + this.lex.add(doc); 208 + } 209 + 210 + /** 211 + * Adds multiple lexicon documents to the server's schema registry. 
212 + * @param docs - Array of lexicon documents to add 213 + */ 214 + addLexicons(docs: LexiconDoc[]) { 215 + for (const doc of docs) { 216 + this.addLexicon(doc); 217 + } 218 + } 219 + 220 + // routes 221 + // = 222 + 223 + /** 224 + * Adds an HTTP route for the specified method. 225 + * @param nsid - The namespace identifier for the method 226 + * @param def - The lexicon definition for the method 227 + * @param config - The method configuration including handler and options 228 + * @protected 229 + */ 230 + protected addRoute( 231 + nsid: string, 232 + def: LexXrpcQuery | LexXrpcProcedure, 233 + config: MethodConfig, 234 + ) { 235 + const path = `/xrpc/${nsid}`; 236 + const handler = this.createHandler(nsid, def, config); 237 + 238 + if (def.type === "procedure") { 239 + this.app.post(path, handler); 240 + } else { 241 + this.app.get(path, handler); 242 + } 243 + } 244 + 245 + /** 246 + * Catchall handler that processes all XRPC routes and applies global rate limiting. 247 + * Only applies to routes starting with "/xrpc/". 248 + */ 249 + catchall: CatchallHandler = async (c, next) => { // catchall handler only applies to XRPC routes 250 + if (!c.req.url.startsWith("/xrpc/")) return next(); 251 + 252 + // Validate the NSID 253 + const nsid = parseUrlNsid(c.req.url); 254 + if (!nsid) { 255 + throw new InvalidRequestError("invalid xrpc path"); 256 + } 257 + 258 + if (this.globalRateLimiter) { 259 + try { 260 + await this.globalRateLimiter.handle({ 261 + req: c.req.raw, 262 + res: new Response(), 263 + auth: undefined, 264 + params: {}, 265 + input: undefined, 266 + async resetRouteRateLimits() {}, 267 + }); 268 + } catch { 269 + return next(); 270 + } 271 + } 272 + 273 + // Ensure that known XRPC methods are only called with the correct HTTP 274 + // method. 275 + const def = this.lex.getDef(nsid); 276 + if (def) { 277 + const expectedMethod = def.type === "procedure" 278 + ? "POST" 279 + : def.type === "query" 280 + ? 
"GET" 281 + : null; 282 + if (expectedMethod != null && expectedMethod !== c.req.method) { 283 + throw new InvalidRequestError( 284 + `Incorrect HTTP method (${c.req.method}) expected ${expectedMethod}`, 285 + ); 286 + } 287 + } 288 + 289 + if (this.options.catchall) { 290 + await this.options.catchall(c, next); 291 + } else if (!def) { 292 + throw new MethodNotImplementedError(); 293 + } else { 294 + await next(); 295 + } 296 + }; 297 + 298 + /** 299 + * Creates a parameter verification function for the given method definition. 300 + * @param _nsid - The namespace identifier (unused) 301 + * @param def - The lexicon definition containing parameter schema 302 + * @returns A function that validates and transforms query parameters 303 + * @protected 304 + */ 305 + protected createParamsVerifier( 306 + _nsid: string, 307 + def: LexXrpcQuery | LexXrpcProcedure | LexXrpcSubscription, 308 + ): (query: Record<string, unknown>) => Params { 309 + if (!def.parameters) { 310 + return () => ({}); 311 + } 312 + return (query: Record<string, unknown>) => { 313 + return query as Params; 314 + }; 315 + } 316 + 317 + /** 318 + * Creates an input verification function for the given method definition. 319 + * @param nsid - The namespace identifier for the method 320 + * @param def - The lexicon definition containing input schema 321 + * @returns A function that validates and transforms request input 322 + * @protected 323 + */ 324 + protected createInputVerifier( 325 + nsid: string, 326 + def: LexXrpcQuery | LexXrpcProcedure, 327 + ): (req: Request) => Promise<HandlerInput | undefined> { 328 + return createInputVerifier(this.lex, nsid, def); 329 + } 330 + 331 + /** 332 + * Creates an authentication verification function. 
333 + * @param _nsid - The namespace identifier (unused) 334 + * @param verifier - Optional custom authentication verifier 335 + * @returns A function that performs authentication for the method 336 + * @protected 337 + */ 338 + protected createAuthVerifier( 339 + _nsid: string, 340 + verifier?: MethodAuthVerifier, 341 + ): (params: Params, input: Input, req: Request) => Promise<Auth> { 342 + return async ( 343 + params: Params, 344 + input: Input, 345 + req: Request, 346 + ): Promise<Auth> => { 347 + if (verifier) { 348 + return await verifier({ 349 + params, 350 + input, 351 + req, 352 + res: new Response(), 353 + }); 354 + } 355 + return undefined; 356 + }; 357 + } 358 + 359 + /** 360 + * Creates a Hono handler function for the specified XRPC method. 361 + * @template A - The authentication type 362 + * @param nsid - The namespace identifier for the method 363 + * @param def - The lexicon definition for the method 364 + * @param routeCfg - The method configuration including handler and options 365 + * @returns A Hono handler function 366 + */ 367 + createHandler<A extends Auth = Auth>( 368 + nsid: string, 369 + def: LexXrpcQuery | LexXrpcProcedure, 370 + routeCfg: MethodConfig<A>, 371 + ): Handler { 372 + const verifyParams = this.createParamsVerifier(nsid, def); 373 + const verifyInput = this.createInputVerifier(nsid, def); 374 + const verifyAuth = this.createAuthVerifier(nsid, routeCfg.auth); 375 + const validateReqNSID = () => nsid; 376 + const validateOutputFn = (output?: HandlerSuccess) => 377 + this.options.validateResponse && output && def.output 378 + ? 
validateOutput(nsid, def, output, this.lex) 379 + : undefined; 380 + 381 + const routeLimiter = this.createRouteRateLimiter(nsid, routeCfg); 382 + 383 + return async (c: Context) => { 384 + try { 385 + validateReqNSID(); 386 + 387 + const query = getQueryParams(c.req.url); 388 + const params = verifyParams(decodeUrlQueryParams(query)); 389 + 390 + let input: Input = undefined; 391 + if (def.type === "procedure") { 392 + input = await verifyInput(c.req.raw); 393 + } 394 + 395 + const auth = await verifyAuth(params, input, c.req.raw); 396 + 397 + const ctx: HandlerContext<A> = { 398 + req: c.req.raw, 399 + res: new Response(), 400 + params, 401 + input, 402 + auth: auth as A, 403 + resetRouteRateLimits: async () => {}, 404 + }; 405 + 406 + if (routeLimiter) { 407 + const result = await routeLimiter.consume(ctx); 408 + if (result instanceof RateLimitExceededError) { 409 + throw result; 410 + } 411 + } 412 + 413 + const output = await routeCfg.handler(ctx); 414 + if (isErrorResult(output)) { 415 + throw output.error; 416 + } 417 + 418 + if (isHandlerPipeThroughBuffer(output)) { 419 + setHeaders(c, output.headers); 420 + return c.body(new Uint8Array(output.buffer), 200, { 421 + "Content-Type": output.encoding, 422 + }); 423 + } else if (isHandlerPipeThroughStream(output)) { 424 + setHeaders(c, output.headers); 425 + return c.body(output.stream, 200, { 426 + "Content-Type": output.encoding, 427 + }); 428 + } 429 + 430 + if (output) { 431 + excludeErrorResult(output); 432 + validateOutputFn(output); 433 + } 434 + 435 + if (output) { 436 + setHeaders(c, output.headers); 437 + if (output.encoding === "application/json") { 438 + return c.json(output.body); 439 + } else { 440 + return c.body(output.body, 200, { 441 + "Content-Type": output.encoding, 442 + }); 443 + } 444 + } 445 + 446 + return c.body(null, 200); 447 + } catch (err: unknown) { 448 + throw err || new InternalServerError(); 449 + } 450 + }; 451 + } 452 + 453 + /** 454 + * Adds a WebSocket subscription handler 
for the specified NSID. 455 + * @param nsid - The namespace identifier for the subscription 456 + * @param _def - The lexicon definition for the subscription (unused) 457 + * @param _config - The stream configuration (unused) 458 + * @protected 459 + */ 460 + protected addSubscription( 461 + nsid: string, 462 + _def: LexXrpcSubscription, 463 + _config: StreamConfig, 464 + ) { 465 + const server = new XrpcStreamServer({ 466 + noServer: true, 467 + handler: async function* (_req: Request, _signal: AbortSignal) { 468 + // Stream handler implementation would go here 469 + yield new ErrorFrame({ 470 + error: "NotImplemented", 471 + message: "Streaming not implemented", 472 + }); 473 + }, 474 + }); 475 + 476 + this.subscriptions.set(nsid, server); 477 + } 478 + 479 + /** 480 + * Creates a route-specific rate limiter based on the method configuration. 481 + * @template A - The authentication type 482 + * @template C - The handler context type 483 + * @param nsid - The namespace identifier for the method 484 + * @param config - The method configuration containing rate limit options 485 + * @returns A route rate limiter or undefined if no rate limiting is configured 486 + * @private 487 + */ 488 + private createRouteRateLimiter<A extends Auth, C extends HandlerContext>( 489 + nsid: string, 490 + config: MethodConfig<A>, 491 + ): RouteRateLimiter<C> | undefined { 492 + // @NOTE global & shared rate limiters are instantiated with a context of 493 + // HandlerContext which is compatible (more generic) with the context of 494 + // this route specific rate limiters (C). For this reason, it's safe to 495 + // cast these with an `any` context 496 + 497 + const globalRateLimiter = this.globalRateLimiter as 498 + | RouteRateLimiter<C> 499 + | undefined; 500 + 501 + // No route specific rate limiting configured, use the global rate limiter. 
502 + if (!config.rateLimit) return globalRateLimiter; 503 + 504 + const { rateLimits } = this.options; 505 + 506 + // @NOTE Silently ignore creation of route specific rate limiter if the 507 + // `rateLimits` options was not provided to the constructor. 508 + if (!rateLimits) return globalRateLimiter; 509 + 510 + const { creator, bypass } = rateLimits; 511 + 512 + const rateLimiters = asArray(config.rateLimit).map((options, i) => { 513 + if (isSharedRateLimitOpts(options)) { 514 + const rateLimiter = this.sharedRateLimiters?.get(options.name); 515 + 516 + // The route config references a shared rate limiter that does not 517 + // exist. This is a configuration error. 518 + assert( 519 + rateLimiter, 520 + `Shared rate limiter "${options.name}" not defined`, 521 + ); 522 + 523 + return WrappedRateLimiter.from<C>( 524 + rateLimiter as unknown as RateLimiterI<C>, 525 + options as unknown as WrappedRateLimiterOptions<C>, 526 + ); 527 + } else { 528 + return creator({ 529 + ...options, 530 + calcKey: options.calcKey ?? defaultKey, 531 + calcPoints: options.calcPoints ?? defaultPoints, 532 + keyPrefix: `${nsid}-${i}`, 533 + }); 534 + } 535 + }); 536 + 537 + // If the route config contains an empty array, use global rate limiter. 538 + if (!rateLimiters.length) return globalRateLimiter; 539 + 540 + // The global rate limiter (if present) should be applied in addition to 541 + // the route specific rate limiters. 542 + if (globalRateLimiter) rateLimiters.push(globalRateLimiter); 543 + 544 + return RouteRateLimiter.from<C>( 545 + rateLimiters as unknown as readonly RateLimiterI<C>[], 546 + { bypass }, 547 + ); 548 + } 549 + 550 + /** 551 + * Gets the underlying Hono app instance for external use. 552 + * @returns The Hono application instance 553 + */ 554 + get handler(): Hono { 555 + return this.app; 556 + } 557 + } 558 + 559 + /** 560 + * Creates an error handler function for the Hono application. 
561 + * @param opts - Server options containing optional error parser 562 + * @returns An error handler function that converts errors to XRPC error responses 563 + */ 564 + function createErrorHandler(opts: Options) { 565 + return (err: Error, c: Context) => { 566 + const errorParser = opts.errorParser || 567 + ((e: unknown) => XRPCError.fromError(e)); 568 + const xrpcError = errorParser(err); 569 + 570 + const statusCode = "statusCode" in xrpcError 571 + ? (xrpcError as { statusCode: number }).statusCode 572 + : 500; 573 + 574 + return c.json( 575 + { 576 + error: xrpcError.type || "InternalServerError", 577 + message: xrpcError.message || "Internal Server Error", 578 + }, 579 + statusCode as 500, 580 + ); 581 + }; 582 + } 583 + 584 + /** 585 + * Type guard to check if an object is a Pino HTTP request object. 586 + * @param obj - The object to check 587 + * @returns True if the object has a req property 588 + * @private 589 + */ 590 + function _isPinoHttpRequest(obj: unknown): obj is { 591 + req: unknown; 592 + } { 593 + return ( 594 + !!obj && 595 + typeof obj === "object" && 596 + "req" in obj 597 + ); 598 + } 599 + 600 + /** 601 + * Converts an error to a simplified error-like object for logging. 602 + * @param err - The error to convert 603 + * @returns A simplified error object or the original value 604 + * @private 605 + */ 606 + function _toSimplifiedErrorLike(err: unknown) { 607 + if (err instanceof Error) { 608 + return { 609 + name: err.name, 610 + message: err.message, 611 + stack: err.stack, 612 + }; 613 + } 614 + return err; 615 + } 616 + 617 + /** 618 + * Builds rate limiter options from a server rate limit description. 
619 + * @template C - The handler context type 620 + * @param options - The server rate limit description 621 + * @returns Rate limiter options with defaults applied 622 + */ 623 + function buildRateLimiterOptions<C extends HandlerContext = HandlerContext>({ 624 + name, 625 + calcKey = defaultKey, 626 + calcPoints = defaultPoints, 627 + ...desc 628 + }: ServerRateLimitDescription<C>): RateLimiterOptions<C> { 629 + return { ...desc, calcKey, calcPoints, keyPrefix: `rl-${name}` }; 630 + } 631 + 632 + /** 633 + * Default function for calculating rate limit points consumed per request. 634 + * Always returns 1 point per request. 635 + */ 636 + const defaultPoints: CalcPointsFn = () => 1; 637 + 638 + /** 639 + * Default function for calculating rate limit keys based on client IP address. 640 + * Extracts IP from X-Forwarded-For, X-Real-IP headers, or falls back to "unknown". 641 + * 642 + * @note When using a proxy, ensure headers are getting forwarded correctly: 643 + * `app.set('trust proxy', true)` 644 + * 645 + * @see {@link https://expressjs.com/en/guide/behind-proxies.html} 646 + */ 647 + const defaultKey: CalcKeyFn<HandlerContext> = ({ req }) => { 648 + const forwarded = req.headers.get("x-forwarded-for"); 649 + const ip = forwarded 650 + ? forwarded.split(",")[0] 651 + : req.headers.get("x-real-ip") || 652 + "unknown"; 653 + return ip; 654 + };
+182
xrpc-server/stream/frames.ts
··· 1 + import * as uint8arrays from "uint8arrays"; 2 + import { cborDecodeMulti, cborEncode } from "@atproto/common"; 3 + import type { 4 + ErrorFrameBody, 5 + ErrorFrameHeader, 6 + FrameHeader, 7 + MessageFrameHeader, 8 + } from "./types.ts"; 9 + import { errorFrameBody, frameHeader, FrameType } from "./types.ts"; 10 + 11 + /** 12 + * Abstract base class for XRPC stream frames. 13 + * Frames are the basic unit of communication in XRPC streaming, consisting of a header and body. 14 + * Each frame is serialized as CBOR for efficient binary transmission. 15 + * 16 + * @abstract 17 + * @property {FrameHeader} header - Frame header containing operation type and metadata 18 + * @property {unknown} body - Frame payload data 19 + */ 20 + export abstract class Frame { 21 + abstract header: FrameHeader; 22 + body: unknown; 23 + 24 + /** 25 + * Gets the operation type of the frame. 26 + * @returns {FrameType} The frame's operation type 27 + */ 28 + get op(): FrameType { 29 + return this.header.op; 30 + } 31 + 32 + /** 33 + * Serializes the frame to a binary format using CBOR encoding. 34 + * The resulting bytes contain both the header and body concatenated. 35 + * @returns {Uint8Array} The serialized frame as bytes 36 + */ 37 + toBytes(): Uint8Array { 38 + return uint8arrays.concat([cborEncode(this.header), cborEncode(this.body)]); 39 + } 40 + 41 + /** 42 + * Type guard to check if this frame is a MessageFrame. 43 + * @returns {boolean} True if this is a MessageFrame 44 + */ 45 + isMessage(): this is MessageFrame<unknown> { 46 + return this.op === FrameType.Message; 47 + } 48 + 49 + /** 50 + * Type guard to check if this frame is an ErrorFrame. 51 + * @returns {boolean} True if this is an ErrorFrame 52 + */ 53 + isError(): this is ErrorFrame { 54 + return this.op === FrameType.Error; 55 + } 56 + 57 + /** 58 + * Deserializes a frame from its binary representation. 59 + * Validates the frame structure and creates the appropriate frame type. 
60 + * 61 + * @param {Uint8Array} bytes - The serialized frame bytes 62 + * @returns {Frame} The deserialized frame (either MessageFrame or ErrorFrame) 63 + * @throws {Error} If the frame format is invalid or unknown 64 + */ 65 + static fromBytes(bytes: Uint8Array): Frame { 66 + const decoded = cborDecodeMulti(bytes); 67 + if (decoded.length > 2) { 68 + throw new Error("Too many CBOR data items in frame"); 69 + } 70 + const header = decoded[0]; 71 + let body: unknown = kUnset; 72 + if (decoded.length > 1) { 73 + body = decoded[1]; 74 + } 75 + const parsedHeader = frameHeader.safeParse(header); 76 + if (!parsedHeader.success) { 77 + throw new Error(`Invalid frame header: ${parsedHeader.error.message}`); 78 + } 79 + if (body === kUnset) { 80 + throw new Error("Missing frame body"); 81 + } 82 + const frameOp = parsedHeader.data.op; 83 + if (frameOp === FrameType.Message) { 84 + return new MessageFrame(body, { 85 + type: parsedHeader.data.t, 86 + }); 87 + } else if (frameOp === FrameType.Error) { 88 + const parsedBody = errorFrameBody.safeParse(body); 89 + if (!parsedBody.success) { 90 + throw new Error( 91 + `Invalid error frame body: ${parsedBody.error.message}`, 92 + ); 93 + } 94 + return new ErrorFrame(parsedBody.data); 95 + } else { 96 + const exhaustiveCheck: never = frameOp; 97 + throw new Error(`Unknown frame op: ${exhaustiveCheck}`); 98 + } 99 + } 100 + } 101 + 102 + /** 103 + * Frame type for sending messages/data over an XRPC stream. 104 + * Can contain any type of payload data and an optional message type identifier. 105 + * 106 + * @template T - The type of the message body, defaults to Record<string, unknown> 107 + * @extends {Frame} 108 + * @property {MessageFrameHeader} header - Message frame header 109 + * @property {T} body - Message payload data 110 + */ 111 + export class MessageFrame<T = Record<string, unknown>> extends Frame { 112 + header: MessageFrameHeader; 113 + override body: T; 114 + 115 + /** 116 + * Creates a new MessageFrame. 
117 + * @param {T} body - The message payload 118 + * @param {Object} [opts] - Optional frame configuration 119 + * @param {string} [opts.type] - Message type identifier 120 + */ 121 + constructor(body: T, opts?: { type?: string }) { 122 + super(); 123 + this.header = opts?.type !== undefined 124 + ? { op: FrameType.Message, t: opts?.type } 125 + : { op: FrameType.Message }; 126 + this.body = body; 127 + } 128 + 129 + /** 130 + * Gets the message type identifier. 131 + * @returns {string | undefined} The message type, if specified 132 + */ 133 + get type(): string | undefined { 134 + return this.header.t; 135 + } 136 + } 137 + 138 + /** 139 + * Frame type for sending errors over an XRPC stream. 140 + * Contains an error code and optional error message. 141 + * 142 + * @template T - The type of error code string 143 + * @extends {Frame} 144 + * @property {ErrorFrameHeader} header - Error frame header 145 + * @property {ErrorFrameBody<T>} body - Error details including code and message 146 + */ 147 + export class ErrorFrame<T extends string = string> extends Frame { 148 + header: ErrorFrameHeader; 149 + override body: ErrorFrameBody<T>; 150 + 151 + /** 152 + * Creates a new ErrorFrame. 153 + * @param {ErrorFrameBody<T>} body - The error details 154 + */ 155 + constructor(body: ErrorFrameBody<T>) { 156 + super(); 157 + this.header = { op: FrameType.Error }; 158 + this.body = body; 159 + } 160 + 161 + /** 162 + * Gets the error code. 163 + * @returns {string} The error code 164 + */ 165 + get code(): string { 166 + return this.body.error; 167 + } 168 + 169 + /** 170 + * Gets the error message. 171 + * @returns {string | undefined} The error message, if provided 172 + */ 173 + get message(): string | undefined { 174 + return this.body.message; 175 + } 176 + } 177 + 178 + /** 179 + * Symbol used internally to detect unset frame body. 180 + * @private 181 + */ 182 + const kUnset = Symbol("unset");
+6
xrpc-server/stream/index.ts
/**
 * Barrel module for the XRPC streaming subsystem: frame types and codecs,
 * async stream readers, the WebSocket server, client subscriptions, and the
 * keep-alive connection wrapper.
 */
export * from "./types.ts";
export * from "./frames.ts";
export * from "./stream.ts";
export * from "./subscription.ts";
export * from "./server.ts";
export * from "./websocket-keepalive.ts";
+24
xrpc-server/stream/logger.ts
import { subsystemLogger } from "@atproto/common";

/**
 * Logger instance for XRPC streaming operations.
 * A subsystem logger (named "xrpc-stream") for events related to WebSocket
 * streaming, connection management, and stream processing.
 *
 * @example
 * ```typescript
 * import { logger } from './logger';
 *
 * logger.info('WebSocket connection established');
 * logger.error(error, 'Stream processing failed');
 * ```
 */
export const logger: ReturnType<typeof subsystemLogger> = subsystemLogger(
  "xrpc-stream",
);

/**
 * Default export of the XRPC stream logger.
 * Same object as the named export, provided for convenience; prefer the
 * named export in new code (easier to grep and refactor).
 */
export default logger;
+109
xrpc-server/stream/server.ts
import { type ServerOptions, WebSocketServer } from "ws";
import { ErrorFrame, type Frame } from "./frames.ts";
// NOTE(review): this resolves to a logger *outside* the stream/ directory,
// yet stream/logger.ts exists alongside this file — confirm whether
// "./logger.ts" was intended.
import { logger } from "../logger.ts";
import { CloseCode, DisconnectError } from "./types.ts";

/**
 * XRPC WebSocket streaming server implementation.
 * Handles WebSocket connections and message streaming for XRPC methods.
 *
 * Lifecycle per connection: run the handler as an async iterator, send each
 * yielded frame, and close with a code reflecting how the stream ended
 * (Normal on completion, Policy after an ErrorFrame, Abnormal on failure).
 * @class
 */
export class XrpcStreamServer {
  // Underlying `ws` WebSocketServer; exposed for upgrade wiring by callers.
  wss: WebSocketServer;

  /**
   * Creates a new XRPC streaming server instance.
   * @constructor
   * @param {Object} opts - Server configuration options
   * @param {Handler} opts.handler - Function to handle incoming WebSocket connections
   * @param {ServerOptions} opts - Additional WebSocket server options
   */
  constructor(opts: ServerOptions & { handler: Handler }) {
    const { handler, ...serverOpts } = opts;
    this.wss = new WebSocketServer(serverOpts);
    this.wss.on(
      "connection",
      // NOTE(review): the `ws` library emits (WebSocket from "ws",
      // http.IncomingMessage) — these web-standard WebSocket/Request
      // annotations may not match the runtime objects; confirm.
      async (socket: WebSocket, req: Request) => {
        socket.onerror = (ev: Event | ErrorEvent) => {
          if (ev instanceof ErrorEvent) {
            logger.error(ev.error, "websocket error");
          } else {
            logger.error(ev, "websocket error");
          }
        };
        try {
          const ac = new AbortController();
          const iterator = unwrapIterator(
            handler(req, ac.signal, socket, this),
          );
          // On close: stop the handler's generator and signal abort so any
          // in-flight work inside the handler can be cancelled.
          socket.onclose = () => {
            iterator.return?.();
            ac.abort();
          };
          const safeFrames = wrapIterator(iterator);
          for await (const frame of safeFrames) {
            // Wrap the (synchronous) send so a throw rejects rather than
            // escaping the loop directly; `send` here does not report
            // delivery completion.
            await new Promise<void>((res, rej) => {
              try {
                socket.send((frame as Frame).toBytes());
                res();
              } catch (err) {
                rej(err);
              }
            });
            // An ErrorFrame terminates the stream: disconnect with a
            // Policy close code carrying the error code.
            if (frame instanceof ErrorFrame) {
              throw new DisconnectError(CloseCode.Policy, frame.body.error);
            }
          }
        } catch (err) {
          if (err instanceof DisconnectError) {
            // Deliberate disconnect (including the ErrorFrame case above).
            return socket.close(err.wsCode, err.xrpcCode);
          } else {
            logger.error({ err }, "websocket server error");
            return socket.close(CloseCode.Abnormal);
          }
        }
        // Handler completed without error: normal close.
        socket.close(CloseCode.Normal);
      },
    );
  }
}

/**
 * Handler function type for WebSocket connections.
 * @callback Handler
 * @param {Request} req - The incoming WebSocket request
 * @param {AbortSignal} signal - Signal for detecting connection abort
 * @param {WebSocket} socket - The WebSocket connection
 * @param {XrpcStreamServer} server - The server instance
 * @returns {AsyncIterable<Frame>} An async iterable of frames to send
 */
export type Handler = (
  req: Request,
  signal: AbortSignal,
  socket: WebSocket,
  server: XrpcStreamServer,
) => AsyncIterable<Frame>;

/**
 * Unwraps an AsyncIterable into its AsyncIterator.
 * Needed so `onclose` can call `iterator.return?.()` on the same iterator
 * the for-await loop is consuming.
 * @template T - The type of values being iterated
 * @param {AsyncIterable<T>} iterable - The iterable to unwrap
 * @returns {AsyncIterator<T>} The unwrapped iterator
 */
function unwrapIterator<T>(iterable: AsyncIterable<T>): AsyncIterator<T> {
  return iterable[Symbol.asyncIterator]();
}

/**
 * Wraps an AsyncIterator back into an AsyncIterable so it can be consumed
 * with for-await.
 * @template T - The type of values being iterated
 * @param {AsyncIterator<T>} iterator - The iterator to wrap
 * @returns {AsyncIterable<T>} The wrapped iterable
 */
function wrapIterator<T>(iterator: AsyncIterator<T>): AsyncIterable<T> {
  return {
    [Symbol.asyncIterator]() {
      return iterator;
    },
  };
}
+98
xrpc-server/stream/stream.ts
··· 1 + import { ResponseType, XRPCError } from "@atproto/xrpc"; 2 + import { Frame } from "./frames.ts"; 3 + import type { MessageFrame } from "./frames.ts"; 4 + 5 + /** 6 + * Converts a WebSocket connection into an async generator of Frame objects. 7 + * Handles both message and error frames, with proper error propagation. 8 + * 9 + * @param {WebSocket} ws - The WebSocket connection to read from 10 + * @yields {Frame} Each frame received from the WebSocket 11 + * @throws {Error} Any WebSocket error that occurs during communication 12 + * 13 + * @example 14 + * ```typescript 15 + * const ws = new WebSocket(url); 16 + * for await (const frame of byFrame(ws)) { 17 + * // Process each frame 18 + * console.log(frame.type); 19 + * } 20 + * ``` 21 + */ 22 + export async function* byFrame( 23 + ws: WebSocket, 24 + ): AsyncGenerator<Frame> { 25 + const messageQueue: Frame[] = []; 26 + let error: Error | null = null; 27 + let done = false; 28 + 29 + ws.onmessage = (ev) => { 30 + if (ev.data instanceof Uint8Array) { 31 + messageQueue.push(Frame.fromBytes(ev.data)); 32 + } 33 + }; 34 + ws.onerror = (ev) => { 35 + if (ev instanceof ErrorEvent) { 36 + error = ev.error; 37 + } 38 + }; 39 + ws.onclose = () => { 40 + done = true; 41 + }; 42 + 43 + while (!done && !error) { 44 + if (messageQueue.length > 0) { 45 + yield messageQueue.shift()!; 46 + } else { 47 + await new Promise((resolve) => setTimeout(resolve, 0)); 48 + } 49 + } 50 + 51 + if (error) throw error; 52 + } 53 + 54 + /** 55 + * Converts a WebSocket connection into an async generator of MessageFrames. 56 + * Automatically filters and validates frames to ensure they are valid messages. 57 + * Error frames are converted to exceptions. 
58 + * 59 + * @param {WebSocket} ws - The WebSocket connection to read from 60 + * @yields {MessageFrame<unknown>} Each message frame received from the WebSocket 61 + * @throws {XRPCError} If an error frame is received or an invalid frame type is encountered 62 + * 63 + * @example 64 + * ```typescript 65 + * const ws = new WebSocket(url); 66 + * for await (const message of byMessage(ws)) { 67 + * // Process each message 68 + * console.log(message.body); 69 + * } 70 + * ``` 71 + */ 72 + export async function* byMessage( 73 + ws: WebSocket, 74 + ): AsyncGenerator<MessageFrame<unknown>> { 75 + for await (const frame of byFrame(ws)) { 76 + yield ensureChunkIsMessage(frame); 77 + } 78 + } 79 + 80 + /** 81 + * Validates that a frame is a MessageFrame and converts it to the appropriate type. 82 + * If the frame is an error frame, throws an XRPCError with the error details. 83 + * 84 + * @param {Frame} frame - The frame to validate 85 + * @returns {MessageFrame<unknown>} The frame as a MessageFrame if valid 86 + * @throws {XRPCError} If the frame is an error frame or an invalid type 87 + * @internal 88 + */ 89 + export function ensureChunkIsMessage(frame: Frame): MessageFrame<unknown> { 90 + if (frame.isMessage()) { 91 + return frame; 92 + } else if (frame.isError()) { 93 + // @TODO work -1 error code into XRPCError 94 + throw new XRPCError(-1, frame.code, frame.message); 95 + } else { 96 + throw new XRPCError(ResponseType.Unknown, undefined, "Unknown frame type"); 97 + } 98 + }
+139
xrpc-server/stream/subscription.ts
··· 1 + import { ensureChunkIsMessage } from "./stream.ts"; 2 + import { WebSocketKeepAlive } from "./websocket-keepalive.ts"; 3 + import { Frame } from "./frames.ts"; 4 + import type { WebSocketOptions } from "./types.ts"; 5 + 6 + /** 7 + * Represents a message body in a subscription stream. 8 + * @interface 9 + * @property {string} [$type] - Optional type identifier for the message 10 + * @property {unknown} [key: string] - Additional message properties 11 + */ 12 + interface MessageBody { 13 + $type?: string; 14 + [key: string]: unknown; 15 + } 16 + 17 + /** 18 + * Represents a subscription to an XRPC streaming endpoint. 19 + * Handles WebSocket connection management, reconnection, and message parsing. 20 + * @class 21 + * @template T - The type of messages yielded by the subscription 22 + */ 23 + export class Subscription<T = unknown> { 24 + /** 25 + * Creates a new subscription instance. 26 + * @constructor 27 + * @param {Object} opts - Subscription configuration options 28 + * @param {string} opts.service - The base URL of the XRPC service 29 + * @param {string} opts.method - The XRPC method to subscribe to 30 + * @param {number} [opts.maxReconnectSeconds] - Maximum time in seconds between reconnection attempts 31 + * @param {number} [opts.heartbeatIntervalMs] - Interval in milliseconds for sending heartbeat messages 32 + * @param {AbortSignal} [opts.signal] - Signal for aborting the subscription 33 + * @param {Function} opts.validate - Function to validate and transform incoming messages 34 + * @param {Function} [opts.onReconnectError] - Callback for handling reconnection errors 35 + * @param {Function} [opts.getParams] - Function to get query parameters for the subscription URL 36 + */ 37 + constructor( 38 + public opts: WebSocketOptions & { 39 + service: string; 40 + method: string; 41 + maxReconnectSeconds?: number; 42 + heartbeatIntervalMs?: number; 43 + signal?: AbortSignal; 44 + validate: (obj: unknown) => T | undefined; 45 + onReconnectError?: ( 46 + 
error: unknown, 47 + n: number, 48 + initialSetup: boolean, 49 + ) => void; 50 + getParams?: () => 51 + | Record<string, unknown> 52 + | Promise<Record<string, unknown> | undefined> 53 + | undefined; 54 + }, 55 + ) {} 56 + 57 + /** 58 + * Implements the AsyncIterator protocol for the subscription. 59 + * Allows using the subscription in a for-await-of loop. 60 + * @returns {AsyncGenerator<T>} An async generator that yields validated messages 61 + */ 62 + async *[Symbol.asyncIterator](): AsyncGenerator<T> { 63 + const ws = new WebSocketKeepAlive({ 64 + ...this.opts, 65 + getUrl: async () => { 66 + const params = (await this.opts.getParams?.()) ?? {}; 67 + const query = encodeQueryParams(params); 68 + return `${this.opts.service}/xrpc/${this.opts.method}?${query}`; 69 + }, 70 + }); 71 + for await (const chunk of ws) { 72 + const frame = Frame.fromBytes(chunk); 73 + const message = ensureChunkIsMessage(frame); 74 + const t = message.header.t; 75 + const clone = message.body !== undefined 76 + ? { ...message.body } as MessageBody 77 + : undefined; 78 + if (clone !== undefined && t !== undefined) { 79 + clone.$type = t.startsWith("#") ? this.opts.method + t : t; 80 + } 81 + const result = this.opts.validate(clone); 82 + if (result !== undefined) { 83 + yield result; 84 + } 85 + } 86 + } 87 + } 88 + 89 + export default Subscription; 90 + 91 + /** 92 + * Encodes an object of parameters into a URL query string. 
93 + * @param {Record<string, unknown>} obj - The parameters to encode 94 + * @returns {string} The encoded query string 95 + */ 96 + function encodeQueryParams(obj: Record<string, unknown>): string { 97 + const params = new URLSearchParams(); 98 + Object.entries(obj).forEach(([key, value]) => { 99 + const encoded = encodeQueryParam(value); 100 + if (Array.isArray(encoded)) { 101 + encoded.forEach((enc) => params.append(key, enc)); 102 + } else { 103 + params.set(key, encoded); 104 + } 105 + }); 106 + return params.toString(); 107 + } 108 + 109 + /** 110 + * Encodes a single query parameter value into a string or array of strings. 111 + * Handles various types including strings, numbers, booleans, dates, and arrays. 112 + * @param {unknown} value - The value to encode 113 + * @returns {string | string[]} The encoded parameter value(s) 114 + * @throws {Error} If the value cannot be encoded as a query parameter 115 + */ 116 + function encodeQueryParam(value: unknown): string | string[] { 117 + if (typeof value === "string") { 118 + return value; 119 + } 120 + if (typeof value === "number") { 121 + return value.toString(); 122 + } 123 + if (typeof value === "boolean") { 124 + return value ? "true" : "false"; 125 + } 126 + if (typeof value === "undefined") { 127 + return ""; 128 + } 129 + if (typeof value === "object") { 130 + if (value instanceof Date) { 131 + return value.toISOString(); 132 + } else if (Array.isArray(value)) { 133 + return value.flatMap(encodeQueryParam); 134 + } else if (!value) { 135 + return ""; 136 + } 137 + } 138 + throw new Error(`Cannot encode ${typeof value}s into query params`); 139 + }
+121
xrpc-server/stream/types.ts
import { z } from "zod";

/**
 * Frame types used in the XRPC streaming protocol.
 * The numeric value is carried in the `op` field of each frame header.
 */
export enum FrameType {
  /** Normal message frame */
  Message = 1,
  /** Error message frame */
  Error = -1,
}

/**
 * WebSocket connection options shared by stream clients.
 */
export interface WebSocketOptions {
  /** Additional headers for the WebSocket connection */
  headers?: Record<string, string>;
  /** WebSocket subprotocols to use */
  protocols?: string[];
}

/**
 * Header for message frames.
 * `t` optionally discriminates the message body type (e.g. "#commit").
 */
export type MessageFrameHeader = {
  op: FrameType.Message;
  t?: string;
};

// Runtime validator for MessageFrameHeader; `.strict()` rejects unknown keys.
export const messageFrameHeader = z.object({
  op: z.literal(FrameType.Message), // Frame op
  t: z.string().optional(), // Message body type discriminator
}).strict() as z.ZodType<MessageFrameHeader>;

/**
 * Header for error frames. Carries only the `op` tag; error details live
 * in the frame body (see ErrorFrameBody).
 */
export type ErrorFrameHeader = {
  op: FrameType.Error;
};

// Runtime validator for ErrorFrameHeader.
export const errorFrameHeader = z.object({
  op: z.literal(FrameType.Error),
}).strict() as z.ZodType<ErrorFrameHeader>;

/**
 * Base type for error frame bodies: an error code plus optional
 * human-readable message.
 */
export type ErrorFrameBodyBase = {
  error: string;
  message?: string;
};

/**
 * Error frame body with a typed error code.
 * @template T - The union of error codes allowed
 */
export type ErrorFrameBody<T extends string = string> = {
  error: T;
  message?: string;
};

// Runtime validator for error frame bodies (validates the base shape;
// code-specific narrowing is left to callers).
export const errorFrameBody = z.object({
  error: z.string(), // Error code
  message: z.string().optional(), // Error message
}).strict() as z.ZodType<ErrorFrameBodyBase>;

/**
 * Union of all frame headers, discriminated by `op`.
 */
export type FrameHeader = MessageFrameHeader | ErrorFrameHeader;

// Runtime validator for FrameHeader.
export const frameHeader = z.union([
  messageFrameHeader,
  errorFrameHeader,
]) as z.ZodType<FrameHeader>;

/**
 * Error used to cleanly disconnect a stream with specific close codes.
 * Callers throw this from handlers; the server/client translates `wsCode`
 * into the WebSocket close frame.
 */
export class DisconnectError extends Error {
  constructor(
    /** WebSocket close code (defaults to Policy, 1008) */
    public wsCode: CloseCode = CloseCode.Policy,
    /** Optional XRPC-specific error code */
    public xrpcCode?: string,
  ) {
    super();
  }
}

/**
 * WebSocket close codes as defined in RFC 6455.
 * @see https://www.rfc-editor.org/rfc/rfc6455#section-7.4.1
 */
export enum CloseCode {
  /** Normal closure, meaning the purpose for which the connection was established has been fulfilled */
  Normal = 1000,
  /** Abnormal closure, meaning that the connection was terminated in an abnormal way */
  Abnormal = 1006,
  /** Policy violation, meaning the endpoint is terminating the connection due to a policy violation */
  Policy = 1008,
}
+253
xrpc-server/stream/websocket-keepalive.ts
··· 1 + import { SECOND, wait } from "@atproto/common"; 2 + import { CloseCode, DisconnectError, type WebSocketOptions } from "./types.ts"; 3 + 4 + /** 5 + * WebSocket client with automatic reconnection and heartbeat functionality. 6 + * Handles connection management, reconnection backoff, and keep-alive messages. 7 + * @class 8 + */ 9 + export class WebSocketKeepAlive { 10 + /** Current WebSocket connection instance */ 11 + public ws: WebSocket | null = null; 12 + /** Whether this is the first connection attempt */ 13 + public initialSetup = true; 14 + /** Number of reconnection attempts made, or null if not reconnecting */ 15 + public reconnects: number | null = null; 16 + 17 + /** 18 + * Creates a new WebSocket client with keep-alive functionality. 19 + * @constructor 20 + * @param {Object} opts - Client configuration options 21 + * @param {Function} opts.getUrl - Function to get the WebSocket URL 22 + * @param {number} [opts.maxReconnectSeconds] - Maximum backoff time between reconnection attempts 23 + * @param {AbortSignal} [opts.signal] - Signal for aborting the connection 24 + * @param {number} [opts.heartbeatIntervalMs] - Interval between heartbeat messages 25 + * @param {Function} [opts.onReconnectError] - Callback for handling reconnection errors 26 + */ 27 + constructor( 28 + public opts: WebSocketOptions & { 29 + getUrl: () => Promise<string>; 30 + maxReconnectSeconds?: number; 31 + signal?: AbortSignal; 32 + heartbeatIntervalMs?: number; 33 + onReconnectError?: ( 34 + error: unknown, 35 + n: number, 36 + initialSetup: boolean, 37 + ) => void; 38 + }, 39 + ) {} 40 + 41 + /** 42 + * Implements the AsyncIterator protocol for receiving WebSocket messages. 43 + * Handles automatic reconnection and message buffering. 44 + * @returns {AsyncGenerator<Uint8Array>} An async generator that yields received messages 45 + */ 46 + async *[Symbol.asyncIterator](): AsyncGenerator<Uint8Array> { 47 + const maxReconnectMs = 1000 * (this.opts.maxReconnectSeconds ?? 
64); 48 + while (true) { 49 + if (this.reconnects !== null) { 50 + const duration = this.initialSetup 51 + ? Math.min(1000, maxReconnectMs) 52 + : backoffMs(this.reconnects++, maxReconnectMs); 53 + await wait(duration); 54 + } 55 + const url = await this.opts.getUrl(); 56 + this.ws = new WebSocket(url, this.opts.protocols); 57 + const ac = new AbortController(); 58 + if (this.opts.signal) { 59 + forwardSignal(this.opts.signal, ac); 60 + } 61 + this.ws.onopen = () => { 62 + this.initialSetup = false; 63 + this.reconnects = 0; 64 + if (this.ws) { 65 + this.startHeartbeat(this.ws); 66 + } 67 + }; 68 + this.ws.onclose = (ev: CloseEvent) => { 69 + if (ev.code === CloseCode.Abnormal) { 70 + // Forward into an error to distinguish from a clean close 71 + ac.abort( 72 + new AbnormalCloseError(`Abnormal ws close: ${ev.reason}`), 73 + ); 74 + } 75 + }; 76 + 77 + try { 78 + const messageQueue: Uint8Array[] = []; 79 + let error: Error | null = null; 80 + let done = false; 81 + 82 + this.ws.onmessage = (ev: MessageEvent) => { 83 + if (ev.data instanceof Uint8Array) { 84 + messageQueue.push(ev.data); 85 + } 86 + }; 87 + this.ws.onerror = (ev: Event | ErrorEvent) => { 88 + if (ev instanceof ErrorEvent) { 89 + error = ev.error; 90 + } 91 + }; 92 + this.ws.onclose = () => { 93 + done = true; 94 + }; 95 + 96 + while (!done && !error && !ac.signal.aborted) { 97 + if (messageQueue.length > 0) { 98 + yield messageQueue.shift()!; 99 + } else { 100 + await new Promise((resolve) => setTimeout(resolve, 0)); 101 + } 102 + } 103 + 104 + if (error) throw error; 105 + if (ac.signal.aborted) throw ac.signal.reason; 106 + } catch (_err) { 107 + const err = isErrorWithCode(_err) && _err.code === "ABORT_ERR" 108 + ? 
_err.cause 109 + : _err; 110 + if (err instanceof DisconnectError) { 111 + // We cleanly end the connection 112 + this.ws?.close(err.wsCode); 113 + break; 114 + } 115 + this.ws?.close(); // No-ops if already closed or closing 116 + if (isReconnectable(err)) { 117 + this.reconnects ??= 0; // Never reconnect with a null 118 + this.opts.onReconnectError?.(err, this.reconnects, this.initialSetup); 119 + continue; 120 + } else { 121 + throw err; 122 + } 123 + } 124 + break; // Other side cleanly ended stream and disconnected 125 + } 126 + } 127 + 128 + /** 129 + * Starts the heartbeat mechanism for a WebSocket connection. 130 + * Sends periodic ping messages and monitors for pong responses. 131 + * @param {WebSocket} ws - The WebSocket connection to monitor 132 + */ 133 + startHeartbeat(ws: WebSocket) { 134 + let isAlive = true; 135 + let heartbeatInterval: ReturnType<typeof setInterval> | null = null; 136 + 137 + const checkAlive = () => { 138 + if (!isAlive) { 139 + return ws.close(); 140 + } 141 + isAlive = false; // expect websocket to no longer be alive unless we receive a "pong" within the interval 142 + ws.send("ping"); 143 + }; 144 + 145 + checkAlive(); 146 + heartbeatInterval = setInterval( 147 + checkAlive, 148 + this.opts.heartbeatIntervalMs ?? 10 * SECOND, 149 + ); 150 + 151 + ws.onmessage = (ev: MessageEvent) => { 152 + if (ev.data === "pong") { 153 + isAlive = true; 154 + } 155 + }; 156 + ws.onclose = () => { 157 + if (heartbeatInterval) { 158 + clearInterval(heartbeatInterval); 159 + heartbeatInterval = null; 160 + } 161 + }; 162 + } 163 + } 164 + 165 + export default WebSocketKeepAlive; 166 + 167 + /** 168 + * Error class for abnormal WebSocket closures. 169 + * @class 170 + * @extends Error 171 + */ 172 + class AbnormalCloseError extends Error { 173 + code = "EWSABNORMALCLOSE"; 174 + } 175 + 176 + /** 177 + * Interface for errors with error codes. 
178 + * @interface 179 + * @property {string} [code] - Error code identifier 180 + * @property {unknown} [cause] - Underlying cause of the error 181 + */ 182 + interface ErrorWithCode { 183 + code?: string; 184 + cause?: unknown; 185 + } 186 + 187 + /** 188 + * Type guard to check if an error has an error code. 189 + * @param {unknown} err - The error to check 190 + * @returns {boolean} True if the error has a code property 191 + */ 192 + function isErrorWithCode(err: unknown): err is ErrorWithCode { 193 + return err !== null && typeof err === "object" && "code" in err; 194 + } 195 + 196 + /** 197 + * Checks if an error should trigger a reconnection attempt. 198 + * Network-related errors are typically reconnectable. 199 + * @param {unknown} err - The error to check 200 + * @returns {boolean} True if the error should trigger a reconnection 201 + */ 202 + function isReconnectable(err: unknown): boolean { 203 + // Network errors are reconnectable. 204 + // AuthenticationRequired and InvalidRequest XRPCErrors are not reconnectable. 205 + // @TODO method-specific XRPCErrors may be reconnectable, need to consider. Receiving 206 + // an invalid message is not current reconnectable, but the user can decide to skip them. 207 + if (!isErrorWithCode(err)) return false; 208 + return typeof err.code === "string" && networkErrorCodes.includes(err.code); 209 + } 210 + 211 + /** 212 + * List of error codes that indicate network-related issues. 213 + * These errors typically warrant a reconnection attempt. 214 + */ 215 + const networkErrorCodes = [ 216 + "EWSABNORMALCLOSE", 217 + "ECONNRESET", 218 + "ECONNREFUSED", 219 + "ECONNABORTED", 220 + "EPIPE", 221 + "ETIMEDOUT", 222 + "ECANCELED", 223 + ]; 224 + 225 + /** 226 + * Calculates the backoff duration for reconnection attempts. 227 + * Uses exponential backoff with random jitter. 
228 + * @param {number} n - The number of reconnection attempts so far 229 + * @param {number} maxMs - Maximum backoff duration in milliseconds 230 + * @returns {number} The backoff duration in milliseconds 231 + */ 232 + function backoffMs(n: number, maxMs: number) { 233 + const baseSec = Math.pow(2, n); // 1, 2, 4, ... 234 + const randSec = Math.random() - 0.5; // Random jitter between -.5 and .5 seconds 235 + const ms = 1000 * (baseSec + randSec); 236 + return Math.min(ms, maxMs); 237 + } 238 + 239 + /** 240 + * Forwards abort signals from one AbortController to another. 241 + * @param {AbortSignal} signal - The source abort signal 242 + * @param {AbortController} ac - The target abort controller 243 + */ 244 + function forwardSignal(signal: AbortSignal, ac: AbortController) { 245 + if (signal.aborted) { 246 + return ac.abort(signal.reason); 247 + } else { 248 + signal.addEventListener("abort", () => ac.abort(signal.reason), { 249 + // @ts-ignore https://github.com/DefinitelyTyped/DefinitelyTyped/pull/68625 250 + signal: ac.signal, 251 + }); 252 + } 253 + }
+100
xrpc-server/tests/_util.ts
··· 1 + import type * as xrpc from "../mod.ts"; 2 + import { AuthRequiredError } from "../errors.ts"; 3 + 4 + export async function createServer( 5 + server: xrpc.Server, 6 + ): Promise<Deno.HttpServer> { 7 + const abortController = new AbortController(); 8 + let resolveServer: (value: number) => void; 9 + const portPromise = new Promise<number>((resolve) => { 10 + resolveServer = resolve; 11 + }); 12 + 13 + const httpServer = Deno.serve({ 14 + signal: abortController.signal, 15 + port: 0, 16 + onListen({ port }) { 17 + resolveServer(port); 18 + }, 19 + handler: server.handler.fetch, 20 + }); 21 + 22 + // Attach the abort controller and port for cleanup and access 23 + type ServerWithMetadata = Deno.HttpServer & { 24 + abortController: AbortController; 25 + port: number; 26 + }; 27 + 28 + (httpServer as ServerWithMetadata).abortController = abortController; 29 + const port = await portPromise; 30 + (httpServer as ServerWithMetadata).port = port; 31 + 32 + return httpServer; 33 + } 34 + 35 + export async function closeServer(httpServer: Deno.HttpServer) { 36 + type ServerWithAbortController = Deno.HttpServer & { 37 + abortController: AbortController; 38 + }; 39 + const abortController = 40 + (httpServer as ServerWithAbortController).abortController; 41 + if (abortController) { 42 + abortController.abort(); 43 + await httpServer.finished; 44 + } 45 + } 46 + 47 + export function createBasicAuth(allowed: { 48 + username: string; 49 + password: string; 50 + }) { 51 + const verifyAuth = (header?: string | null) => { 52 + if (!header || !header.startsWith("Basic ")) { 53 + throw new AuthRequiredError(); 54 + } 55 + const original = header.replace("Basic ", ""); 56 + const decoded = atob(original); 57 + const [username, password] = decoded.split(":"); 58 + if (username !== allowed.username || password !== allowed.password) { 59 + throw new AuthRequiredError(); 60 + } 61 + return { 62 + credentials: { username }, 63 + artifacts: { original }, 64 + }; 65 + }; 66 + 67 + 
return function (ctx: { 68 + params: xrpc.Params; 69 + input: xrpc.Input; 70 + req: Request; 71 + res: Response; 72 + }) { 73 + return verifyAuth(ctx.req.headers.get("authorization")); 74 + }; 75 + } 76 + 77 + export function createStreamBasicAuth( 78 + { username, password }: { username: string; password: string }, 79 + ) { 80 + return (ctx: { req: { headers: Headers } }) => { 81 + const auth = ctx.req.headers.get("authorization"); 82 + if (auth !== `Basic ${btoa(`${username}:${password}`)}`) { 83 + throw new AuthRequiredError(); 84 + } 85 + return { 86 + credentials: { username }, 87 + artifacts: { original: btoa(`${username}:${password}`) }, 88 + }; 89 + }; 90 + } 91 + 92 + export function basicAuthHeaders(creds: { 93 + username: string; 94 + password: string; 95 + }) { 96 + return { 97 + authorization: "Basic " + 98 + btoa(`${creds.username}:${creds.password}`), 99 + }; 100 + }
+375
xrpc-server/tests/auth_test.ts
import * as jose from "npm:jose";
import { MINUTE } from "@atproto/common";
import { Secp256k1Keypair } from "@atproto/crypto";
import type { LexiconDoc } from "@atproto/lexicon";
import { XrpcClient, XRPCError } from "@atproto/xrpc";
import * as xrpcServer from "../mod.ts";
import {
  basicAuthHeaders,
  closeServer,
  createBasicAuth,
  createServer,
} from "./_util.ts";
import {
  assert,
  assertEquals,
  assertObjectMatch,
  assertRejects,
} from "@std/assert";
import { encodeBase64 } from "@std/encoding";

// Lexicon under test: a procedure that requires `present: true` in its JSON
// input and echoes back the authenticated username and raw credential string.
const LEXICONS: LexiconDoc[] = [
  {
    lexicon: 1,
    id: "io.example.authTest",
    defs: {
      main: {
        type: "procedure",
        input: {
          encoding: "application/json",
          schema: {
            type: "object",
            properties: {
              present: { type: "boolean", const: true },
            },
          },
        },
        output: {
          encoding: "application/json",
          schema: {
            type: "object",
            properties: {
              username: { type: "string" },
              original: { type: "string" },
            },
          },
        },
      },
    },
  },
];

// Shared test state: the HTTP server and client are created in the test body.
let s: Deno.HttpServer;
let client: XrpcClient;
const server = xrpcServer.createServer(LEXICONS);

// Shape of the procedure's JSON response body.
type AuthTestResponse = {
  username: string | undefined;
  original: string | undefined;
};

// NOTE(review): AuthTestAuth is declared but never referenced below.
type AuthTestAuth = {
  credentials: { username: string };
  artifacts: { original: string };
};

// Register the procedure with Basic-auth; the handler echoes the verified
// credentials/artifacts produced by createBasicAuth.
server.method("io.example.authTest", {
  auth: createBasicAuth({ username: "admin", password: "password" }),
  handler: (ctx: xrpcServer.HandlerContext) => {
    const authResult = ctx.auth as xrpcServer.AuthResult | undefined;
    const credentials = authResult?.credentials as
      | { username: string }
      | undefined;
    const artifacts = authResult?.artifacts as { original: string } | undefined;
    return {
      encoding: "application/json",
      body: {
        username: credentials?.username,
        original: artifacts?.original,
      } satisfies AuthTestResponse,
    };
  },
});

Deno.test({
  name: "Auth Tests",
  async fn() {
    // Setup
    s = await createServer(server);
    const port = (s as Deno.HttpServer & { port: number }).port;
    client = new XrpcClient(`http://localhost:${port}`, LEXICONS);

    // Tests
    // NOTE(review): calling `Deno.test(...)` from inside a running test fn
    // is not supported by Deno's test runner — these nested registrations
    // should be `t.step(...)` sub-steps (as the "verifyJwt tests" block
    // below correctly does). Verify these cases actually execute.
    await Deno.test("creates and validates service auth headers", async () => {
      const keypair = await Secp256k1Keypair.create();
      const iss = "did:example:alice";
      const aud = "did:example:bob";
      const token = await xrpcServer.createServiceJwt({
        iss,
        aud,
        keypair,
        lxm: null,
      });
      const validated = await xrpcServer.verifyJwt(
        token,
        null,
        null,
        async () => await keypair.did(),
      );
      assertEquals(validated.iss, iss);
      assertEquals(validated.aud, aud);
      // should expire within the minute when no exp is provided
      assert(validated.exp > Date.now() / 1000);
      assert(validated.exp < Date.now() / 1000 + 60);
      assert(typeof validated.jti === "string");
      assert(validated.lxm === undefined);
    });

    await Deno.test("creates and validates service auth headers bound to a particular method", async () => {
      const keypair = await Secp256k1Keypair.create();
      const iss = "did:example:alice";
      const aud = "did:example:bob";
      const lxm = "com.atproto.repo.createRecord";
      const token = await xrpcServer.createServiceJwt({
        iss,
        aud,
        keypair,
        lxm,
      });
      const validated = await xrpcServer.verifyJwt(
        token,
        null,
        lxm,
        async () => await keypair.did(),
      );
      assertEquals(validated.iss, iss);
      assertEquals(validated.aud, aud);
      assertEquals(validated.lxm, lxm);
    });

    // Auth must be rejected before the request payload is validated.
    await Deno.test("fails on bad auth before invalid request payload", async () => {
      try {
        await client.call(
          "io.example.authTest",
          {},
          { present: false },
          {
            headers: basicAuthHeaders({
              username: "admin",
              password: "wrong",
            }),
          },
        );
        throw new Error("Didnt throw");
      } catch (e) {
        assert(e instanceof XRPCError);
        assert(!e.success);
        assertEquals(e.error, "AuthenticationRequired");
        assertEquals(e.message, "Authentication Required");
        assertEquals(e.status, 401);
      }
    });

    await Deno.test("fails on invalid request payload after good auth", async () => {
      try {
        await client.call(
          "io.example.authTest",
          {},
          { present: false },
          {
            headers: basicAuthHeaders({
              username: "admin",
              password: "password",
            }),
          },
        );
        throw new Error("Didnt throw");
      } catch (e) {
        assert(e instanceof XRPCError);
        assert(!e.success);
        assertEquals(e.error, "InvalidRequest");
        assertEquals(e.message, "Input/present must be true");
        assertEquals(e.status, 400);
      }
    });

    await Deno.test("succeeds on good auth and payload", async () => {
      const res = await client.call(
        "io.example.authTest",
        {},
        { present: true },
        {
          headers: basicAuthHeaders({
            username: "admin",
            password: "password",
          }),
        },
      );
      assert(res.success);
      assertEquals(res.data, {
        // "YWRtaW46cGFzc3dvcmQ=" is base64("admin:password")
        username: "admin",
        original: "YWRtaW46cGFzc3dvcmQ=",
      });
    });

    await Deno.test("verifyJwt tests", async (t) => {
      await t.step("fails on expired jwt", async () => {
        const keypair = await Secp256k1Keypair.create();
        const jwt = await xrpcServer.createServiceJwt({
          aud: "did:example:aud",
          iss: "did:example:iss",
          keypair,
          exp: Math.floor((Date.now() - MINUTE) / 1000),
          lxm: null,
        });
        await assertRejects(
          () =>
            xrpcServer.verifyJwt(
              jwt,
              "did:example:aud",
              null,
              async () => await keypair.did(),
            ),
          Error,
          "jwt expired",
        );
      });

      await t.step("fails on bad audience", async () => {
        const keypair = await Secp256k1Keypair.create();
        const jwt = await xrpcServer.createServiceJwt({
          aud: "did:example:aud1",
          iss: "did:example:iss",
          keypair,
          lxm: null,
        });
        await assertRejects(
          () =>
            xrpcServer.verifyJwt(
              jwt,
              "did:example:aud2",
              null,
              async () => await keypair.did(),
            ),
          Error,
          "jwt audience does not match service did",
        );
      });

      await t.step("fails on bad lxm", async () => {
        const keypair = await Secp256k1Keypair.create();
        const jwt = await xrpcServer.createServiceJwt({
          aud: "did:example:aud1",
          iss: "did:example:iss",
          keypair,
          lxm: "com.atproto.repo.createRecord",
        });
        await assertRejects(
          () =>
            xrpcServer.verifyJwt(
              jwt,
              "did:example:aud1",
              "com.atproto.repo.putRecord",
              async () => await keypair.did(),
            ),
          Error,
          "bad jwt lexicon method",
        );
      });

      await t.step("fails on null lxm when lxm is required", async () => {
        const keypair = await Secp256k1Keypair.create();
        const jwt = await xrpcServer.createServiceJwt({
          aud: "did:example:aud1",
          iss: "did:example:iss",
          keypair,
          lxm: null,
        });
        await assertRejects(
          () =>
            xrpcServer.verifyJwt(
              jwt,
              "did:example:aud1",
              "com.atproto.repo.putRecord",
              async () => await keypair.did(),
            ),
          Error,
          "missing jwt lexicon method",
        );
      });

      // The resolver is expected to be retried with forceRefresh=true after
      // the first (stale) key fails verification.
      await t.step("refreshes key on verification failure", async () => {
        const keypair1 = await Secp256k1Keypair.create();
        const keypair2 = await Secp256k1Keypair.create();
        const jwt = await xrpcServer.createServiceJwt({
          aud: "did:example:aud",
          iss: "did:example:iss",
          keypair: keypair2,
          lxm: null,
        });
        let usedKeypair1 = false;
        let usedKeypair2 = false;
        const tryVerify = await xrpcServer.verifyJwt(
          jwt,
          "did:example:aud",
          null,
          async (_did, forceRefresh) => {
            if (forceRefresh) {
              usedKeypair2 = true;
              return await keypair2.did();
            } else {
              usedKeypair1 = true;
              return await keypair1.did();
            }
          },
        );
        assertObjectMatch(tryVerify, {
          aud: "did:example:aud",
          iss: "did:example:iss",
        });
        assert(usedKeypair1);
        assert(usedKeypair2);
      });

      await t.step(
        "interoperates with jwts signed by other libraries",
        async () => {
          const keypair = await Secp256k1Keypair.create({ exportable: true });
          const signingKey = await createPrivateKeyObject(keypair);
          const payload = {
            aud: "did:example:aud",
            iss: "did:example:iss",
            exp: Math.floor((Date.now() + MINUTE) / 1000),
          };
          const jwt = await new jose.SignJWT(payload)
            .setProtectedHeader({ typ: "JWT", alg: keypair.jwtAlg })
            .sign(signingKey);
          const tryVerify = await xrpcServer.verifyJwt(
            jwt,
            "did:example:aud",
            null,
            async () => {
              return await keypair.did();
            },
          );
          assertEquals(tryVerify, payload);
        },
      );
    });

    // Cleanup
    await closeServer(s);
  },
});

// Converts an exportable Secp256k1Keypair into a WebCrypto signing key.
// NOTE(review): this looks broken on three counts — confirm it ever runs:
//  1. importKey("pkcs8", ...) requires raw DER bytes, but it is given the
//     UTF-8 bytes of a PEM *text* wrapper.
//  2. The PEM label "EC PRIVATE KEY" denotes SEC1 encoding, not PKCS#8.
//  3. namedCurve "P-256" does not match a secp256k1 key (and WebCrypto
//     ECDSA does not support secp256k1 in most runtimes).
async function createPrivateKeyObject(
  privateKey: Secp256k1Keypair,
): Promise<CryptoKey> {
  const raw = await privateKey.export();
  const pemKey = `-----BEGIN EC PRIVATE KEY-----\n${
    encodeBase64(raw)
  }\n-----END EC PRIVATE KEY-----`;

  // Convert PEM to CryptoKey
  const binaryDer = new TextEncoder().encode(pemKey);
  return await crypto.subtle.importKey(
    "pkcs8",
    binaryDer,
    {
      name: "ECDSA",
      namedCurve: "P-256",
    },
    true,
    ["sign"],
  );
}
+664
xrpc-server/tests/bodies_test.ts
··· 1 + import { cidForCbor } from "@atproto/common"; 2 + import { randomBytes } from "@atproto/crypto"; 3 + import type { LexiconDoc } from "@atproto/lexicon"; 4 + import { ResponseType, XrpcClient, XRPCError } from "@atproto/xrpc"; 5 + import * as xrpcServer from "../mod.ts"; 6 + import { logger } from "../logger.ts"; 7 + import { closeServer, createServer } from "./_util.ts"; 8 + import { 9 + assert, 10 + assertEquals, 11 + assertObjectMatch, 12 + assertRejects, 13 + } from "@std/assert"; 14 + 15 + // Web-standard compression helpers 16 + async function compressData( 17 + data: Uint8Array, 18 + format: CompressionFormat, 19 + ): Promise<Uint8Array> { 20 + const stream = new ReadableStream({ 21 + start(controller) { 22 + controller.enqueue(data); 23 + controller.close(); 24 + }, 25 + }); 26 + const compressedStream = stream.pipeThrough(new CompressionStream(format)); 27 + return new Uint8Array(await new Response(compressedStream).arrayBuffer()); 28 + } 29 + 30 + const LEXICONS: LexiconDoc[] = [ 31 + { 32 + lexicon: 1, 33 + id: "io.example.validationTest", 34 + defs: { 35 + main: { 36 + type: "procedure", 37 + input: { 38 + encoding: "application/json", 39 + schema: { 40 + type: "object", 41 + required: ["foo"], 42 + properties: { 43 + foo: { type: "string" }, 44 + bar: { type: "integer" }, 45 + }, 46 + }, 47 + }, 48 + output: { 49 + encoding: "application/json", 50 + schema: { 51 + type: "object", 52 + required: ["foo"], 53 + properties: { 54 + foo: { type: "string" }, 55 + bar: { type: "integer" }, 56 + }, 57 + }, 58 + }, 59 + }, 60 + }, 61 + }, 62 + { 63 + lexicon: 1, 64 + id: "io.example.validationTestTwo", 65 + defs: { 66 + main: { 67 + type: "query", 68 + output: { 69 + encoding: "application/json", 70 + schema: { 71 + type: "object", 72 + required: ["foo"], 73 + properties: { 74 + foo: { type: "string" }, 75 + bar: { type: "integer" }, 76 + }, 77 + }, 78 + }, 79 + }, 80 + }, 81 + }, 82 + { 83 + lexicon: 1, 84 + id: "io.example.blobTest", 85 + defs: { 86 + 
main: { 87 + type: "procedure", 88 + input: { 89 + encoding: "*/*", 90 + }, 91 + output: { 92 + encoding: "application/json", 93 + schema: { 94 + type: "object", 95 + required: ["cid"], 96 + properties: { 97 + cid: { type: "string" }, 98 + }, 99 + }, 100 + }, 101 + }, 102 + }, 103 + }, 104 + ]; 105 + 106 + const BLOB_LIMIT = 5000; 107 + 108 + async function consumeInput( 109 + input: ReadableStream | string | object, 110 + ): Promise<Uint8Array> { 111 + if (input instanceof Uint8Array) { 112 + return input; 113 + } 114 + if (typeof input === "string") { 115 + return new TextEncoder().encode(input); 116 + } 117 + if (input instanceof ReadableStream) { 118 + try { 119 + const chunks: Uint8Array[] = []; 120 + for await (const chunk of input) { 121 + chunks.push( 122 + chunk instanceof Uint8Array ? chunk : new TextEncoder().encode(chunk), 123 + ); 124 + } 125 + const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0); 126 + const result = new Uint8Array(totalLength); 127 + let offset = 0; 128 + for (const chunk of chunks) { 129 + result.set(chunk, offset); 130 + offset += chunk.length; 131 + } 132 + return result; 133 + } catch (err) { 134 + if (err instanceof XRPCError) { 135 + throw err; 136 + } else { 137 + throw new XRPCError( 138 + ResponseType.InvalidRequest, 139 + "unable to read input", 140 + ); 141 + } 142 + } 143 + } 144 + throw new Error("Invalid input"); 145 + } 146 + 147 + Deno.test({ 148 + name: "Bodies Tests", 149 + async fn() { 150 + const server = xrpcServer.createServer(LEXICONS, { 151 + payload: { 152 + blobLimit: BLOB_LIMIT, 153 + }, 154 + }); 155 + server.method( 156 + "io.example.validationTest", 157 + (ctx: xrpcServer.HandlerContext) => { 158 + if (ctx.input?.body instanceof ReadableStream) { 159 + throw new Error("Input is readable"); 160 + } 161 + 162 + return { 163 + encoding: "json", 164 + body: ctx.input?.body ?? 
null, 165 + }; 166 + }, 167 + ); 168 + server.method("io.example.validationTestTwo", () => ({ 169 + encoding: "json", 170 + body: { wrong: "data" }, 171 + })); 172 + server.method( 173 + "io.example.blobTest", 174 + async (ctx: xrpcServer.HandlerContext) => { 175 + const buffer = await consumeInput( 176 + ctx.input?.body as string | object | ReadableStream, 177 + ); 178 + const cid = await cidForCbor(buffer); 179 + return { 180 + encoding: "json", 181 + body: { cid: cid.toString() }, 182 + }; 183 + }, 184 + ); 185 + 186 + // Setup 187 + const s = await createServer(server); 188 + const port = (s as Deno.HttpServer & { port: number }).port; 189 + const url = `http://localhost:${port}`; 190 + const client = new XrpcClient(url, LEXICONS); 191 + 192 + // Tests 193 + await Deno.test("validates input and output bodies", async () => { 194 + const res1 = await client.call( 195 + "io.example.validationTest", 196 + {}, 197 + { 198 + foo: "hello", 199 + bar: 123, 200 + }, 201 + ); 202 + assert(res1.success); 203 + assertEquals(res1.data.foo, "hello"); 204 + assertEquals(res1.data.bar, 123); 205 + 206 + await assertRejects( 207 + () => client.call("io.example.validationTest", {}), 208 + Error, 209 + "Request encoding (Content-Type) required but not provided", 210 + ); 211 + 212 + await assertRejects( 213 + () => client.call("io.example.validationTest", {}, {}), 214 + Error, 215 + 'Input must have the property "foo"', 216 + ); 217 + 218 + await assertRejects( 219 + () => client.call("io.example.validationTest", {}, { foo: 123 }), 220 + Error, 221 + "Input/foo must be a string", 222 + ); 223 + 224 + await assertRejects( 225 + () => 226 + client.call( 227 + "io.example.validationTest", 228 + {}, 229 + { foo: "hello", bar: 123 }, 230 + { encoding: "image/jpeg" }, 231 + ), 232 + Error, 233 + "Unable to encode object as image/jpeg data", 234 + ); 235 + 236 + await assertRejects( 237 + () => 238 + client.call( 239 + "io.example.validationTest", 240 + {}, 241 + new 
Blob([randomBytes(123)], { type: "image/jpeg" }), 242 + ), 243 + Error, 244 + "Wrong request encoding (Content-Type): image/jpeg", 245 + ); 246 + 247 + await assertRejects( 248 + () => 249 + client.call( 250 + "io.example.validationTest", 251 + {}, 252 + (() => { 253 + const formData = new FormData(); 254 + formData.append("foo", "bar"); 255 + return formData; 256 + })(), 257 + ), 258 + Error, 259 + "Wrong request encoding (Content-Type): multipart/form-data", 260 + ); 261 + 262 + await assertRejects( 263 + () => 264 + client.call( 265 + "io.example.validationTest", 266 + {}, 267 + new URLSearchParams([["foo", "bar"]]), 268 + ), 269 + Error, 270 + "Wrong request encoding (Content-Type): application/x-www-form-urlencoded", 271 + ); 272 + 273 + await assertRejects( 274 + () => 275 + client.call( 276 + "io.example.validationTest", 277 + {}, 278 + new Blob([new Uint8Array([1])]), 279 + ), 280 + Error, 281 + "Wrong request encoding (Content-Type): application/octet-stream", 282 + ); 283 + 284 + await assertRejects( 285 + () => 286 + client.call( 287 + "io.example.validationTest", 288 + {}, 289 + new ReadableStream({ 290 + pull(ctrl) { 291 + ctrl.enqueue(new Uint8Array([1])); 292 + ctrl.close(); 293 + }, 294 + }), 295 + ), 296 + Error, 297 + "Wrong request encoding (Content-Type): application/octet-stream", 298 + ); 299 + 300 + await assertRejects( 301 + () => client.call("io.example.validationTest", {}, new Uint8Array([1])), 302 + Error, 303 + "Wrong request encoding (Content-Type): application/octet-stream", 304 + ); 305 + 306 + // 500 responses don't include details, so we nab details from the logger 307 + const originalError = logger.error; 308 + let loggedError: { err: { message: string } } | undefined; 309 + logger.error = (obj: unknown) => { 310 + loggedError = obj as { err: { message: string } }; 311 + }; 312 + 313 + try { 314 + await assertRejects( 315 + () => client.call("io.example.validationTestTwo"), 316 + Error, 317 + "Internal Server Error", 318 + ); 319 + 
320 + assert(loggedError); 321 + assertObjectMatch(loggedError, { 322 + err: { 323 + message: 'Output must have the property "foo"', 324 + }, 325 + }); 326 + } finally { 327 + logger.error = originalError; 328 + } 329 + }); 330 + 331 + await Deno.test("supports ArrayBuffers", async () => { 332 + const bytes = randomBytes(1024); 333 + const expectedCid = await cidForCbor(bytes); 334 + 335 + const bytesResponse = await client.call( 336 + "io.example.blobTest", 337 + {}, 338 + bytes, 339 + { 340 + encoding: "application/octet-stream", 341 + }, 342 + ); 343 + assertEquals(bytesResponse.data.cid, expectedCid.toString()); 344 + }); 345 + 346 + await Deno.test("supports empty payload on procedures with encoding", async () => { 347 + const bytes = new Uint8Array(0); 348 + const expectedCid = await cidForCbor(bytes); 349 + const bytesResponse = await client.call("io.example.blobTest", {}, bytes); 350 + assertEquals(bytesResponse.data.cid, expectedCid.toString()); 351 + }); 352 + 353 + await Deno.test("supports upload of empty txt file", async () => { 354 + const txtFile = new Blob([], { type: "text/plain" }); 355 + const expectedCid = await cidForCbor(await txtFile.arrayBuffer()); 356 + const fileResponse = await client.call( 357 + "io.example.blobTest", 358 + {}, 359 + txtFile, 360 + ); 361 + assertEquals(fileResponse.data.cid, expectedCid.toString()); 362 + }); 363 + 364 + // This does not work because the xrpc-server will add a json middleware 365 + // regardless of the "input" definition. This is probably a behavior that 366 + // should be fixed in the xrpc-server. 
367 + await Deno.test({ 368 + name: "supports upload of json data", 369 + ignore: true, 370 + async fn() { 371 + const jsonFile = new Blob([ 372 + new TextEncoder().encode(`{"foo":"bar","baz":[3, null]}`), 373 + ], { 374 + type: "application/json", 375 + }); 376 + const expectedCid = await cidForCbor(await jsonFile.arrayBuffer()); 377 + const fileResponse = await client.call( 378 + "io.example.blobTest", 379 + {}, 380 + jsonFile, 381 + ); 382 + assertEquals(fileResponse.data.cid, expectedCid.toString()); 383 + }, 384 + }); 385 + 386 + await Deno.test("supports ArrayBufferView", async () => { 387 + const bytes = randomBytes(1024); 388 + const expectedCid = await cidForCbor(bytes); 389 + 390 + const bufferResponse = await client.call( 391 + "io.example.blobTest", 392 + {}, 393 + new Uint8Array(bytes), 394 + ); 395 + assertEquals(bufferResponse.data.cid, expectedCid.toString()); 396 + }); 397 + 398 + await Deno.test("supports Blob", async () => { 399 + const bytes = randomBytes(1024); 400 + const expectedCid = await cidForCbor(bytes); 401 + 402 + const blobResponse = await client.call( 403 + "io.example.blobTest", 404 + {}, 405 + new Blob([bytes], { type: "application/octet-stream" }), 406 + ); 407 + assertEquals(blobResponse.data.cid, expectedCid.toString()); 408 + }); 409 + 410 + await Deno.test("supports Blob without explicit type", async () => { 411 + const bytes = randomBytes(1024); 412 + const expectedCid = await cidForCbor(bytes); 413 + 414 + const blobResponse = await client.call( 415 + "io.example.blobTest", 416 + {}, 417 + new Blob([bytes]), 418 + ); 419 + assertEquals(blobResponse.data.cid, expectedCid.toString()); 420 + }); 421 + 422 + await Deno.test("supports ReadableStream", async () => { 423 + const bytes = randomBytes(1024); 424 + const expectedCid = await cidForCbor(bytes); 425 + 426 + const streamResponse = await client.call( 427 + "io.example.blobTest", 428 + {}, 429 + // ReadableStream.from not available in node < 20 430 + new ReadableStream({ 431 
+ pull(ctrl) { 432 + ctrl.enqueue(bytes); 433 + ctrl.close(); 434 + }, 435 + }), 436 + ); 437 + assertEquals(streamResponse.data.cid, expectedCid.toString()); 438 + }); 439 + 440 + await Deno.test("supports blob uploads", async () => { 441 + const bytes = randomBytes(1024); 442 + const expectedCid = await cidForCbor(bytes); 443 + 444 + const { data } = await client.call("io.example.blobTest", {}, bytes, { 445 + encoding: "application/octet-stream", 446 + }); 447 + assertEquals(data.cid, expectedCid.toString()); 448 + }); 449 + 450 + await Deno.test("supports identity encoding", async () => { 451 + const bytes = randomBytes(1024); 452 + const expectedCid = await cidForCbor(bytes); 453 + 454 + const { data } = await client.call("io.example.blobTest", {}, bytes, { 455 + encoding: "application/octet-stream", 456 + headers: { "content-encoding": "identity" }, 457 + }); 458 + assertEquals(data.cid, expectedCid.toString()); 459 + }); 460 + 461 + await Deno.test("supports gzip encoding", async () => { 462 + const bytes = randomBytes(1024); 463 + const expectedCid = await cidForCbor(bytes); 464 + const compressedBytes = await compressData(bytes, "gzip"); 465 + 466 + const { data } = await client.call( 467 + "io.example.blobTest", 468 + {}, 469 + compressedBytes, 470 + { 471 + encoding: "application/octet-stream", 472 + headers: { 473 + "content-encoding": "gzip", 474 + }, 475 + }, 476 + ); 477 + assertEquals(data.cid, expectedCid.toString()); 478 + }); 479 + 480 + await Deno.test("supports deflate encoding", async () => { 481 + const bytes = randomBytes(1024); 482 + const expectedCid = await cidForCbor(bytes); 483 + const compressedBytes = await compressData(bytes, "deflate"); 484 + 485 + const { data } = await client.call( 486 + "io.example.blobTest", 487 + {}, 488 + compressedBytes, 489 + { 490 + encoding: "application/octet-stream", 491 + headers: { 492 + "content-encoding": "deflate", 493 + }, 494 + }, 495 + ); 496 + assertEquals(data.cid, expectedCid.toString()); 497 + 
}); 498 + 499 + await Deno.test("supports br encoding", async () => { 500 + const bytes = randomBytes(1024); 501 + const expectedCid = await cidForCbor(bytes); 502 + // Note: Using gzip as fallback since brotli compression isn't widely supported 503 + const compressedBytes = await compressData(bytes, "gzip"); 504 + 505 + const { data } = await client.call( 506 + "io.example.blobTest", 507 + {}, 508 + compressedBytes, 509 + { 510 + encoding: "application/octet-stream", 511 + headers: { 512 + "content-encoding": "br", 513 + }, 514 + }, 515 + ); 516 + assertEquals(data.cid, expectedCid.toString()); 517 + }); 518 + 519 + await Deno.test("supports multiple encodings", async () => { 520 + const bytes = randomBytes(1024); 521 + const expectedCid = await cidForCbor(bytes); 522 + 523 + // Apply multiple compressions in sequence 524 + const gzipped = await compressData(bytes, "gzip"); 525 + const deflated = await compressData(gzipped, "deflate"); 526 + const final = await compressData(deflated, "gzip"); // Using gzip instead of br 527 + 528 + const { data } = await client.call( 529 + "io.example.blobTest", 530 + {}, 531 + final, 532 + { 533 + encoding: "application/octet-stream", 534 + headers: { 535 + "content-encoding": 536 + "gzip, identity, deflate, identity, br, identity", 537 + }, 538 + }, 539 + ); 540 + assertEquals(data.cid, expectedCid.toString()); 541 + }); 542 + 543 + await Deno.test("fails gracefully on invalid encodings", async () => { 544 + const bytes = randomBytes(1024); 545 + const compressedBytes = await compressData(bytes, "gzip"); 546 + 547 + await assertRejects( 548 + () => 549 + client.call( 550 + "io.example.blobTest", 551 + {}, 552 + compressedBytes, 553 + { 554 + encoding: "application/octet-stream", 555 + headers: { 556 + "content-encoding": "gzip", 557 + }, 558 + }, 559 + ), 560 + Error, 561 + "unable to read input", 562 + ); 563 + }); 564 + 565 + await Deno.test("supports empty payload", async () => { 566 + const bytes = new Uint8Array(0); 567 + 
const expectedCid = await cidForCbor(bytes); 568 + 569 + // Using "undefined" as body to avoid encoding as lexicon { $bytes: "<base64>" } 570 + const result = await client.call("io.example.blobTest", {}, bytes, { 571 + encoding: "text/plain", 572 + }); 573 + 574 + assertEquals(result.data.cid, expectedCid.toString()); 575 + }); 576 + 577 + await Deno.test("supports max blob size (based on content-length)", async () => { 578 + const bytes = randomBytes(BLOB_LIMIT + 1); 579 + 580 + // Exactly the number of allowed bytes 581 + await client.call("io.example.blobTest", {}, bytes.slice(0, BLOB_LIMIT), { 582 + encoding: "application/octet-stream", 583 + }); 584 + 585 + // Over the number of allowed bytes 586 + await assertRejects( 587 + () => 588 + client.call("io.example.blobTest", {}, bytes, { 589 + encoding: "application/octet-stream", 590 + }), 591 + Error, 592 + "request entity too large", 593 + ); 594 + }); 595 + 596 + await Deno.test("supports max blob size (missing content-length)", async () => { 597 + // We stream bytes in these tests so that content-length isn't included. 598 + const bytes = randomBytes(BLOB_LIMIT + 1); 599 + 600 + // Exactly the number of allowed bytes 601 + await client.call( 602 + "io.example.blobTest", 603 + {}, 604 + bytesToReadableStream(bytes.slice(0, BLOB_LIMIT)), 605 + { 606 + encoding: "application/octet-stream", 607 + }, 608 + ); 609 + 610 + // Over the number of allowed bytes. 
611 + await assertRejects( 612 + () => 613 + client.call( 614 + "io.example.blobTest", 615 + {}, 616 + bytesToReadableStream(bytes), 617 + { 618 + encoding: "application/octet-stream", 619 + }, 620 + ), 621 + Error, 622 + "request entity too large", 623 + ); 624 + }); 625 + 626 + await Deno.test("requires any parsable Content-Type for blob uploads", async () => { 627 + // not a real mimetype, but correct syntax 628 + await client.call("io.example.blobTest", {}, randomBytes(BLOB_LIMIT), { 629 + encoding: "some/thing", 630 + }); 631 + }); 632 + 633 + await Deno.test("errors on an empty Content-type on blob upload", async () => { 634 + // empty mimetype, but correct syntax 635 + const res = await fetch(`${url}/xrpc/io.example.blobTest`, { 636 + method: "post", 637 + headers: { "Content-Type": "" }, 638 + body: randomBytes(BLOB_LIMIT), 639 + // @ts-ignore see note in @atproto/xrpc/client.ts 640 + duplex: "half", 641 + }); 642 + const resBody = await res.json(); 643 + const status = res.status; 644 + assertEquals(status, 400); 645 + assertObjectMatch(resBody, { 646 + error: "InvalidRequest", 647 + message: "Request encoding (Content-Type) required but not provided", 648 + }); 649 + }); 650 + 651 + // Cleanup 652 + await closeServer(s); 653 + }, 654 + }); 655 + 656 + const bytesToReadableStream = (bytes: Uint8Array): ReadableStream => { 657 + // not using ReadableStream.from(), which lacks support in some contexts including nodejs v18. 658 + return new ReadableStream({ 659 + pull(ctrl) { 660 + ctrl.enqueue(bytes); 661 + ctrl.close(); 662 + }, 663 + }); 664 + };
+345
xrpc-server/tests/errors_test.ts
··· 1 + import type { LexiconDoc } from "@atproto/lexicon"; 2 + import { XrpcClient, XRPCError, XRPCInvalidResponseError } from "@atproto/xrpc"; 3 + import * as xrpcServer from "../mod.ts"; 4 + import { closeServer, createServer } from "./_util.ts"; 5 + import { assert, assertEquals, assertRejects } from "@std/assert"; 6 + 7 + const UPSTREAM_LEXICONS: LexiconDoc[] = [ 8 + { 9 + lexicon: 1, 10 + id: "io.example.upstreamInvalidResponse", 11 + defs: { 12 + main: { 13 + type: "query", 14 + output: { 15 + encoding: "application/json", 16 + schema: { 17 + type: "object", 18 + required: ["expectedValue"], 19 + properties: { 20 + expectedValue: { type: "string" }, 21 + }, 22 + }, 23 + }, 24 + }, 25 + }, 26 + }, 27 + ]; 28 + 29 + const LEXICONS: LexiconDoc[] = [ 30 + { 31 + lexicon: 1, 32 + id: "io.example.error", 33 + defs: { 34 + main: { 35 + type: "query", 36 + parameters: { 37 + type: "params", 38 + properties: { 39 + which: { type: "string", default: "foo" }, 40 + }, 41 + }, 42 + errors: [{ name: "Foo" }, { name: "Bar" }], 43 + }, 44 + }, 45 + }, 46 + { 47 + lexicon: 1, 48 + id: "io.example.throwFalsyValue", 49 + defs: { 50 + main: { 51 + type: "query", 52 + }, 53 + }, 54 + }, 55 + { 56 + lexicon: 1, 57 + id: "io.example.query", 58 + defs: { 59 + main: { 60 + type: "query", 61 + }, 62 + }, 63 + }, 64 + { 65 + lexicon: 1, 66 + id: "io.example.procedure", 67 + defs: { 68 + main: { 69 + type: "procedure", 70 + }, 71 + }, 72 + }, 73 + { 74 + lexicon: 1, 75 + id: "io.example.invalidResponse", 76 + defs: { 77 + main: { 78 + type: "query", 79 + output: { 80 + encoding: "application/json", 81 + schema: { 82 + type: "object", 83 + required: ["expectedValue"], 84 + properties: { 85 + expectedValue: { type: "string" }, 86 + }, 87 + }, 88 + }, 89 + }, 90 + }, 91 + }, 92 + { 93 + lexicon: 1, 94 + id: "io.example.invalidUpstreamResponse", 95 + defs: { 96 + main: { 97 + type: "query", 98 + }, 99 + }, 100 + }, 101 + ]; 102 + 103 + const MISMATCHED_LEXICONS: LexiconDoc[] = [ 104 + { 
105 + lexicon: 1, 106 + id: "io.example.query", 107 + defs: { 108 + main: { 109 + type: "procedure", 110 + }, 111 + }, 112 + }, 113 + { 114 + lexicon: 1, 115 + id: "io.example.procedure", 116 + defs: { 117 + main: { 118 + type: "query", 119 + }, 120 + }, 121 + }, 122 + { 123 + lexicon: 1, 124 + id: "io.example.doesNotExist", 125 + defs: { 126 + main: { 127 + type: "query", 128 + }, 129 + }, 130 + }, 131 + ]; 132 + 133 + Deno.test({ 134 + name: "Error Tests", 135 + async fn() { 136 + const upstreamServer = xrpcServer.createServer(UPSTREAM_LEXICONS, { 137 + validateResponse: false, 138 + }); // disable validateResponse to test client validation 139 + upstreamServer.method("io.example.upstreamInvalidResponse", () => { 140 + return { encoding: "json", body: { something: "else" } }; 141 + }); 142 + const upstreamS = await createServer(upstreamServer); 143 + const upstreamPort = (upstreamS as Deno.HttpServer & { port: number }).port; 144 + const upstreamClient = new XrpcClient( 145 + `http://localhost:${upstreamPort}`, 146 + UPSTREAM_LEXICONS, 147 + ); 148 + 149 + const server = xrpcServer.createServer(LEXICONS, { 150 + validateResponse: false, 151 + }); // disable validateResponse to test client validation 152 + const s = await createServer(server); 153 + const port = (s as Deno.HttpServer & { port: number }).port; 154 + server.method("io.example.error", (ctx: xrpcServer.HandlerContext) => { 155 + if (ctx.params["which"] === "foo") { 156 + throw new xrpcServer.InvalidRequestError("It was this one!", "Foo"); 157 + } else if (ctx.params["which"] === "bar") { 158 + return { status: 400, error: "Bar", message: "It was that one!" }; 159 + } else { 160 + return { status: 400 }; 161 + } 162 + }); 163 + server.method("io.example.throwFalsyValue", () => { 164 + throw ""; 165 + }); 166 + server.method("io.example.query", () => { 167 + return undefined; 168 + }); 169 + // @ts-ignore We're intentionally giving the wrong response! 
-prf 170 + server.method("io.example.invalidResponse", () => { 171 + return { encoding: "json", body: { something: "else" } }; 172 + }); 173 + server.method("io.example.invalidUpstreamResponse", async () => { 174 + await upstreamClient.call("io.example.upstreamInvalidResponse"); 175 + return { 176 + encoding: "json", 177 + body: {}, 178 + }; 179 + }); 180 + server.method("io.example.procedure", () => { 181 + return undefined; 182 + }); 183 + 184 + const client = new XrpcClient(`http://localhost:${port}`, LEXICONS); 185 + const badClient = new XrpcClient( 186 + `http://localhost:${port}`, 187 + MISMATCHED_LEXICONS, 188 + ); 189 + 190 + // Tests 191 + await Deno.test("serves requests", async () => { 192 + await assertRejects( 193 + async () => { 194 + await client.call("io.example.error", { 195 + which: "foo", 196 + }); 197 + }, 198 + XRPCError, 199 + "It was this one!", 200 + ); 201 + 202 + const fooError = await client.call("io.example.error", { which: "foo" }) 203 + .catch((e) => e); 204 + assert(fooError instanceof XRPCError); 205 + assert(!fooError.success); 206 + assertEquals(fooError.error, "Foo"); 207 + 208 + await assertRejects( 209 + async () => { 210 + await client.call("io.example.error", { 211 + which: "bar", 212 + }); 213 + }, 214 + XRPCError, 215 + "It was that one!", 216 + ); 217 + 218 + const barError = await client.call("io.example.error", { which: "bar" }) 219 + .catch((e) => e); 220 + assert(barError instanceof XRPCError); 221 + assert(!barError.success); 222 + assertEquals(barError.error, "Bar"); 223 + 224 + await assertRejects( 225 + async () => { 226 + await client.call("io.example.throwFalsyValue"); 227 + }, 228 + XRPCError, 229 + "Internal Server Error", 230 + ); 231 + 232 + const falsyError = await client.call("io.example.throwFalsyValue").catch( 233 + (e) => e, 234 + ); 235 + assert(falsyError instanceof XRPCError); 236 + assert(!falsyError.success); 237 + assertEquals(falsyError.error, "InternalServerError"); 238 + 239 + await 
assertRejects( 240 + async () => { 241 + await client.call("io.example.error", { 242 + which: "other", 243 + }); 244 + }, 245 + XRPCError, 246 + "Invalid Request", 247 + ); 248 + 249 + const otherError = await client.call("io.example.error", { 250 + which: "other", 251 + }).catch((e) => e); 252 + assert(otherError instanceof XRPCError); 253 + assert(!otherError.success); 254 + assertEquals(otherError.error, "InvalidRequest"); 255 + 256 + await assertRejects( 257 + async () => { 258 + await client.call("io.example.invalidResponse"); 259 + }, 260 + XRPCInvalidResponseError, 261 + "The server gave an invalid response and may be out of date.", 262 + ); 263 + 264 + const invalidError = await client.call("io.example.invalidResponse") 265 + .catch((e) => e); 266 + assert(invalidError instanceof XRPCInvalidResponseError); 267 + assert(!invalidError.success); 268 + assertEquals(invalidError.error, "Invalid Response"); 269 + assertEquals( 270 + invalidError.validationError.message, 271 + 'Output must have the property "expectedValue"', 272 + ); 273 + assertEquals(invalidError.responseBody, { something: "else" }); 274 + 275 + await assertRejects( 276 + async () => { 277 + await client.call("io.example.invalidUpstreamResponse"); 278 + }, 279 + XRPCError, 280 + "Internal Server Error", 281 + ); 282 + 283 + const upstreamError = await client.call( 284 + "io.example.invalidUpstreamResponse", 285 + ).catch((e) => e); 286 + assert(upstreamError instanceof XRPCError); 287 + assert(!upstreamError.success); 288 + assertEquals(upstreamError.status, 500); 289 + assertEquals(upstreamError.error, "InternalServerError"); 290 + }); 291 + 292 + await Deno.test("serves error for missing/mismatch schemas", async () => { 293 + await client.call("io.example.query"); // No error 294 + await client.call("io.example.procedure"); // No error 295 + 296 + await assertRejects( 297 + async () => { 298 + await badClient.call("io.example.query"); 299 + }, 300 + XRPCError, 301 + "Incorrect HTTP method 
(POST) expected GET", 302 + ); 303 + 304 + const queryError = await badClient.call("io.example.query").catch((e) => 305 + e 306 + ); 307 + assert(queryError instanceof XRPCError); 308 + assert(!queryError.success); 309 + assertEquals(queryError.error, "InvalidRequest"); 310 + 311 + await assertRejects( 312 + async () => { 313 + await badClient.call("io.example.procedure"); 314 + }, 315 + XRPCError, 316 + "Incorrect HTTP method (GET) expected POST", 317 + ); 318 + 319 + const procError = await badClient.call("io.example.procedure").catch( 320 + (e) => e, 321 + ); 322 + assert(procError instanceof XRPCError); 323 + assert(!procError.success); 324 + assertEquals(procError.error, "InvalidRequest"); 325 + 326 + await assertRejects( 327 + async () => { 328 + await badClient.call("io.example.doesNotExist"); 329 + }, 330 + XRPCError, 331 + "Method Not Implemented", 332 + ); 333 + 334 + const notFoundError = await badClient.call("io.example.doesNotExist") 335 + .catch((e) => e); 336 + assert(notFoundError instanceof XRPCError); 337 + assert(!notFoundError.success); 338 + assertEquals(notFoundError.error, "MethodNotImplemented"); 339 + }); 340 + 341 + // Cleanup 342 + await closeServer(s); 343 + await closeServer(upstreamS); 344 + }, 345 + });
+226
xrpc-server/tests/frames_test.ts
··· 1 + import * as cborx from "npm:cbor-x"; 2 + import * as uint8arrays from "uint8arrays"; 3 + import { ErrorFrame, Frame, FrameType, MessageFrame } from "../mod.ts"; 4 + import { assertEquals, assertThrows } from "@std/assert"; 5 + 6 + Deno.test({ 7 + name: "Frames", 8 + fn() { 9 + Deno.test("creates and parses message frame", () => { 10 + const messageFrame = new MessageFrame( 11 + { a: "b", c: [1, 2, 3] }, 12 + { type: "#d" }, 13 + ); 14 + 15 + assertEquals(messageFrame.header, { 16 + op: FrameType.Message, 17 + t: "#d", 18 + }); 19 + assertEquals(messageFrame.op, FrameType.Message); 20 + assertEquals(messageFrame.type, "#d"); 21 + assertEquals(messageFrame.body, { a: "b", c: [1, 2, 3] }); 22 + 23 + const bytes = messageFrame.toBytes(); 24 + assertEquals( 25 + uint8arrays.equals( 26 + bytes, 27 + new Uint8Array([ 28 + /*header*/ 162, 29 + 97, 30 + 116, 31 + 98, 32 + 35, 33 + 100, 34 + 98, 35 + 111, 36 + 112, 37 + 1, 38 + /*body*/ 162, 39 + 97, 40 + 97, 41 + 97, 42 + 98, 43 + 97, 44 + 99, 45 + 131, 46 + 1, 47 + 2, 48 + 3, 49 + ]), 50 + ), 51 + true, 52 + ); 53 + 54 + const parsedFrame = Frame.fromBytes(bytes); 55 + if (!(parsedFrame instanceof MessageFrame)) { 56 + throw new Error("Did not parse as message frame"); 57 + } 58 + 59 + assertEquals(parsedFrame.header, messageFrame.header); 60 + assertEquals(parsedFrame.op, messageFrame.op); 61 + assertEquals(parsedFrame.type, messageFrame.type); 62 + assertEquals(parsedFrame.body, messageFrame.body); 63 + }); 64 + 65 + Deno.test("creates and parses error frame", () => { 66 + const errorFrame = new ErrorFrame({ 67 + error: "BigOops", 68 + message: "Something went awry", 69 + }); 70 + 71 + assertEquals(errorFrame.header, { op: FrameType.Error }); 72 + assertEquals(errorFrame.op, FrameType.Error); 73 + assertEquals(errorFrame.code, "BigOops"); 74 + assertEquals(errorFrame.message, "Something went awry"); 75 + assertEquals(errorFrame.body, { 76 + error: "BigOops", 77 + message: "Something went awry", 78 + }); 79 + 80 + 
const bytes = errorFrame.toBytes(); 81 + assertEquals( 82 + uint8arrays.equals( 83 + bytes, 84 + new Uint8Array([ 85 + /*header*/ 161, 86 + 98, 87 + 111, 88 + 112, 89 + 32, 90 + /*body*/ 162, 91 + 101, 92 + 101, 93 + 114, 94 + 114, 95 + 111, 96 + 114, 97 + 103, 98 + 66, 99 + 105, 100 + 103, 101 + 79, 102 + 111, 103 + 112, 104 + 115, 105 + 103, 106 + 109, 107 + 101, 108 + 115, 109 + 115, 110 + 97, 111 + 103, 112 + 101, 113 + 115, 114 + 83, 115 + 111, 116 + 109, 117 + 101, 118 + 116, 119 + 104, 120 + 105, 121 + 110, 122 + 103, 123 + 32, 124 + 119, 125 + 101, 126 + 110, 127 + 116, 128 + 32, 129 + 97, 130 + 119, 131 + 114, 132 + 121, 133 + ]), 134 + ), 135 + true, 136 + ); 137 + 138 + const parsedFrame = Frame.fromBytes(bytes); 139 + if (!(parsedFrame instanceof ErrorFrame)) { 140 + throw new Error("Did not parse as error frame"); 141 + } 142 + 143 + assertEquals(parsedFrame.header, errorFrame.header); 144 + assertEquals(parsedFrame.op, errorFrame.op); 145 + assertEquals(parsedFrame.code, errorFrame.code); 146 + assertEquals(parsedFrame.message, errorFrame.message); 147 + assertEquals(parsedFrame.body, errorFrame.body); 148 + }); 149 + 150 + Deno.test("parsing fails when frame is not CBOR", () => { 151 + const bytes = new Uint8Array(new TextEncoder().encode("some utf8 bytes")); 152 + const emptyBytes = new Uint8Array(0); 153 + assertThrows( 154 + () => Frame.fromBytes(bytes), 155 + Error, 156 + "Unexpected end of CBOR data", 157 + ); 158 + assertThrows( 159 + () => Frame.fromBytes(emptyBytes), 160 + Error, 161 + "Unexpected end of CBOR data", 162 + ); 163 + }); 164 + 165 + Deno.test("parsing fails when frame header is malformed", () => { 166 + const bytes = uint8arrays.concat([ 167 + cborx.encode({ op: -2 }), // Unknown op 168 + cborx.encode({ a: "b", c: [1, 2, 3] }), 169 + ]); 170 + 171 + assertThrows( 172 + () => Frame.fromBytes(bytes), 173 + Error, 174 + "Invalid frame header:", 175 + ); 176 + }); 177 + 178 + Deno.test("parsing fails when frame is missing body", () 
=> { 179 + const messageFrame = new MessageFrame( 180 + { a: "b", c: [1, 2, 3] }, 181 + { type: "#d" }, 182 + ); 183 + 184 + const headerBytes = cborx.encode(messageFrame.header); 185 + 186 + assertThrows( 187 + () => Frame.fromBytes(headerBytes), 188 + Error, 189 + "Missing frame body", 190 + ); 191 + }); 192 + 193 + Deno.test("parsing fails when frame has too many data items", () => { 194 + const messageFrame = new MessageFrame( 195 + { a: "b", c: [1, 2, 3] }, 196 + { type: "#d" }, 197 + ); 198 + 199 + const bytes = uint8arrays.concat([ 200 + messageFrame.toBytes(), 201 + cborx.encode({ d: "e", f: [4, 5, 6] }), 202 + ]); 203 + 204 + assertThrows( 205 + () => Frame.fromBytes(bytes), 206 + Error, 207 + "Too many CBOR data items in frame", 208 + ); 209 + }); 210 + 211 + Deno.test("parsing fails when error frame has invalid body", () => { 212 + const errorFrame = new ErrorFrame({ error: "BadOops" }); 213 + 214 + const bytes = uint8arrays.concat([ 215 + cborx.encode(errorFrame.header), 216 + cborx.encode({ blah: 1 }), 217 + ]); 218 + 219 + assertThrows( 220 + () => Frame.fromBytes(bytes), 221 + Error, 222 + "Invalid error frame body:", 223 + ); 224 + }); 225 + }, 226 + });
+102
xrpc-server/tests/ipld_test.ts
··· 1 + import { CID } from "multiformats/cid"; 2 + import type { LexiconDoc } from "@atproto/lexicon"; 3 + import { XrpcClient } from "@atproto/xrpc"; 4 + import * as xrpcServer from "../mod.ts"; 5 + import { closeServer, createServer } from "./_util.ts"; 6 + import { assertEquals, assertExists } from "@std/assert"; 7 + 8 + const LEXICONS: LexiconDoc[] = [ 9 + { 10 + lexicon: 1, 11 + id: "io.example.ipld", 12 + defs: { 13 + main: { 14 + type: "procedure", 15 + input: { 16 + encoding: "application/json", 17 + schema: { 18 + type: "object", 19 + properties: { 20 + cid: { 21 + type: "cid-link", 22 + }, 23 + bytes: { 24 + type: "bytes", 25 + }, 26 + }, 27 + }, 28 + }, 29 + output: { 30 + encoding: "application/json", 31 + schema: { 32 + type: "object", 33 + properties: { 34 + cid: { 35 + type: "cid-link", 36 + }, 37 + bytes: { 38 + type: "bytes", 39 + }, 40 + }, 41 + }, 42 + }, 43 + }, 44 + }, 45 + }, 46 + ]; 47 + 48 + Deno.test({ 49 + name: "IPLD Values", 50 + async fn() { 51 + // Setup 52 + const server = xrpcServer.createServer(LEXICONS); 53 + const s = await createServer(server); 54 + server.method( 55 + "io.example.ipld", 56 + (ctx: xrpcServer.HandlerContext) => { 57 + const body = ctx.input?.body as { cid: unknown; bytes: unknown }; 58 + const asCid = CID.asCID(body.cid); 59 + if (!(asCid instanceof CID)) { 60 + throw new Error("expected cid"); 61 + } 62 + const bytes = body.bytes; 63 + if (!(bytes instanceof Uint8Array)) { 64 + throw new Error("expected bytes"); 65 + } 66 + return { encoding: "application/json", body: ctx.input?.body }; 67 + }, 68 + ); 69 + 70 + // Setup server and client 71 + const port = (s as Deno.HttpServer & { port: number }).port; 72 + const client = new XrpcClient(`http://localhost:${port}`, LEXICONS); 73 + 74 + try { 75 + Deno.test("can send and receive ipld vals", async () => { 76 + const cid = CID.parse( 77 + "bafyreidfayvfuwqa7qlnopdjiqrxzs6blmoeu4rujcjtnci5beludirz2a", 78 + ); 79 + const bytes = new Uint8Array([0, 1, 2, 3]); 80 + 
const res = await client.call( 81 + "io.example.ipld", 82 + {}, 83 + { 84 + cid, 85 + bytes, 86 + }, 87 + { encoding: "application/json" }, 88 + ); 89 + assertExists(res.success); 90 + assertEquals( 91 + res.headers["content-type"], 92 + "application/json; charset=utf-8", 93 + ); 94 + assertExists(cid.equals(res.data.cid)); 95 + assertEquals(bytes, res.data.bytes); 96 + }); 97 + } finally { 98 + // Cleanup 99 + await closeServer(s); 100 + } 101 + }, 102 + });
+190
xrpc-server/tests/parameters_test.ts
··· 1 + import type { LexiconDoc } from "@atproto/lexicon"; 2 + import { XrpcClient } from "@atproto/xrpc"; 3 + import * as xrpcServer from "../mod.ts"; 4 + import { closeServer, createServer } from "./_util.ts"; 5 + import { assertEquals, assertRejects } from "@std/assert"; 6 + 7 + const LEXICONS: LexiconDoc[] = [ 8 + { 9 + lexicon: 1, 10 + id: "io.example.paramTest", 11 + defs: { 12 + main: { 13 + type: "query", 14 + parameters: { 15 + type: "params", 16 + required: ["str", "int", "bool", "arr"], 17 + properties: { 18 + str: { type: "string", minLength: 2, maxLength: 10 }, 19 + int: { type: "integer", minimum: 2, maximum: 10 }, 20 + bool: { type: "boolean" }, 21 + arr: { type: "array", items: { type: "integer" }, maxLength: 2 }, 22 + def: { type: "integer", default: 0 }, 23 + }, 24 + }, 25 + output: { 26 + encoding: "application/json", 27 + }, 28 + }, 29 + }, 30 + }, 31 + ]; 32 + 33 + Deno.test({ 34 + name: "Parameters", 35 + async fn() { 36 + // Setup 37 + const server = xrpcServer.createServer(LEXICONS); 38 + server.method( 39 + "io.example.paramTest", 40 + (ctx: { params: xrpcServer.Params }) => ({ 41 + encoding: "json", 42 + body: ctx.params, 43 + }), 44 + ); 45 + 46 + const s = await createServer(server); 47 + const port = (s as Deno.HttpServer & { port: number }).port; 48 + const client = new XrpcClient(`http://localhost:${port}`, LEXICONS); 49 + 50 + try { 51 + Deno.test("validates query params", async () => { 52 + const res1 = await client.call("io.example.paramTest", { 53 + str: "valid", 54 + int: 5, 55 + bool: true, 56 + arr: [1, 2], 57 + def: 5, 58 + }); 59 + assertEquals(res1.success, true); 60 + assertEquals(res1.data.str, "valid"); 61 + assertEquals(res1.data.int, 5); 62 + assertEquals(res1.data.bool, true); 63 + assertEquals(res1.data.arr, [1, 2]); 64 + assertEquals(res1.data.def, 5); 65 + 66 + const res2 = await client.call("io.example.paramTest", { 67 + str: 10, 68 + int: "5", 69 + bool: "foo", 70 + arr: "3", 71 + }); 72 + 
assertEquals(res2.success, true); 73 + assertEquals(res2.data.str, "10"); 74 + assertEquals(res2.data.int, 5); 75 + assertEquals(res2.data.bool, true); 76 + assertEquals(res2.data.arr, [3]); 77 + assertEquals(res2.data.def, 0); 78 + 79 + // Test validation errors 80 + await assertRejects( 81 + () => 82 + client.call("io.example.paramTest", { 83 + str: "n", 84 + int: 5, 85 + bool: true, 86 + arr: [1], 87 + }), 88 + Error, 89 + "str must not be shorter than 2 characters", 90 + ); 91 + 92 + await assertRejects( 93 + () => 94 + client.call("io.example.paramTest", { 95 + str: "loooooooooooooong", 96 + int: 5, 97 + bool: true, 98 + arr: [1], 99 + }), 100 + Error, 101 + "str must not be longer than 10 characters", 102 + ); 103 + 104 + await assertRejects( 105 + () => 106 + client.call("io.example.paramTest", { 107 + int: 5, 108 + bool: true, 109 + arr: [1], 110 + }), 111 + Error, 112 + 'Params must have the property "str"', 113 + ); 114 + 115 + await assertRejects( 116 + () => 117 + client.call("io.example.paramTest", { 118 + str: "valid", 119 + int: -1, 120 + bool: true, 121 + arr: [1], 122 + }), 123 + Error, 124 + "int can not be less than 2", 125 + ); 126 + 127 + await assertRejects( 128 + () => 129 + client.call("io.example.paramTest", { 130 + str: "valid", 131 + int: 11, 132 + bool: true, 133 + arr: [1], 134 + }), 135 + Error, 136 + "int can not be greater than 10", 137 + ); 138 + 139 + await assertRejects( 140 + () => 141 + client.call("io.example.paramTest", { 142 + str: "valid", 143 + bool: true, 144 + arr: [1], 145 + }), 146 + Error, 147 + 'Params must have the property "int"', 148 + ); 149 + 150 + await assertRejects( 151 + () => 152 + client.call("io.example.paramTest", { 153 + str: "valid", 154 + int: 5, 155 + arr: [1], 156 + }), 157 + Error, 158 + 'Params must have the property "bool"', 159 + ); 160 + 161 + await assertRejects( 162 + () => 163 + client.call("io.example.paramTest", { 164 + str: "valid", 165 + int: 5, 166 + bool: true, 167 + arr: [], 168 + }), 
169 + Error, 170 + 'Error: Params must have the property "arr"', 171 + ); 172 + 173 + await assertRejects( 174 + () => 175 + client.call("io.example.paramTest", { 176 + str: "valid", 177 + int: 5, 178 + bool: true, 179 + arr: [1, 2, 3], 180 + }), 181 + Error, 182 + "Error: arr must not have more than 2 elements", 183 + ); 184 + }); 185 + } finally { 186 + // Cleanup 187 + await closeServer(s); 188 + } 189 + }, 190 + });
+93
xrpc-server/tests/parsing_test.ts
··· 1 + import { parseUrlNsid } from "../util.ts"; 2 + import { assertEquals, assertThrows } from "@std/assert"; 3 + 4 + const testValid = (url: string, expected: string) => { 5 + assertEquals(parseUrlNsid(url), expected); 6 + }; 7 + 8 + const testInvalid = (url: string, errorMessage = "invalid xrpc path") => { 9 + assertThrows(() => parseUrlNsid(url), Error, errorMessage); 10 + }; 11 + 12 + Deno.test({ 13 + name: "parseUrlNsid", 14 + fn() { 15 + Deno.test("should extract the NSID from the URL", () => { 16 + testValid("/xrpc/blee.blah.bloo", "blee.blah.bloo"); 17 + testValid("/xrpc/blee.blah.bloo?foo[]", "blee.blah.bloo"); 18 + testValid("/xrpc/blee.blah.bloo?foo=bar", "blee.blah.bloo"); 19 + testValid("/xrpc/com.example.nsid", "com.example.nsid"); 20 + testValid("/xrpc/com.example.nsid?foo=bar", "com.example.nsid"); 21 + testValid("/xrpc/com.example-domain.nsid", "com.example-domain.nsid"); 22 + }); 23 + 24 + Deno.test("should allow a trailing slash", () => { 25 + testValid("/xrpc/blee.blah.bloo/?", "blee.blah.bloo"); 26 + testValid("/xrpc/blee.blah.bloo/?foo=", "blee.blah.bloo"); 27 + testValid("/xrpc/blee.blah.bloo/?bool", "blee.blah.bloo"); 28 + testValid("/xrpc/com.example.nsid/", "com.example.nsid"); 29 + }); 30 + 31 + Deno.test("should throw an error if the URL is too short", () => { 32 + testInvalid("/xrpc/a"); 33 + }); 34 + 35 + Deno.test("should throw an error if the URL is empty", () => { 36 + testInvalid(""); 37 + }); 38 + 39 + Deno.test("should throw an error if the URL is missing the NSID", () => { 40 + testInvalid("/xrpc/"); 41 + testInvalid("/xrpc/?"); 42 + testInvalid("/xrpc/?foo=bar"); 43 + }); 44 + 45 + Deno.test("should throw an error if the URL contains extra path segments", () => { 46 + testInvalid("/xrpc/123/extra"); 47 + testInvalid("/xrpc/123/extra?foo=bar"); 48 + }); 49 + 50 + Deno.test("should throw an error if the URL is missing the XRPC path prefix", () => { 51 + testInvalid("/foo/123"); 52 + testInvalid("/foo/com.example.nsid"); 53 + 
}); 54 + 55 + Deno.test("should throw an error if the NSID starts with a dot", () => { 56 + testInvalid("/xrpc/."); 57 + testInvalid("/xrpc/.."); 58 + testInvalid("/xrpc/...."); 59 + testInvalid("/xrpc/.com.example.nsid"); 60 + testInvalid("/xrpc/com..example.nsid"); 61 + testInvalid("/xrpc/com.example..nsid"); 62 + testInvalid("/xrpc/com.example.nsid."); 63 + testInvalid("/xrpc/com.example.nsid./"); 64 + testInvalid("/xrpc/com.example.nsid.?foo=bar"); 65 + testInvalid("/xrpc/com.example.nsid./?foo=bar"); 66 + }); 67 + 68 + Deno.test("should throw an error if the NSID contains a misplaced dash", () => { 69 + testInvalid("/xrpc/-"); 70 + testInvalid("/xrpc/com.example.-nsid"); 71 + testInvalid("/xrpc/com.example-.nsid"); 72 + testInvalid("/xrpc/com.-example.nsid"); 73 + testInvalid("/xrpc/com.-example-.nsid"); 74 + testInvalid("/xrpc/com.example.nsid-"); 75 + testInvalid("/xrpc/-com.example.nsid"); 76 + testInvalid("/xrpc/com.example--domain.nsid"); 77 + }); 78 + 79 + Deno.test("should throw an error if the URL starts with a space", () => { 80 + testInvalid(" /xrpc/com.example.nsid"); 81 + }); 82 + 83 + Deno.test("should throw an error if the NSID contains invalid characters", () => { 84 + testInvalid("/xrpc/com.example.nsid#"); 85 + testInvalid("/xrpc/com.example.nsid!"); 86 + testInvalid("/xrpc/com.example#?nsid"); 87 + testInvalid("/xrpc/!com.example.nsid"); 88 + testInvalid("/xrpc/com.example.nsid "); 89 + testInvalid("/xrpc/ com.example.nsid"); 90 + testInvalid("/xrpc/com. example.nsid"); 91 + }); 92 + }, 93 + });
+172
xrpc-server/tests/procedures_test.ts
··· 1 + import type { LexiconDoc } from "@atproto/lexicon"; 2 + import { XrpcClient } from "@atproto/xrpc"; 3 + import * as xrpcServer from "../mod.ts"; 4 + import { closeServer, createServer } from "./_util.ts"; 5 + import { assertEquals } from "@std/assert"; 6 + 7 + const LEXICONS: LexiconDoc[] = [ 8 + { 9 + lexicon: 1, 10 + id: "io.example.pingOne", 11 + defs: { 12 + main: { 13 + type: "procedure", 14 + parameters: { 15 + type: "params", 16 + properties: { 17 + message: { type: "string" }, 18 + }, 19 + }, 20 + output: { 21 + encoding: "text/plain", 22 + }, 23 + }, 24 + }, 25 + }, 26 + { 27 + lexicon: 1, 28 + id: "io.example.pingTwo", 29 + defs: { 30 + main: { 31 + type: "procedure", 32 + input: { 33 + encoding: "text/plain", 34 + }, 35 + output: { 36 + encoding: "text/plain", 37 + }, 38 + }, 39 + }, 40 + }, 41 + { 42 + lexicon: 1, 43 + id: "io.example.pingThree", 44 + defs: { 45 + main: { 46 + type: "procedure", 47 + input: { 48 + encoding: "application/octet-stream", 49 + }, 50 + output: { 51 + encoding: "application/octet-stream", 52 + }, 53 + }, 54 + }, 55 + }, 56 + { 57 + lexicon: 1, 58 + id: "io.example.pingFour", 59 + defs: { 60 + main: { 61 + type: "procedure", 62 + input: { 63 + encoding: "application/json", 64 + schema: { 65 + type: "object", 66 + required: ["message"], 67 + properties: { message: { type: "string" } }, 68 + }, 69 + }, 70 + output: { 71 + encoding: "application/json", 72 + schema: { 73 + type: "object", 74 + required: ["message"], 75 + properties: { message: { type: "string" } }, 76 + }, 77 + }, 78 + }, 79 + }, 80 + }, 81 + ]; 82 + 83 + Deno.test({ 84 + name: "Procedures", 85 + async fn() { 86 + // Setup 87 + const server = xrpcServer.createServer(LEXICONS); 88 + server.method( 89 + "io.example.pingOne", 90 + (ctx: xrpcServer.HandlerContext) => { 91 + return { encoding: "text/plain", body: ctx.params.message }; 92 + }, 93 + ); 94 + server.method( 95 + "io.example.pingTwo", 96 + (ctx: xrpcServer.HandlerContext) => { 97 + return { 
encoding: "text/plain", body: ctx.input?.body }; 98 + }, 99 + ); 100 + server.method( 101 + "io.example.pingThree", 102 + (ctx: xrpcServer.HandlerContext) => { 103 + return { 104 + encoding: "application/octet-stream", 105 + body: ctx.input?.body, 106 + }; 107 + }, 108 + ); 109 + server.method( 110 + "io.example.pingFour", 111 + (ctx: xrpcServer.HandlerContext) => { 112 + const body = ctx.input?.body as { message: string }; 113 + return { 114 + encoding: "application/json", 115 + body: { message: body?.message }, 116 + }; 117 + }, 118 + ); 119 + 120 + const s = await createServer(server); 121 + const port = (s as Deno.HttpServer & { port: number }).port; 122 + const client = new XrpcClient(`http://localhost:${port}`, LEXICONS); 123 + 124 + try { 125 + Deno.test("serves requests", async () => { 126 + const res1 = await client.call("io.example.pingOne", { 127 + message: "hello world", 128 + }); 129 + assertEquals(res1.success, true); 130 + assertEquals(res1.headers["content-type"], "text/plain; charset=utf-8"); 131 + assertEquals(res1.data, "hello world"); 132 + 133 + const res2 = await client.call( 134 + "io.example.pingTwo", 135 + {}, 136 + "hello world", 137 + { 138 + encoding: "text/plain", 139 + }, 140 + ); 141 + assertEquals(res2.success, true); 142 + assertEquals(res2.headers["content-type"], "text/plain; charset=utf-8"); 143 + assertEquals(res2.data, "hello world"); 144 + 145 + const res3 = await client.call( 146 + "io.example.pingThree", 147 + {}, 148 + new TextEncoder().encode("hello world"), 149 + { encoding: "application/octet-stream" }, 150 + ); 151 + assertEquals(res3.success, true); 152 + assertEquals(res3.headers["content-type"], "application/octet-stream"); 153 + assertEquals(new TextDecoder().decode(res3.data), "hello world"); 154 + 155 + const res4 = await client.call( 156 + "io.example.pingFour", 157 + {}, 158 + { message: "hello world" }, 159 + ); 160 + assertEquals(res4.success, true); 161 + assertEquals( 162 + res4.headers["content-type"], 163 
+ "application/json; charset=utf-8", 164 + ); 165 + assertEquals(res4.data?.message, "hello world"); 166 + }); 167 + } finally { 168 + // Cleanup 169 + await closeServer(s); 170 + } 171 + }, 172 + });
+137
xrpc-server/tests/queries_test.ts
··· 1 + import type { LexiconDoc } from "@atproto/lexicon"; 2 + import { XrpcClient } from "@atproto/xrpc"; 3 + import * as xrpcServer from "../mod.ts"; 4 + import { closeServer, createServer } from "./_util.ts"; 5 + import { assertEquals, assertExists } from "@std/assert"; 6 + 7 + const LEXICONS: LexiconDoc[] = [ 8 + { 9 + lexicon: 1, 10 + id: "io.example.pingOne", 11 + defs: { 12 + main: { 13 + type: "query", 14 + parameters: { 15 + type: "params", 16 + properties: { 17 + message: { type: "string" }, 18 + }, 19 + }, 20 + output: { 21 + encoding: "text/plain", 22 + }, 23 + }, 24 + }, 25 + }, 26 + { 27 + lexicon: 1, 28 + id: "io.example.pingTwo", 29 + defs: { 30 + main: { 31 + type: "query", 32 + parameters: { 33 + type: "params", 34 + properties: { 35 + message: { type: "string" }, 36 + }, 37 + }, 38 + output: { 39 + encoding: "application/octet-stream", 40 + }, 41 + }, 42 + }, 43 + }, 44 + { 45 + lexicon: 1, 46 + id: "io.example.pingThree", 47 + defs: { 48 + main: { 49 + type: "query", 50 + parameters: { 51 + type: "params", 52 + properties: { 53 + message: { type: "string" }, 54 + }, 55 + }, 56 + output: { 57 + encoding: "application/json", 58 + schema: { 59 + type: "object", 60 + required: ["message"], 61 + properties: { message: { type: "string" } }, 62 + }, 63 + }, 64 + }, 65 + }, 66 + }, 67 + ]; 68 + 69 + Deno.test({ 70 + name: "Queries", 71 + async fn() { 72 + // Setup 73 + const server = xrpcServer.createServer(LEXICONS); 74 + server.method( 75 + "io.example.pingOne", 76 + (ctx: { params: xrpcServer.Params }) => { 77 + return { encoding: "text/plain", body: ctx.params.message }; 78 + }, 79 + ); 80 + server.method( 81 + "io.example.pingTwo", 82 + (ctx: { params: xrpcServer.Params }) => { 83 + return { 84 + encoding: "application/octet-stream", 85 + body: new TextEncoder().encode(String(ctx.params.message)), 86 + }; 87 + }, 88 + ); 89 + server.method( 90 + "io.example.pingThree", 91 + (ctx: { params: xrpcServer.Params }) => { 92 + return { 93 + encoding: 
"application/json", 94 + body: { message: ctx.params.message }, 95 + headers: { "x-test-header-name": "test-value" }, 96 + }; 97 + }, 98 + ); 99 + 100 + // Create server and client 101 + const s = await createServer(server); 102 + const port = (s as Deno.HttpServer & { port: number }).port; 103 + const client = new XrpcClient(`http://localhost:${port}`, LEXICONS); 104 + 105 + try { 106 + Deno.test("serves requests", async () => { 107 + const res1 = await client.call("io.example.pingOne", { 108 + message: "hello world", 109 + }); 110 + assertExists(res1.success); 111 + assertEquals(res1.headers["content-type"], "text/plain; charset=utf-8"); 112 + assertEquals(res1.data, "hello world"); 113 + 114 + const res2 = await client.call("io.example.pingTwo", { 115 + message: "hello world", 116 + }); 117 + assertExists(res2.success); 118 + assertEquals(res2.headers["content-type"], "application/octet-stream"); 119 + assertEquals(new TextDecoder().decode(res2.data), "hello world"); 120 + 121 + const res3 = await client.call("io.example.pingThree", { 122 + message: "hello world", 123 + }); 124 + assertExists(res3.success); 125 + assertEquals( 126 + res3.headers["content-type"], 127 + "application/json; charset=utf-8", 128 + ); 129 + assertEquals(res3.data?.message, "hello world"); 130 + assertEquals(res3.headers["x-test-header-name"], "test-value"); 131 + }); 132 + } finally { 133 + // Cleanup 134 + await closeServer(s); 135 + } 136 + }, 137 + });
+351
xrpc-server/tests/rate-limiter_test.ts
··· 1 + import { MINUTE } from "@atproto/common"; 2 + import type { LexiconDoc } from "@atproto/lexicon"; 3 + import { XrpcClient } from "@atproto/xrpc"; 4 + import * as xrpcServer from "../mod.ts"; 5 + import { closeServer, createServer } from "./_util.ts"; 6 + import { assertRejects } from "@std/assert"; 7 + 8 + const LEXICONS: LexiconDoc[] = [ 9 + { 10 + lexicon: 1, 11 + id: "io.example.routeLimit", 12 + defs: { 13 + main: { 14 + type: "query", 15 + parameters: { 16 + type: "params", 17 + required: ["str"], 18 + properties: { 19 + str: { type: "string" }, 20 + }, 21 + }, 22 + output: { 23 + encoding: "application/json", 24 + }, 25 + }, 26 + }, 27 + }, 28 + { 29 + lexicon: 1, 30 + id: "io.example.routeLimitReset", 31 + defs: { 32 + main: { 33 + type: "query", 34 + parameters: { 35 + type: "params", 36 + required: ["count"], 37 + properties: { 38 + count: { type: "integer" }, 39 + }, 40 + }, 41 + output: { 42 + encoding: "application/json", 43 + }, 44 + }, 45 + }, 46 + }, 47 + { 48 + lexicon: 1, 49 + id: "io.example.sharedLimitOne", 50 + defs: { 51 + main: { 52 + type: "query", 53 + parameters: { 54 + type: "params", 55 + required: ["points"], 56 + properties: { 57 + points: { type: "integer" }, 58 + }, 59 + }, 60 + output: { 61 + encoding: "application/json", 62 + }, 63 + }, 64 + }, 65 + }, 66 + { 67 + lexicon: 1, 68 + id: "io.example.sharedLimitTwo", 69 + defs: { 70 + main: { 71 + type: "query", 72 + parameters: { 73 + type: "params", 74 + required: ["points"], 75 + properties: { 76 + points: { type: "integer" }, 77 + }, 78 + }, 79 + output: { 80 + encoding: "application/json", 81 + }, 82 + }, 83 + }, 84 + }, 85 + { 86 + lexicon: 1, 87 + id: "io.example.toggleLimit", 88 + defs: { 89 + main: { 90 + type: "query", 91 + parameters: { 92 + type: "params", 93 + properties: { 94 + shouldCount: { type: "boolean" }, 95 + }, 96 + }, 97 + output: { 98 + encoding: "application/json", 99 + }, 100 + }, 101 + }, 102 + }, 103 + { 104 + lexicon: 1, 105 + id: 
"io.example.noLimit", 106 + defs: { 107 + main: { 108 + type: "query", 109 + output: { 110 + encoding: "application/json", 111 + }, 112 + }, 113 + }, 114 + }, 115 + { 116 + lexicon: 1, 117 + id: "io.example.nonExistent", 118 + defs: { 119 + main: { 120 + type: "query", 121 + output: { 122 + encoding: "application/json", 123 + }, 124 + }, 125 + }, 126 + }, 127 + ]; 128 + 129 + Deno.test({ 130 + name: "Rate Limiter Tests", 131 + async fn() { 132 + // Setup 133 + const server = xrpcServer.createServer(LEXICONS, { 134 + rateLimits: { 135 + creator: (opts) => new xrpcServer.MemoryRateLimiter(opts), 136 + bypass: (ctx) => ctx.req.headers.get("x-ratelimit-bypass") === "bypass", 137 + shared: [ 138 + { 139 + name: "shared-limit", 140 + durationMs: 5 * MINUTE, 141 + points: 6, 142 + }, 143 + ], 144 + global: [ 145 + { 146 + name: "global-ip", 147 + durationMs: 5 * MINUTE, 148 + points: 100, 149 + }, 150 + ], 151 + }, 152 + }); 153 + 154 + server.method("io.example.routeLimit", { 155 + rateLimit: { 156 + durationMs: 5 * MINUTE, 157 + points: 5, 158 + calcKey: (ctx) => 159 + (ctx as xrpcServer.HandlerContext).params.str as string, 160 + }, 161 + handler: (ctx: xrpcServer.HandlerContext) => ({ 162 + encoding: "application/json", 163 + body: ctx.params, 164 + }), 165 + }); 166 + 167 + server.method("io.example.routeLimitReset", { 168 + rateLimit: { 169 + durationMs: 5 * MINUTE, 170 + points: 2, 171 + }, 172 + handler: (ctx: xrpcServer.HandlerContext) => { 173 + if (ctx.params.count === 1) { 174 + ctx.resetRouteRateLimits(); 175 + } 176 + 177 + return { 178 + encoding: "application/json", 179 + body: {}, 180 + }; 181 + }, 182 + }); 183 + 184 + server.method("io.example.sharedLimitOne", { 185 + rateLimit: { 186 + name: "shared-limit", 187 + calcPoints: (ctx) => 188 + (ctx as xrpcServer.HandlerContext).params.points as number, 189 + }, 190 + handler: (ctx: xrpcServer.HandlerContext) => ({ 191 + encoding: "application/json", 192 + body: ctx.params, 193 + }), 194 + }); 195 + 196 + 
server.method("io.example.sharedLimitTwo", { 197 + rateLimit: { 198 + name: "shared-limit", 199 + calcPoints: (ctx) => 200 + (ctx as xrpcServer.HandlerContext).params.points as number, 201 + }, 202 + handler: (ctx: xrpcServer.HandlerContext) => ({ 203 + encoding: "application/json", 204 + body: ctx.params, 205 + }), 206 + }); 207 + 208 + server.method("io.example.toggleLimit", { 209 + rateLimit: [ 210 + { 211 + durationMs: 5 * MINUTE, 212 + points: 5, 213 + calcPoints: ( 214 + ctx, 215 + ) => ((ctx as xrpcServer.HandlerContext).params.shouldCount ? 1 : 0), 216 + }, 217 + { 218 + durationMs: 5 * MINUTE, 219 + points: 10, 220 + }, 221 + ], 222 + handler: (ctx: xrpcServer.HandlerContext) => ({ 223 + encoding: "application/json", 224 + body: ctx.params, 225 + }), 226 + }); 227 + 228 + server.method("io.example.noLimit", { 229 + handler: () => ({ 230 + encoding: "application/json", 231 + body: {}, 232 + }), 233 + }); 234 + 235 + // Create server and client 236 + const s = await createServer(server); 237 + const port = (s as Deno.HttpServer & { port: number }).port; 238 + const client = new XrpcClient(`http://localhost:${port}`, LEXICONS); 239 + 240 + try { 241 + Deno.test("rate limits a given route", async () => { 242 + const makeCall = () => 243 + client.call("io.example.routeLimit", { str: "test" }); 244 + for (let i = 0; i < 5; i++) { 245 + await makeCall(); 246 + } 247 + await assertRejects( 248 + () => makeCall(), 249 + Error, 250 + "Rate Limit Exceeded", 251 + ); 252 + }); 253 + 254 + Deno.test("can reset route rate limits", async () => { 255 + // Limit is 2. 256 + // Call 0 is OK (1/2). 257 + // Call 1 is OK (2/2), and resets the limit. 258 + // Call 2 is OK (1/2). 259 + // Call 3 is OK (2/2). 260 + for (let i = 0; i < 4; i++) { 261 + await client.call("io.example.routeLimitReset", { count: i }); 262 + } 263 + 264 + // Call 4 exceeds the limit (3/2). 
265 + await assertRejects( 266 + () => client.call("io.example.routeLimitReset", { count: 4 }), 267 + Error, 268 + "Rate Limit Exceeded", 269 + ); 270 + }); 271 + 272 + Deno.test("rate limits on a shared route", async () => { 273 + await client.call("io.example.sharedLimitOne", { points: 1 }); 274 + await client.call("io.example.sharedLimitTwo", { points: 1 }); 275 + await client.call("io.example.sharedLimitOne", { points: 2 }); 276 + await client.call("io.example.sharedLimitTwo", { points: 2 }); 277 + await assertRejects( 278 + () => client.call("io.example.sharedLimitOne", { points: 1 }), 279 + Error, 280 + "Rate Limit Exceeded", 281 + ); 282 + await assertRejects( 283 + () => client.call("io.example.sharedLimitTwo", { points: 1 }), 284 + Error, 285 + "Rate Limit Exceeded", 286 + ); 287 + }); 288 + 289 + Deno.test("applies multiple rate-limits", async () => { 290 + const makeCall = (shouldCount: boolean) => 291 + client.call("io.example.toggleLimit", { shouldCount }); 292 + for (let i = 0; i < 5; i++) { 293 + await makeCall(true); 294 + } 295 + await assertRejects( 296 + () => makeCall(true), 297 + Error, 298 + "Rate Limit Exceeded", 299 + ); 300 + for (let i = 0; i < 4; i++) { 301 + await makeCall(false); 302 + } 303 + await assertRejects( 304 + () => makeCall(false), 305 + Error, 306 + "Rate Limit Exceeded", 307 + ); 308 + }); 309 + 310 + Deno.test("applies global limits", async () => { 311 + const makeCall = () => client.call("io.example.noLimit"); 312 + const calls: Promise<unknown>[] = []; 313 + for (let i = 0; i < 110; i++) { 314 + calls.push(makeCall()); 315 + } 316 + await assertRejects( 317 + () => Promise.all(calls), 318 + Error, 319 + "Rate Limit Exceeded", 320 + ); 321 + }); 322 + 323 + Deno.test("applies global limits to xrpc catchall", async () => { 324 + const makeCall = () => client.call("io.example.nonExistent"); 325 + await assertRejects( 326 + () => makeCall(), 327 + Error, 328 + "Rate Limit Exceeded", 329 + ); 330 + }); 331 + 332 + 
Deno.test("can bypass rate limits", async () => { 333 + const makeCall = () => 334 + client.call( 335 + "io.example.noLimit", 336 + {}, 337 + {}, 338 + { headers: { "X-RateLimit-Bypass": "bypass" } }, 339 + ); 340 + const calls: Promise<unknown>[] = []; 341 + for (let i = 0; i < 110; i++) { 342 + calls.push(makeCall()); 343 + } 344 + await Promise.all(calls); 345 + }); 346 + } finally { 347 + // Cleanup 348 + await closeServer(s); 349 + } 350 + }, 351 + });
+87
xrpc-server/tests/responses_test.ts
··· 1 + import { byteIterableToStream } from "@atproto/common"; 2 + import type { LexiconDoc } from "@atproto/lexicon"; 3 + import { XrpcClient } from "@atproto/xrpc"; 4 + import * as xrpcServer from "../mod.ts"; 5 + import { closeServer, createServer } from "./_util.ts"; 6 + import { assertEquals, assertInstanceOf } from "@std/assert"; 7 + 8 + const LEXICONS: LexiconDoc[] = [ 9 + { 10 + lexicon: 1, 11 + id: "io.example.readableStream", 12 + defs: { 13 + main: { 14 + type: "query", 15 + parameters: { 16 + type: "params", 17 + properties: { 18 + shouldErr: { type: "boolean" }, 19 + }, 20 + }, 21 + output: { 22 + encoding: "application/vnd.ipld.car", 23 + }, 24 + }, 25 + }, 26 + }, 27 + ]; 28 + 29 + Deno.test({ 30 + name: "Responses", 31 + async fn() { 32 + // Setup 33 + const server = xrpcServer.createServer(LEXICONS); 34 + server.method( 35 + "io.example.readableStream", 36 + (ctx: { params: xrpcServer.Params }) => { 37 + async function* iter(): AsyncIterable<Uint8Array> { 38 + for (let i = 0; i < 5; i++) { 39 + yield new Uint8Array([i]); 40 + } 41 + if (ctx.params.shouldErr) { 42 + throw new Error("error"); 43 + } 44 + } 45 + return { 46 + encoding: "application/vnd.ipld.car", 47 + body: byteIterableToStream(iter()), 48 + }; 49 + }, 50 + ); 51 + 52 + // Create server and client 53 + const s = await createServer(server); 54 + const port = (s as Deno.HttpServer & { port: number }).port; 55 + const client = new XrpcClient(`http://localhost:${port}`, LEXICONS); 56 + 57 + try { 58 + Deno.test("returns readable streams of bytes", async () => { 59 + const res = await client.call("io.example.readableStream", { 60 + shouldErr: false, 61 + }); 62 + const expected = new Uint8Array([0, 1, 2, 3, 4]); 63 + assertEquals(res.data, expected); 64 + }); 65 + 66 + Deno.test("handles errs on readable streams of bytes", async () => { 67 + const originalConsoleError = console.error; 68 + console.error = () => {}; // Suppress expected error log 69 + 70 + let err: unknown; 71 + try { 72 + 
await client.call("io.example.readableStream", { 73 + shouldErr: true, 74 + }); 75 + } catch (e) { 76 + err = e; 77 + } 78 + assertInstanceOf(err, Error); 79 + 80 + console.error = originalConsoleError; // Restore 81 + }); 82 + } finally { 83 + // Cleanup 84 + await closeServer(s); 85 + } 86 + }, 87 + });
+175
xrpc-server/tests/stream_test.ts
··· 1 + import { XRPCError } from "@atproto/xrpc"; 2 + import { 3 + byFrame, 4 + byMessage, 5 + ErrorFrame, 6 + type Frame, 7 + MessageFrame, 8 + XrpcStreamServer, 9 + } from "../mod.ts"; 10 + import { assertEquals, assertInstanceOf } from "@std/assert"; 11 + 12 + const wait = (ms: number) => new Promise((res) => setTimeout(res, ms)); 13 + 14 + // Helper to create a test server 15 + function createTestServer( 16 + handlerFn: () => AsyncGenerator<Frame, void, unknown>, 17 + ) { 18 + const server = new XrpcStreamServer({ 19 + noServer: true, 20 + handler: handlerFn, 21 + }); 22 + 23 + const httpServer = Deno.serve({ port: 0 }, (req) => { 24 + if (req.headers.get("upgrade")?.toLowerCase() === "websocket") { 25 + const { socket, response } = Deno.upgradeWebSocket(req); 26 + server.wss.emit("connection", socket, req); 27 + return response; 28 + } 29 + return new Response("Not Found", { status: 404 }); 30 + }); 31 + 32 + const addr = httpServer.addr as Deno.NetAddr; 33 + return { 34 + server, 35 + url: `ws://localhost:${addr.port}`, 36 + close: () => { 37 + server.wss.close(); 38 + httpServer.unref(); 39 + }, 40 + }; 41 + } 42 + 43 + Deno.test({ 44 + name: "Stream Tests", 45 + fn() { 46 + Deno.test("streams message and info frames", async () => { 47 + const { url, close } = createTestServer(async function* () { 48 + await wait(1); 49 + yield new MessageFrame(1); 50 + await wait(1); 51 + yield new MessageFrame(2); 52 + await wait(1); 53 + yield new MessageFrame(3); 54 + return; 55 + }); 56 + 57 + const ws = new WebSocket(url); 58 + const frames: Frame[] = []; 59 + for await (const frame of byFrame(ws)) { 60 + frames.push(frame); 61 + } 62 + 63 + assertEquals(frames, [ 64 + new MessageFrame(1), 65 + new MessageFrame(2), 66 + new MessageFrame(3), 67 + ]); 68 + 69 + close(); 70 + }); 71 + 72 + Deno.test("kills handler and closes on error frame", async () => { 73 + let proceededAfterError = false; 74 + const { url, close } = createTestServer(async function* () { 75 + await 
wait(1); 76 + yield new MessageFrame(1); 77 + await wait(1); 78 + yield new MessageFrame(2); 79 + await wait(1); 80 + yield new ErrorFrame({ error: "BadOops" }); 81 + proceededAfterError = true; 82 + await wait(1); 83 + yield new MessageFrame(3); 84 + return; 85 + }); 86 + 87 + const ws = new WebSocket(url); 88 + const frames: Frame[] = []; 89 + for await (const frame of byFrame(ws)) { 90 + frames.push(frame); 91 + } 92 + 93 + await wait(5); // Ensure handler hasn't kept running 94 + assertEquals(proceededAfterError, false); 95 + 96 + assertEquals(frames, [ 97 + new MessageFrame(1), 98 + new MessageFrame(2), 99 + new ErrorFrame({ error: "BadOops" }), 100 + ]); 101 + 102 + close(); 103 + }); 104 + 105 + Deno.test("kills handler and closes client disconnect", async () => { 106 + let i = 1; 107 + const { url, close } = createTestServer(async function* () { 108 + while (true) { 109 + await wait(0); 110 + yield new MessageFrame(i++); 111 + } 112 + }); 113 + 114 + const ws = new WebSocket(url); 115 + const frames: Frame[] = []; 116 + for await (const frame of byFrame(ws)) { 117 + frames.push(frame); 118 + if (frame.body === 3) ws.close(); 119 + } 120 + 121 + // Grace period to let close take place on the server 122 + await wait(5); 123 + // Ensure handler hasn't kept running 124 + const currentCount = i; 125 + await wait(5); 126 + assertEquals(i, currentCount); 127 + 128 + close(); 129 + }); 130 + 131 + Deno.test("byMessage() tests", async (t) => { 132 + await t.step( 133 + "kills handler and closes client disconnect on error frame", 134 + async () => { 135 + const { url, close } = createTestServer(async function* () { 136 + await wait(1); 137 + yield new MessageFrame(1); 138 + await wait(1); 139 + yield new MessageFrame(2); 140 + await wait(1); 141 + yield new ErrorFrame({ 142 + error: "BadOops", 143 + message: "That was a bad one", 144 + }); 145 + await wait(1); 146 + yield new MessageFrame(3); 147 + return; 148 + }); 149 + 150 + const ws = new WebSocket(url); 151 + 
const frames: Frame[] = []; 152 + 153 + let error: unknown; 154 + try { 155 + for await (const frame of byMessage(ws)) { 156 + frames.push(frame); 157 + } 158 + } catch (err) { 159 + error = err; 160 + } 161 + 162 + assertEquals(ws.readyState, WebSocket.CLOSING); 163 + assertEquals(frames, [new MessageFrame(1), new MessageFrame(2)]); 164 + assertInstanceOf(error, XRPCError); 165 + if (error instanceof XRPCError) { 166 + assertEquals(error.error, "BadOops"); 167 + assertEquals(error.message, "That was a bad one"); 168 + } 169 + 170 + close(); 171 + }, 172 + ); 173 + }); 174 + }, 175 + });
+423
xrpc-server/tests/subscriptions_test.ts
··· 1 + import { WebSocket, type WebSocketServer } from "ws"; 2 + import { wait } from "@atproto/common"; 3 + import type { LexiconDoc } from "@atproto/lexicon"; 4 + import { 5 + byFrame, 6 + ErrorFrame, 7 + type Frame, 8 + MessageFrame, 9 + Subscription, 10 + } from "../mod.ts"; 11 + import * as xrpcServer from "../mod.ts"; 12 + import { 13 + basicAuthHeaders, 14 + closeServer, 15 + createServer, 16 + createStreamBasicAuth, 17 + } from "./_util.ts"; 18 + import { assertEquals, assertGreater, assertRejects } from "@std/assert"; 19 + 20 + const LEXICONS: LexiconDoc[] = [ 21 + { 22 + lexicon: 1, 23 + id: "io.example.streamOne", 24 + defs: { 25 + main: { 26 + type: "subscription", 27 + parameters: { 28 + type: "params", 29 + required: ["countdown"], 30 + properties: { 31 + countdown: { type: "integer" }, 32 + }, 33 + }, 34 + message: { 35 + schema: { 36 + type: "object", 37 + required: ["count"], 38 + properties: { count: { type: "integer" } }, 39 + }, 40 + }, 41 + }, 42 + }, 43 + }, 44 + { 45 + lexicon: 1, 46 + id: "io.example.streamTwo", 47 + defs: { 48 + main: { 49 + type: "subscription", 50 + parameters: { 51 + type: "params", 52 + required: ["countdown"], 53 + properties: { 54 + countdown: { type: "integer" }, 55 + }, 56 + }, 57 + message: { 58 + schema: { 59 + type: "union", 60 + refs: ["#even", "#odd"], 61 + }, 62 + }, 63 + }, 64 + even: { 65 + type: "object", 66 + required: ["count"], 67 + properties: { count: { type: "integer" } }, 68 + }, 69 + odd: { 70 + type: "object", 71 + required: ["count"], 72 + properties: { count: { type: "integer" } }, 73 + }, 74 + }, 75 + }, 76 + { 77 + lexicon: 1, 78 + id: "io.example.streamAuth", 79 + defs: { 80 + main: { 81 + type: "subscription", 82 + }, 83 + }, 84 + }, 85 + ]; 86 + 87 + Deno.test({ 88 + name: "Subscriptions", 89 + async fn() { 90 + let s: Deno.HttpServer; 91 + const server = xrpcServer.createServer(LEXICONS); 92 + const lex = server.lex; 93 + 94 + server.streamMethod( 95 + "io.example.streamOne", 96 + async 
function* ({ params }: { params: xrpcServer.Params }) { 97 + const countdown = Number(params.countdown ?? 0); 98 + for (let i = countdown; i >= 0; i--) { 99 + await wait(0); 100 + yield { count: i }; 101 + } 102 + }, 103 + ); 104 + 105 + server.streamMethod( 106 + "io.example.streamTwo", 107 + async function* ({ params }: { params: xrpcServer.Params }) { 108 + const countdown = Number(params.countdown ?? 0); 109 + for (let i = countdown; i >= 0; i--) { 110 + await wait(200); 111 + yield { 112 + $type: i % 2 === 0 ? "#even" : "io.example.streamTwo#odd", 113 + count: i, 114 + }; 115 + } 116 + yield { 117 + $type: "io.example.otherNsid#done", 118 + }; 119 + }, 120 + ); 121 + 122 + server.streamMethod("io.example.streamAuth", { 123 + auth: createStreamBasicAuth({ username: "admin", password: "password" }), 124 + handler: async function* ({ auth }: { auth: unknown }) { 125 + yield auth; 126 + }, 127 + }); 128 + 129 + let addr: Deno.Addr; 130 + 131 + // Setup server before tests 132 + s = await createServer(server); 133 + addr = (s as Deno.HttpServer).addr; 134 + 135 + try { 136 + Deno.test("streams messages", async () => { 137 + const ws = new WebSocket( 138 + `ws://${addr}/xrpc/io.example.streamOne?countdown=5`, 139 + ); 140 + 141 + const frames: Frame[] = []; 142 + for await (const frame of byFrame(ws)) { 143 + frames.push(frame); 144 + } 145 + 146 + assertEquals(frames, [ 147 + new MessageFrame({ count: 5 }), 148 + new MessageFrame({ count: 4 }), 149 + new MessageFrame({ count: 3 }), 150 + new MessageFrame({ count: 2 }), 151 + new MessageFrame({ count: 1 }), 152 + new MessageFrame({ count: 0 }), 153 + ]); 154 + }); 155 + 156 + Deno.test("streams messages in a union", async () => { 157 + const ws = new WebSocket( 158 + `ws://${addr}/xrpc/io.example.streamTwo?countdown=5`, 159 + ); 160 + 161 + const frames: Frame[] = []; 162 + for await (const frame of byFrame(ws)) { 163 + frames.push(frame); 164 + } 165 + 166 + assertEquals(frames, [ 167 + new MessageFrame({ count: 5 
}, { type: "#odd" }), 168 + new MessageFrame({ count: 4 }, { type: "#even" }), 169 + new MessageFrame({ count: 3 }, { type: "#odd" }), 170 + new MessageFrame({ count: 2 }, { type: "#even" }), 171 + new MessageFrame({ count: 1 }, { type: "#odd" }), 172 + new MessageFrame({ count: 0 }, { type: "#even" }), 173 + new MessageFrame({}, { type: "io.example.otherNsid#done" }), 174 + ]); 175 + }); 176 + 177 + Deno.test("resolves auth into handler", async () => { 178 + const ws = new WebSocket( 179 + `ws://${addr}/xrpc/io.example.streamAuth`, 180 + { 181 + headers: basicAuthHeaders({ 182 + username: "admin", 183 + password: "password", 184 + }), 185 + }, 186 + ); 187 + 188 + const frames: Frame[] = []; 189 + for await (const frame of byFrame(ws)) { 190 + frames.push(frame); 191 + } 192 + 193 + assertEquals(frames, [ 194 + new MessageFrame({ 195 + credentials: { 196 + username: "admin", 197 + }, 198 + artifacts: { 199 + original: "YWRtaW46cGFzc3dvcmQ=", 200 + }, 201 + }), 202 + ]); 203 + }); 204 + 205 + Deno.test("errors immediately on bad parameter", async () => { 206 + const ws = new WebSocket( 207 + `ws://${addr}/xrpc/io.example.streamOne`, 208 + ); 209 + 210 + const frames: Frame[] = []; 211 + for await (const frame of byFrame(ws)) { 212 + frames.push(frame); 213 + } 214 + 215 + assertEquals(frames, [ 216 + new ErrorFrame({ 217 + error: "InvalidRequest", 218 + message: 'Error: Params must have the property "countdown"', 219 + }), 220 + ]); 221 + }); 222 + 223 + Deno.test("errors immediately on bad auth", async () => { 224 + const ws = new WebSocket( 225 + `ws://${addr}/xrpc/io.example.streamAuth`, 226 + { 227 + headers: basicAuthHeaders({ 228 + username: "bad", 229 + password: "wrong", 230 + }), 231 + }, 232 + ); 233 + 234 + const frames: Frame[] = []; 235 + for await (const frame of byFrame(ws)) { 236 + frames.push(frame); 237 + } 238 + 239 + assertEquals(frames, [ 240 + new ErrorFrame({ 241 + error: "AuthenticationRequired", 242 + message: "Authentication Required", 243 
+ }), 244 + ]); 245 + }); 246 + 247 + Deno.test("does not websocket upgrade at bad endpoint", async () => { 248 + const ws = new WebSocket(`ws://${addr}/xrpc/does.not.exist`); 249 + await assertRejects( 250 + () => 251 + new Promise((_, reject) => { 252 + ws.onerror = () => reject(new Error("ECONNRESET")); 253 + }), 254 + Error, 255 + "ECONNRESET", 256 + ); 257 + }); 258 + 259 + Deno.test("subscription consumer tests", async (t) => { 260 + await t.step("receives messages w/ skips", async () => { 261 + const sub = new Subscription({ 262 + service: `ws://${addr}`, 263 + method: "io.example.streamOne", 264 + getParams: () => ({ countdown: 5 }), 265 + validate: (obj: unknown) => { 266 + const result = lex.assertValidXrpcMessage<{ count: number }>( 267 + "io.example.streamOne", 268 + obj, 269 + ); 270 + if (!result.count || result.count % 2) { 271 + return result; 272 + } 273 + }, 274 + }); 275 + 276 + const messages: { count: number }[] = []; 277 + for await (const msg of sub) { 278 + const typedMsg = msg as { count: number }; 279 + messages.push(typedMsg); 280 + } 281 + 282 + assertEquals(messages, [ 283 + { count: 5 }, 284 + { count: 3 }, 285 + { count: 1 }, 286 + { count: 0 }, 287 + ]); 288 + }); 289 + 290 + await t.step("reconnects w/ param update", async () => { 291 + let countdown = 10; 292 + let reconnects = 0; 293 + const sub = new Subscription({ 294 + service: `ws://${addr}`, 295 + method: "io.example.streamOne", 296 + onReconnectError: () => reconnects++, 297 + getParams: () => ({ countdown }), 298 + validate: (obj: unknown) => { 299 + return lex.assertValidXrpcMessage<{ count: number }>( 300 + "io.example.streamOne", 301 + obj, 302 + ); 303 + }, 304 + }); 305 + 306 + let disconnected = false; 307 + for await (const msg of sub) { 308 + const typedMsg = msg as { count: number }; 309 + assertEquals(typedMsg.count >= countdown - 1, true); // No skips 310 + countdown = Math.min(countdown, typedMsg.count); // Only allow forward movement 311 + if (typedMsg.count <= 
6 && !disconnected) { 312 + disconnected = true; 313 + server.subscriptions.forEach( 314 + ({ wss }: { wss: WebSocketServer }) => { 315 + wss.clients.forEach((c: WebSocket) => c.terminate()); 316 + }, 317 + ); 318 + } 319 + } 320 + 321 + assertEquals(countdown, 0); 322 + assertGreater(reconnects, 0); 323 + }); 324 + 325 + await t.step("aborts with signal", async () => { 326 + const abortController = new AbortController(); 327 + const sub = new Subscription({ 328 + service: `ws://${addr}`, 329 + method: "io.example.streamOne", 330 + signal: abortController.signal, 331 + getParams: () => ({ countdown: 10 }), 332 + validate: (obj: unknown) => { 333 + const result = lex.assertValidXrpcMessage<{ count: number }>( 334 + "io.example.streamOne", 335 + obj, 336 + ); 337 + return result; 338 + }, 339 + }); 340 + 341 + let error: unknown; 342 + let disconnected = false; 343 + const messages: { count: number }[] = []; 344 + try { 345 + for await (const msg of sub) { 346 + const typedMsg = msg as { count: number }; 347 + messages.push(typedMsg); 348 + if (typedMsg.count <= 6 && !disconnected) { 349 + disconnected = true; 350 + abortController.abort(new Error("Oops!")); 351 + } 352 + } 353 + } catch (err) { 354 + error = err; 355 + } 356 + 357 + assertEquals(error, new Error("Oops!")); 358 + assertEquals(messages, [ 359 + { count: 10 }, 360 + { count: 9 }, 361 + { count: 8 }, 362 + { count: 7 }, 363 + { count: 6 }, 364 + ]); 365 + }); 366 + }); 367 + 368 + Deno.test("closing websocket server while client connected", async (t) => { 369 + // First close the current server 370 + if (s) { 371 + await closeServer(s); 372 + } 373 + 374 + await t.step( 375 + "uses heartbeat to reconnect if connection dropped", 376 + async () => { 377 + // Run a server that pauses longer than heartbeat interval on first connection 378 + const localPort = 6003; 379 + const server = Deno.serve( 380 + { port: localPort }, 381 + () => new Response(), 382 + ); 383 + const firstWasClosed = false; 384 + const 
firstSocketClosed = new Promise<void>((resolve) => { 385 + // TODO: Implement WebSocket server handling in Deno 386 + resolve(); 387 + }); 388 + 389 + const subscription = new Subscription({ 390 + service: `ws://localhost:${localPort}`, 391 + method: "", 392 + heartbeatIntervalMs: 500, 393 + validate: (obj: unknown) => { 394 + return lex.assertValidXrpcMessage<{ count: number }>( 395 + "io.example.streamOne", 396 + obj, 397 + ); 398 + }, 399 + }); 400 + 401 + const messages: { count: number }[] = []; 402 + for await (const msg of subscription) { 403 + const typedMsg = msg as { count: number }; 404 + messages.push(typedMsg); 405 + } 406 + 407 + await firstSocketClosed; 408 + assertEquals(messages, [{ count: 1 }]); 409 + assertEquals(firstWasClosed, true); 410 + await server.shutdown(); 411 + }, 412 + ); 413 + 414 + // Restart the server for other tests 415 + s = await createServer(server); 416 + addr = (s as Deno.HttpServer).addr; 417 + }); 418 + } finally { 419 + // Cleanup 420 + if (s) await closeServer(s); 421 + } 422 + }, 423 + });
+478
xrpc-server/types.ts
import { z } from "zod";
import type { ErrorResult, XRPCError } from "./errors.ts";
import type { CalcKeyFn, CalcPointsFn } from "./rate-limiter.ts";
import type { RateLimiterI } from "./rate-limiter.ts";

/**
 * Represents a value that can be either synchronous or asynchronous.
 * @template T - The type of the value
 */
export type Awaitable<T> = T | Promise<T>;

/**
 * Handler function for catching all unmatched routes.
 * @param req - The HTTP request object
 * @param res - NOTE(review): a fetch-style handler normally builds its own
 *   Response; the purpose of this second parameter is not visible here —
 *   confirm against the call site before relying on it.
 * @returns A promise that resolves to a Response
 */
export type CatchallHandler = (
  req: Request,
  res: Response,
) => Promise<Response>;

/**
 * Configuration options for the XRPC server.
 */
export type Options = {
  /** Whether to validate response schemas */
  validateResponse?: boolean;
  /** Handler for catching all unmatched routes */
  catchall?: CatchallHandler;
  /** Payload size limits for different content types */
  payload?: RouteOptions;
  /** Rate limiting configuration */
  rateLimits?: {
    /** Factory function for creating rate limiters */
    creator: RateLimiterCreator<HandlerContext>;
    /** Global rate limits applied to all routes */
    global?: ServerRateLimitDescription<HandlerContext>[];
    /** Shared rate limits that can be referenced by name */
    shared?: ServerRateLimitDescription<HandlerContext>[];
    /** Function to determine if rate limits should be bypassed for a request */
    bypass?: (ctx: HandlerContext) => boolean;
  };
  /**
   * By default, errors are converted to {@link XRPCError} using
   * {@link XRPCError.fromError} before being rendered. If method handlers throw
   * error objects that are not properly rendered in the HTTP response, this
   * function can be used to properly convert them to {@link XRPCError}. The
   * provided function will typically fallback to the default error conversion
   * (`return XRPCError.fromError(err)`) if the error is not recognized.
   *
   * @note This function should not throw errors.
   */
  errorParser?: (err: unknown) => XRPCError;
};

/**
 * Basic primitive types supported in XRPC parameters.
 */
export type Primitive = string | number | boolean;

/**
 * Type-safe parameter object with optional primitive values or arrays.
 */
export type Params = { [P in string]?: undefined | Primitive | Primitive[] };

/**
 * Input data for XRPC method handlers.
 */
export type HandlerInput = {
  /** Content encoding of the request body */
  encoding: string;
  /** Parsed request body */
  body: unknown;
};

/**
 * Result of successful authentication.
 */
export type AuthResult = {
  /** Authentication credentials (e.g., user info, tokens) */
  credentials: unknown;
  /** Optional authentication artifacts (e.g., session data) */
  artifacts?: unknown;
};

export const headersSchema: z.ZodRecord<z.ZodString, z.ZodString> = z.record(
  z.string(),
  z.string(),
);

/**
 * HTTP headers as a record of string key-value pairs.
 */
export type Headers = z.infer<typeof headersSchema>;

export const handlerSuccess: z.ZodObject<{
  encoding: z.ZodString;
  body: z.ZodAny;
  headers: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>;
}> = z.object({
  encoding: z.string(),
  body: z.any(),
  headers: headersSchema.optional(),
});

/**
 * Successful response from a method handler.
 */
export type HandlerSuccess = z.infer<typeof handlerSuccess>;

/**
 * Handler response that pipes through a buffer.
 */
export type HandlerPipeThroughBuffer = {
  /** Content encoding of the response */
  encoding: string;
  /** Response data as a buffer */
  buffer: Uint8Array;
  /** Optional HTTP headers */
  headers?: Headers;
};

/**
 * Handler response that pipes through a stream.
 */
export type HandlerPipeThroughStream = {
  /** Content encoding of the response */
  encoding: string;
  /** Response data as a readable stream */
  stream: ReadableStream<Uint8Array>;
  /** Optional HTTP headers */
  headers?: Headers;
};

/**
 * Union type for handler responses that pipe data through either a buffer or stream.
 */
export type HandlerPipeThrough =
  | HandlerPipeThroughBuffer
  | HandlerPipeThroughStream;

/**
 * Authentication state for a handler context.
 */
export type Auth = void | AuthResult;

/**
 * Input data for a handler context.
 */
export type Input = void | HandlerInput;

/**
 * Output data from a handler.
 */
export type Output = void | HandlerSuccess | ErrorResult;

/**
 * Function that verifies authentication for a request.
 * @template C - The context type
 * @template A - The authentication result type
 */
export type AuthVerifier<C, A extends AuthResult = AuthResult> =
  | ((ctx: C) => Awaitable<A | ErrorResult>)
  | ((ctx: C) => Awaitable<A>);

/**
 * Context object provided to XRPC method handlers containing request data and utilities.
 * @template A - Authentication type
 * @template P - Parameters type
 * @template I - Input type
 */
export type HandlerContext<
  A extends Auth = Auth,
  P extends Params = Params,
  I extends Input = Input,
> = MethodAuthContext<P> & {
  /** Authentication result */
  auth: A;
  /** Request input data */
  input: I;
  /** Function to reset rate limits for this route */
  resetRouteRateLimits: () => Promise<void>;
};

/**
 * Handler function for XRPC methods.
 * @template A - Authentication type
 * @template P - Parameters type
 * @template I - Input type
 * @template O - Output type
 */
export type MethodHandler<
  A extends Auth = Auth,
  P extends Params = Params,
  I extends Input = Input,
  O extends Output = Output,
> = (ctx: HandlerContext<A, P, I>) => Awaitable<O | HandlerPipeThrough>;

/**
 * Factory function for creating rate limiter instances.
 * @template T - The handler context type
 */
export type RateLimiterCreator<T extends HandlerContext = HandlerContext> = <
  C extends T = T,
>(opts: {
  /** Prefix for rate limiter keys */
  keyPrefix: string;
  /** Duration window in milliseconds */
  durationMs: number;
  /** Number of points allowed in the duration window */
  points: number;
  /** Function to calculate the rate limit key */
  calcKey: CalcKeyFn<C>;
  /** Function to calculate points consumed */
  calcPoints: CalcPointsFn<C>;
  /** Whether to fail closed (deny) when rate limiter is unavailable */
  failClosed?: boolean;
}) => RateLimiterI<C>;

/**
 * Context object for method authentication containing request data.
 * @template P - Parameters type
 * @template I - Input type
 */
export type MethodAuthContext<
  P extends Params = Params,
  I extends Input = Input,
> = {
  /** Parsed request parameters */
  params: P;
  /** Request input data */
  input: I;
  /** HTTP request object */
  req: Request;
  /** HTTP response object */
  res: Response;
};

/**
 * Authentication verifier function for XRPC methods.
 * @template A - Authentication result type
 * @template P - Parameters type
 * @template I - Input type
 */
export type MethodAuthVerifier<
  A extends AuthResult = AuthResult,
  P extends Params = Params,
  I extends Input = Input,
> = (ctx: MethodAuthContext<P, I>) => Awaitable<A>;

/**
 * Context object for streaming handlers.
 * @template A - Authentication type
 * @template P - Parameters type
 */
export type StreamContext<
  A extends Auth = Auth,
  P extends Params = Params,
> = StreamAuthContext<P> & {
  /** Authentication result */
  auth: A;
  /** Abort signal for cancelling the stream */
  signal: AbortSignal;
};

/**
 * Handler function for streaming XRPC endpoints.
 * @template A - Authentication type
 * @template P - Parameters type
 * @template O - Output item type
 */
export type StreamHandler<
  A extends Auth = Auth,
  P extends Params = Params,
  O = unknown,
> = (ctx: StreamContext<A, P>) => AsyncIterable<O>;

/**
 * Context object for stream authentication.
 * @template P - Parameters type
 */
export type StreamAuthContext<P extends Params = Params> = {
  /** Parsed request parameters */
  params: P;
  /** HTTP request object */
  req: Request;
};

/**
 * Authentication verifier function for streaming endpoints.
 * @template A - Authentication result type
 * @template P - Parameters type
 */
export type StreamAuthVerifier<
  A extends AuthResult = AuthResult,
  P extends Params = Params,
> = AuthVerifier<StreamAuthContext<P>, A>;

/**
 * Configuration for server-level rate limits.
 * @template C - Handler context type
 */
export type ServerRateLimitDescription<
  C extends HandlerContext = HandlerContext,
> = {
  /** Unique name for this rate limit */
  name: string;
  /** Duration window in milliseconds */
  durationMs: number;
  /** Number of points allowed in the duration window */
  points: number;
  /** Optional function to calculate the rate limit key */
  calcKey?: CalcKeyFn<C>;
  /** Optional function to calculate points consumed */
  calcPoints?: CalcPointsFn<C>;
  /** Whether to fail closed when rate limiter is unavailable */
  failClosed?: boolean;
};

/**
 * Options for referencing a shared rate limit by name.
 * @template C - Handler context type
 */
export type SharedRateLimitOpts<C extends HandlerContext = HandlerContext> = {
  /** Name of the shared rate limit to use */
  name: string;
  /** Optional function to calculate the rate limit key */
  calcKey?: CalcKeyFn<C>;
  /** Optional function to calculate points consumed */
  calcPoints?: CalcPointsFn<C>;
};

/**
 * Options for defining a route-specific rate limit.
 * @template C - Handler context type
 */
export type RouteRateLimitOpts<C extends HandlerContext = HandlerContext> = {
  /** Duration window in milliseconds */
  durationMs: number;
  /** Number of points allowed in the duration window */
  points: number;
  /** Optional function to calculate the rate limit key */
  calcKey?: CalcKeyFn<C>;
  /** Optional function to calculate points consumed */
  calcPoints?: CalcPointsFn<C>;
};

/**
 * Union type for rate limit options - either shared or route-specific.
 * @template C - Handler context type
 */
export type RateLimitOpts<C extends HandlerContext = HandlerContext> =
  | SharedRateLimitOpts<C>
  | RouteRateLimitOpts<C>;

/**
 * Type guard to check if rate limit options are for a shared rate limit.
 * The two variants are discriminated purely by the presence of a string
 * `name` property, which only {@link SharedRateLimitOpts} declares.
 * @template C - Handler context type
 * @param opts Rate limit options to check
 * @returns True if the options reference a shared rate limit
 */
export function isSharedRateLimitOpts<
  C extends HandlerContext = HandlerContext,
>(opts: RateLimitOpts<C>): opts is SharedRateLimitOpts<C> {
  return "name" in opts && typeof opts.name === "string";
}

/**
 * Options for configuring payload size limits by content type.
 */
export type RouteOptions = {
  /** Maximum size for binary/blob payloads in bytes */
  blobLimit?: number;
  /** Maximum size for JSON payloads in bytes */
  jsonLimit?: number;
  /** Maximum size for text payloads in bytes */
  textLimit?: number;
};

/**
 * Simplified route options with only blob limit configuration.
 */
export type RouteOpts = {
  /** Maximum size for binary/blob payloads in bytes */
  blobLimit?: number;
};

/**
 * Configuration object for an XRPC method including handler, auth, and options.
 * @template A - Authentication type
 * @template P - Parameters type
 * @template I - Input type
 * @template O - Output type
 */
export type MethodConfig<
  A extends Auth = Auth,
  P extends Params = Params,
  I extends Input = Input,
  O extends Output = Output,
> = {
  /** The method handler function */
  handler: MethodHandler<A, P, I, O>;
  /** Optional authentication verifier */
  auth?: MethodAuthVerifier<Extract<A, AuthResult>, P>;
  /** Optional route configuration */
  opts?: RouteOptions;
  /** Optional rate limiting configuration */
  rateLimit?:
    | RateLimitOpts<HandlerContext<A, P, I>>
    | RateLimitOpts<HandlerContext<A, P, I>>[];
};

/**
 * Union type allowing either a simple handler function or full method configuration.
 * @template A - Authentication type
 * @template P - Parameters type
 * @template I - Input type
 * @template O - Output type
 */
export type MethodConfigOrHandler<
  A extends Auth = Auth,
  P extends Params = Params,
  I extends Input = Input,
  O extends Output = Output,
> = MethodHandler<A, P, I, O> | MethodConfig<A, P, I, O>;

/**
 * Configuration object for a streaming XRPC endpoint.
 * @template A - Authentication type
 * @template P - Parameters type
 * @template O - Output item type
 */
export type StreamConfig<
  A extends Auth = Auth,
  P extends Params = Params,
  O = unknown,
> = {
  /** Optional authentication verifier for the stream */
  auth?: StreamAuthVerifier<Extract<A, AuthResult>, P>;
  /** The stream handler function */
  handler: StreamHandler<A, P, O>;
};

/**
 * Union type allowing either a simple stream handler or full stream configuration.
 * @template A - Authentication type
 * @template P - Parameters type
 * @template O - Output item type
 */
export type StreamConfigOrHandler<
  A extends Auth = Auth,
  P extends Params = Params,
  O = unknown,
> = StreamHandler<A, P, O> | StreamConfig<A, P, O>;

/**
 * Type guard to check if handler output is a pipe-through buffer response.
 * @param output - The handler output to check
 * @returns True if the output is a buffer pipe-through response
 */
export function isHandlerPipeThroughBuffer(
  output: Output | HandlerPipeThrough,
): output is HandlerPipeThroughBuffer {
  // We only need to discriminate between possible Output values
  return output != null && "buffer" in output && output["buffer"] !== undefined;
}

/**
 * Type guard to check if handler output is a pipe-through stream response.
 * @param output - The handler output to check
 * @returns True if the output is a stream pipe-through response
 */
export function isHandlerPipeThroughStream(
  output: Output | HandlerPipeThrough,
): output is HandlerPipeThroughStream {
  // We only need to discriminate between possible Output values
  return output != null && "stream" in output && output["stream"] !== undefined;
}
+547
xrpc-server/util.ts
import type {
  Lexicons,
  LexXrpcProcedure,
  LexXrpcQuery,
  LexXrpcSubscription,
} from "@atproto/lexicon";
import { jsonToLex } from "@atproto/lexicon";
import { InternalServerError, InvalidRequestError } from "./errors.ts";
import { handlerSuccess } from "./types.ts";
import type { HandlerInput, HandlerSuccess, Params } from "./types.ts";
import type { Context, HonoRequest } from "hono";

/** Throws an Error with the given message when the condition is falsy. */
function assert(condition: unknown, message?: string): asserts condition {
  if (condition) return;
  throw new Error(message || "Assertion failed");
}

/**
 * Decodes query parameters from HTTP request into typed parameters.
 * Handles type conversion for strings, numbers, booleans, and arrays based on lexicon definitions.
 * @param def - The lexicon definition containing parameter schema
 * @param params - Raw query parameters from the HTTP request
 * @returns Decoded and type-converted parameters
 */
export function decodeQueryParams(
  def: LexXrpcProcedure | LexXrpcQuery | LexXrpcSubscription,
  params: Record<string, string | string[]>,
): Params {
  const decoded: Params = {};
  const properties = def.parameters?.properties;
  if (!properties) {
    return decoded;
  }

  for (const [name, property] of Object.entries(properties)) {
    const raw = params[name];
    // Only decode parameters that are both declared and present.
    if (!property || raw === undefined) continue;

    if (property.type === "array") {
      // Normalize to an array, then decode each element by the item type,
      // dropping anything that fails to decode.
      const values = (Array.isArray(raw) ? raw : [raw]).filter(
        (v) => v !== undefined,
      );
      const itemType = property.items?.type || "string";
      decoded[name] = values
        .map((v) => decodeQueryParam(itemType, v))
        .filter((v) => v !== undefined) as (string | number | boolean)[];
    } else {
      // A repeated query key for a scalar parameter keeps only the first value.
      const single = Array.isArray(raw) ? raw[0] : raw;
      decoded[name] = decodeQueryParam(property.type, single);
    }
  }
  return decoded;
}

/**
 * Decodes a single query parameter value based on its expected type.
 * Converts string values to appropriate JavaScript types (string, number, boolean).
 * @param type - The expected parameter type from the lexicon
 * @param value - The raw parameter value from the query string
 * @returns The decoded parameter value or undefined if conversion fails
 */
export function decodeQueryParam(
  type: string,
  value: unknown,
): string | number | boolean | undefined {
  // Falsy raw values (empty string, etc.) decode to "absent".
  if (!value) {
    return undefined;
  }
  switch (type) {
    case "string":
    case "datetime":
      return String(value);
    case "float":
      return Number(String(value));
    case "integer":
      // Unparseable integers collapse to 0 rather than NaN.
      return parseInt(String(value), 10) || 0;
    case "boolean":
      return value === "true";
    default:
      // Unknown lexicon types are not decoded.
      return undefined;
  }
}

/**
 * Extracts query parameters from a URL and returns them as arrays of strings.
 * @param url - The URL to parse (defaults to empty string)
 * @returns Object mapping parameter names to arrays of values
 */
export function getQueryParams(url = ""): Record<string, string[]> {
  // The base URL makes relative paths (e.g. "/xrpc/foo?x=1") parseable.
  const parsed = new URL(url ?? "", "http://x");
  const result: Record<string, string[]> = {};
  for (const [key, value] of parsed.searchParams) {
    (result[key] ??= []).push(value);
  }
  return result;
}

/**
 * Represents a request-like object with essential HTTP request properties.
 * Used for handling both standard HTTP requests and custom request implementations.
98 + */ 99 + export type RequestLike = { 100 + headers: Headers | { [key: string]: string | string[] | undefined }; 101 + body?: ReadableStream | unknown; 102 + method?: string; 103 + url?: string; 104 + signal?: AbortSignal; 105 + }; 106 + 107 + /** 108 + * Validates the input of an XRPC method against its lexicon definition. 109 + * Performs content-type validation, body presence checks, and schema validation. 110 + * @param nsid - The namespace identifier of the method 111 + * @param def - The lexicon definition for the method 112 + * @param body - The request body content 113 + * @param contentType - The Content-Type header value 114 + * @param lexicons - The lexicon registry for schema validation 115 + * @returns Validated handler input or undefined for methods without input 116 + * @throws {InvalidRequestError} If validation fails 117 + */ 118 + export async function validateInput( 119 + nsid: string, 120 + def: LexXrpcProcedure | LexXrpcQuery, 121 + body: unknown, 122 + contentType: string | undefined | null, 123 + lexicons: Lexicons, 124 + ): Promise<HandlerInput | undefined> { 125 + let processedBody: unknown | Uint8Array = body; 126 + if (body instanceof ReadableStream) { 127 + const reader = body.getReader(); 128 + const chunks: Uint8Array[] = []; 129 + while (true) { 130 + const { done, value } = await reader.read(); 131 + if (done) break; 132 + chunks.push(value); 133 + } 134 + const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0); 135 + const tempBody = new Uint8Array(totalLength); 136 + let offset = 0; 137 + for (const chunk of chunks) { 138 + tempBody.set(chunk, offset); 139 + offset += chunk.length; 140 + } 141 + processedBody = tempBody; 142 + } 143 + 144 + const bodyPresence = getBodyPresence(processedBody, contentType); 145 + if (bodyPresence === "present" && (def.type !== "procedure" || !def.input)) { 146 + throw new InvalidRequestError( 147 + `A request body was provided when none was expected`, 148 + ); 149 + } 150 + if 
(def.type === "query") { 151 + return; 152 + } 153 + if (bodyPresence === "missing" && def.input) { 154 + throw new InvalidRequestError( 155 + `A request body is expected but none was provided`, 156 + ); 157 + } 158 + 159 + // mimetype 160 + const inputEncoding = normalizeMime(contentType || ""); 161 + if ( 162 + def.input?.encoding && 163 + (!inputEncoding || !isValidEncoding(def.input?.encoding, inputEncoding)) 164 + ) { 165 + if (!inputEncoding) { 166 + throw new InvalidRequestError( 167 + `Request encoding (Content-Type) required but not provided`, 168 + ); 169 + } else { 170 + throw new InvalidRequestError( 171 + `Wrong request encoding (Content-Type): ${inputEncoding}`, 172 + ); 173 + } 174 + } 175 + 176 + if (!inputEncoding) { 177 + // no input body 178 + return undefined; 179 + } 180 + 181 + // if input schema, validate 182 + if (def.input?.schema) { 183 + try { 184 + const lexBody = processedBody ? jsonToLex(processedBody) : processedBody; 185 + processedBody = lexicons.assertValidXrpcInput(nsid, lexBody); 186 + } catch (e) { 187 + throw new InvalidRequestError(e instanceof Error ? e.message : String(e)); 188 + } 189 + } 190 + 191 + return { 192 + encoding: inputEncoding, 193 + body: processedBody, 194 + }; 195 + } 196 + 197 + /** 198 + * Validates the output of an XRPC method against its lexicon definition. 199 + * Performs response body validation, content-type checks, and schema validation. 
200 + * @param nsid - The namespace identifier of the method 201 + * @param def - The lexicon definition for the method 202 + * @param output - The handler output to validate 203 + * @param lexicons - The lexicon registry for schema validation 204 + * @throws {InternalServerError} If validation fails 205 + */ 206 + export function validateOutput( 207 + nsid: string, 208 + def: LexXrpcProcedure | LexXrpcQuery, 209 + output: HandlerSuccess | undefined, 210 + lexicons: Lexicons, 211 + ): void { 212 + // initial validation 213 + if (output) { 214 + handlerSuccess.parse(output); 215 + } 216 + 217 + // response expectation 218 + if (output?.body && !def.output) { 219 + throw new InternalServerError( 220 + `A response body was provided when none was expected`, 221 + ); 222 + } 223 + if (!output?.body && def.output) { 224 + throw new InternalServerError( 225 + `A response body is expected but none was provided`, 226 + ); 227 + } 228 + 229 + // mimetype 230 + if ( 231 + def.output?.encoding && 232 + (!output?.encoding || 233 + !isValidEncoding(def.output?.encoding, output?.encoding)) 234 + ) { 235 + throw new InternalServerError( 236 + `Invalid response encoding: ${output?.encoding}`, 237 + ); 238 + } 239 + 240 + // output schema 241 + if (def.output?.schema) { 242 + try { 243 + const result = lexicons.assertValidXrpcOutput(nsid, output?.body); 244 + if (output) { 245 + output.body = result; 246 + } 247 + } catch (e) { 248 + throw new InternalServerError(e instanceof Error ? e.message : String(e)); 249 + } 250 + } 251 + } 252 + 253 + /** 254 + * Normalizes a MIME type by extracting the base type and converting to lowercase. 255 + * Removes parameters (e.g., charset) from the MIME type. 
256 + * @param mime - The MIME type string to normalize 257 + * @returns The normalized MIME type (base type only) 258 + */ 259 + export function normalizeMime(mime: string): string { 260 + const [base] = mime.split(";"); 261 + return base.trim().toLowerCase(); 262 + } 263 + 264 + /** 265 + * Checks if an actual encoding matches the expected encoding. 266 + * Supports wildcard matching and JSON aliases. 267 + * @param expected - The expected encoding from the lexicon 268 + * @param actual - The actual encoding from the request 269 + * @returns True if the encodings are compatible 270 + */ 271 + function isValidEncoding(expected: string, actual: string): boolean { 272 + if (expected === "*/*") return true; 273 + if (expected === actual) return true; 274 + if (expected === "application/json" && actual === "json") return true; 275 + return false; 276 + } 277 + 278 + /** 279 + * Determines if a request body is present or missing. 280 + * Considers empty strings and empty arrays as missing when no content type is provided. 281 + * @param body - The request body 282 + * @param contentType - The Content-Type header value 283 + * @returns "present" if body exists, "missing" otherwise 284 + */ 285 + function getBodyPresence( 286 + body: unknown, 287 + contentType: string | undefined | null, 288 + ): "present" | "missing" { 289 + if (body === undefined || body === null) { 290 + return "missing"; 291 + } 292 + if (typeof body === "string" && body.length === 0 && !contentType) { 293 + return "missing"; 294 + } 295 + if (body instanceof Uint8Array && body.length === 0 && !contentType) { 296 + return "missing"; 297 + } 298 + return "present"; 299 + } 300 + 301 + /** 302 + * Formats server timing data into an HTTP Server-Timing header value. 303 + * Creates a header string with timing metrics for performance monitoring. 
304 + * @param timings - Array of timing measurements 305 + * @returns Formatted Server-Timing header value 306 + */ 307 + export function serverTimingHeader(timings: ServerTiming[]): string { 308 + return timings 309 + .map((timing) => { 310 + let header = timing.name; 311 + if (timing.duration) header += `;dur=${timing.duration}`; 312 + if (timing.description) header += `;desc="${timing.description}"`; 313 + return header; 314 + }) 315 + .join(", "); 316 + } 317 + 318 + /** 319 + * Utility class for measuring server-side operation timings. 320 + * Provides start/stop functionality and implements the ServerTiming interface. 321 + */ 322 + export class ServerTimer implements ServerTiming { 323 + public duration?: number; 324 + private startMs?: number; 325 + /** 326 + * Creates a new ServerTimer instance. 327 + * @param name Identifier for the timing measurement 328 + * @param description Optional description of what is being timed 329 + */ 330 + constructor( 331 + public name: string, 332 + public description?: string, 333 + ) {} 334 + /** 335 + * Starts the timer by recording the current timestamp. 336 + * @returns This timer instance for method chaining 337 + */ 338 + start(): ServerTimer { 339 + this.startMs = Date.now(); 340 + return this; 341 + } 342 + /** 343 + * Stops the timer and calculates the duration. 344 + * @returns This timer instance for method chaining 345 + * @throws {Error} If the timer hasn't been started 346 + */ 347 + stop(): ServerTimer { 348 + assert(this.startMs, "timer hasn't been started"); 349 + this.duration = Date.now() - this.startMs; 350 + return this; 351 + } 352 + } 353 + 354 + /** 355 + * Represents timing information for server-side operations. 356 + * Used for performance monitoring and debugging. 357 + */ 358 + export interface ServerTiming { 359 + name: string; 360 + duration?: number; 361 + description?: string; 362 + } 363 + 364 + /** 365 + * Represents a minimal HTTP request with essential properties. 
366 + * Used when full request information is not needed. 367 + */ 368 + export interface MinimalRequest { 369 + url?: string; 370 + method?: string; 371 + headers: Headers | { [key: string]: string | string[] | undefined }; 372 + } 373 + 374 + /** 375 + * Validates and extracts the NSID from a request object. 376 + * Convenience wrapper for parseUrlNsid that works with request objects. 377 + * @param req - The request object containing a URL 378 + * @returns The extracted NSID from the request URL 379 + * @throws {InvalidRequestError} If the URL doesn't contain a valid XRPC path 380 + */ 381 + export const parseReqNsid = ( 382 + req: MinimalRequest | HonoRequest, 383 + ): string => parseUrlNsid(req.url || "/"); 384 + 385 + /** 386 + * Validates and extracts the NSID (Namespace Identifier) from an XRPC URL. 387 + * Performs strict validation of the /xrpc/ path format and NSID syntax. 388 + * @param url - The URL or path to parse 389 + * @returns The extracted NSID 390 + * @throws {InvalidRequestError} If the URL doesn't contain a valid XRPC path or NSID 391 + */ 392 + export const parseUrlNsid = (url: string): string => { 393 + // Extract path from full URL if needed 394 + let path = url; 395 + try { 396 + const urlObj = new URL(url); 397 + path = urlObj.pathname; 398 + } catch { 399 + // If URL parsing fails, assume it's already a path 400 + } 401 + 402 + if ( 403 + // Ordered by likelihood of failure 404 + path.length <= 6 || 405 + path[5] !== "/" || 406 + path[4] !== "c" || 407 + path[3] !== "p" || 408 + path[2] !== "r" || 409 + path[1] !== "x" || 410 + path[0] !== "/" 411 + ) { 412 + throw new InvalidRequestError("invalid xrpc path"); 413 + } 414 + 415 + const startOfNsid = 6; 416 + 417 + let curr = startOfNsid; 418 + let char: number; 419 + let alphaNumRequired = true; 420 + for (; curr < path.length; curr++) { 421 + char = path.charCodeAt(curr); 422 + if ( 423 + (char >= 48 && char <= 57) || // 0-9 424 + (char >= 65 && char <= 90) || // A-Z 425 + (char >= 97 
&& char <= 122) // a-z 426 + ) { 427 + alphaNumRequired = false; 428 + } else if (char === 45 /* "-" */ || char === 46 /* "." */) { 429 + if (alphaNumRequired) { 430 + throw new InvalidRequestError("invalid xrpc path"); 431 + } 432 + alphaNumRequired = true; 433 + } else if (char === 47 /* "/" */) { 434 + // Allow trailing slash (next char is either EOS or "?") 435 + if (curr === path.length - 1 || path.charCodeAt(curr + 1) === 63) { 436 + break; 437 + } 438 + throw new InvalidRequestError("invalid xrpc path"); 439 + } else if (char === 63 /* "?"" */) { 440 + break; 441 + } else { 442 + throw new InvalidRequestError("invalid xrpc path"); 443 + } 444 + } 445 + 446 + // last char was one of: '-', '.', '/' 447 + if (alphaNumRequired) { 448 + throw new InvalidRequestError("invalid xrpc path"); 449 + } 450 + 451 + // A domain name consists of minimum two characters 452 + if (curr - startOfNsid < 2) { 453 + throw new InvalidRequestError("invalid xrpc path"); 454 + } 455 + 456 + // @TODO is there a max ? 457 + 458 + return path.slice(startOfNsid, curr); 459 + }; 460 + 461 + /** 462 + * Alias for parseUrlNsid for backward compatibility. 463 + * @deprecated Use parseUrlNsid instead 464 + */ 465 + export const extractUrlNsid = parseUrlNsid; 466 + 467 + /** 468 + * Creates an input verifier function for XRPC methods. 469 + * Returns a function that validates and processes request input based on lexicon definitions. 
470 + * @param lexicons - The lexicon registry for validation 471 + * @param nsid - The namespace identifier of the method 472 + * @param def - The lexicon definition for the method 473 + * @returns A function that verifies request input 474 + */ 475 + export function createInputVerifier( 476 + lexicons: Lexicons, 477 + nsid: string, 478 + def: LexXrpcProcedure | LexXrpcQuery, 479 + ) { 480 + return async (req: Request): Promise<HandlerInput | undefined> => { 481 + if (def.type === "query") { 482 + return undefined; 483 + } 484 + 485 + const contentType = req.headers.get("content-type"); 486 + let body: unknown; 487 + 488 + // Clone the request to avoid consuming the body multiple times 489 + const clonedReq = req.clone(); 490 + 491 + if (contentType?.includes("application/json")) { 492 + body = await clonedReq.json(); 493 + } else if (contentType?.includes("text/")) { 494 + body = await clonedReq.text(); 495 + } else { 496 + const arrayBuffer = await clonedReq.arrayBuffer(); 497 + body = new Uint8Array(arrayBuffer); 498 + } 499 + 500 + return await validateInput(nsid, def, body, contentType, lexicons); 501 + }; 502 + } 503 + 504 + /** 505 + * Sets headers on a Hono context response. 506 + * Iterates through the provided headers and sets them on the response. 507 + * @param c - The Hono context object 508 + * @param headers - Optional headers to set as key-value pairs 509 + */ 510 + export function setHeaders(c: Context, headers?: Record<string, string>) { 511 + if (headers) { 512 + for (const [key, value] of Object.entries(headers)) { 513 + c.header(key, value); 514 + } 515 + } 516 + } 517 + 518 + /** 519 + * Converts a value to an array. 520 + * If the value is already an array, returns it as-is. Otherwise, wraps it in an array. 
521 + * @template T - The type of the value 522 + * @param value - The value to convert to an array 523 + * @returns An array containing the value(s) 524 + */ 525 + export function asArray<T>(value: T | T[]): T[] { 526 + return Array.isArray(value) ? value : [value]; 527 + } 528 + 529 + /** 530 + * Decodes query parameters from URL search params into a typed parameter object. 531 + * Converts arrays of single values to single values, preserves multiple values as arrays. 532 + * @param params - Raw query parameters as arrays of strings 533 + * @returns Decoded parameters with single values or arrays 534 + */ 535 + export function decodeUrlQueryParams(params: Record<string, string[]>): Params { 536 + const decoded: Params = {}; 537 + 538 + for (const [key, values] of Object.entries(params)) { 539 + if (values.length === 1) { 540 + decoded[key] = values[0]; 541 + } else if (values.length > 1) { 542 + decoded[key] = values; 543 + } 544 + } 545 + 546 + return decoded; 547 + }