Suite of AT Protocol TypeScript libraries built on web standards
20
fork

Configure Feed

Select the types of activity you want to include in your feed.

repo

+4988 -63
-1
bytes/deno.json
··· 4 4 "exports": "./mod.ts", 5 5 "license": "MIT", 6 6 "imports": { 7 - "@std/assert": "jsr:@std/assert@^1.0.14", 8 7 "multiformats": "npm:multiformats@^13.4.1" 9 8 } 10 9 }
+1 -3
common/deno.json
··· 1 1 { 2 2 "name": "@atp/common", 3 - "version": "0.1.0-alpha.3", 3 + "version": "0.1.0-alpha.4", 4 4 "exports": "./mod.ts", 5 5 "license": "MIT", 6 6 "imports": { 7 7 "@ipld/dag-cbor": "npm:@ipld/dag-cbor@^9.2.5", 8 8 "@logtape/file": "jsr:@logtape/file@^1.2.0-dev.344+834f24a9", 9 9 "@logtape/logtape": "jsr:@logtape/logtape@^1.2.0-dev.344+834f24a9", 10 - "@std/assert": "jsr:@std/assert@^1.0.14", 11 10 "@std/cbor": "jsr:@std/cbor@^0.1.8", 12 11 "@std/crypto": "jsr:@std/crypto@^1.0.5", 13 12 "@std/encoding": "jsr:@std/encoding@^1.0.10", 14 13 "@std/fs": "jsr:@std/fs@^1.0.19", 15 - "@std/io": "jsr:@std/io@^0.225.2", 16 14 "@std/streams": "jsr:@std/streams@^1.0.12", 17 15 "multiformats": "npm:multiformats@^13.4.1", 18 16 "zod": "jsr:@zod/zod@^4.1.11"
+1 -1
common/ipld.ts
··· 225 225 } 226 226 // check cids 227 227 if (CID.asCID(a) && CID.asCID(b)) { 228 - return (CID.asCID(a)?.equals(CID.asCID(b))) ? true : false; 228 + return CID.asCID(a)?.equals(CID.asCID(b)) ?? false; 229 229 } 230 230 // walk plain objects 231 231 const objA = a as Record<string, IpldValue>;
+8 -10
common/streams.ts
··· 1 1 import { concat } from "@atp/bytes"; 2 - import { Buffer } from "@std/io"; 2 + import { flattenUint8Arrays } from "./util.ts"; 3 3 4 4 export const forwardStreamErrors = (..._streams: ReadableStream[]) => { 5 5 // Web Streams don't have the same error forwarding mechanism as streams ··· 60 60 }; 61 61 62 62 export const streamToBuffer = async ( 63 - stream: 64 - | Iterable<Uint8Array> 65 - | AsyncIterable<Uint8Array> 66 - | ReadableStream<Uint8Array>, 67 - ): Promise<Buffer> => { 68 - const bytes = await streamToBytes(stream as AsyncIterable<Uint8Array>); 69 - const buffer = new Buffer(); 70 - await buffer.write(bytes); 71 - return buffer; 63 + stream: AsyncIterable<Uint8Array>, 64 + ): Promise<Uint8Array> => { 65 + const arrays: Uint8Array[] = []; 66 + for await (const chunk of stream) { 67 + arrays.push(chunk); 68 + } 69 + return flattenUint8Arrays(arrays); 72 70 }; 73 71 74 72 export const byteIterableToStream = (
+9 -25
common/tests/streams_test.ts
··· 1 - import { assert, assertEquals, assertRejects } from "@std/assert"; 1 + import { assert, assertEquals } from "@std/assert"; 2 2 import * as streams from "../streams.ts"; 3 3 4 4 Deno.test("forwardStreamErrors - is a no-op in Web Streams", () => { ··· 74 74 }); 75 75 76 76 const buffer = await streams.streamToBuffer(stream); 77 - const bytes = new Uint8Array(buffer.bytes()); 78 77 79 - assertEquals(bytes[0], "f".charCodeAt(0)); 80 - assertEquals(bytes[1], "o".charCodeAt(0)); 81 - assertEquals(bytes[2], "o".charCodeAt(0)); 82 - assertEquals(bytes.length, 3); 78 + assertEquals(buffer[0], "f".charCodeAt(0)); 79 + assertEquals(buffer[1], "o".charCodeAt(0)); 80 + assertEquals(buffer[2], "o".charCodeAt(0)); 81 + assertEquals(buffer.length, 3); 83 82 }); 84 83 85 84 Deno.test("streamToBuffer - converts async iterable to buffer", async () => { ··· 90 89 })(); 91 90 92 91 const buffer = await streams.streamToBuffer(iterable); 93 - const bytes = new Uint8Array(buffer.bytes()); 94 - 95 - assertEquals(bytes[0], "b".charCodeAt(0)); 96 - assertEquals(bytes[1], "a".charCodeAt(0)); 97 - assertEquals(bytes[2], "r".charCodeAt(0)); 98 - assertEquals(bytes.length, 3); 99 - }); 100 92 101 - Deno.test("streamToBuffer - throws error for non Uint8Array chunks", async () => { 102 - const iterable = (async function* () { 103 - yield new Uint8Array([98]); // "b" 104 - yield new Uint8Array([97]); // "a" 105 - yield "r"; // This should cause an error 106 - })(); 107 - 108 - await assertRejects( 109 - () => streams.streamToBuffer(iterable as AsyncIterable<Uint8Array>), 110 - TypeError, 111 - "expected Uint8Array", 112 - ); 93 + assertEquals(buffer[0], "b".charCodeAt(0)); 94 + assertEquals(buffer[1], "a".charCodeAt(0)); 95 + assertEquals(buffer[2], "r".charCodeAt(0)); 96 + assertEquals(buffer.length, 3); 113 97 }); 114 98 115 99 Deno.test("byteIterableToStream - converts byte iterable to stream", async () => {
-1
crypto/deno.json
··· 6 6 "imports": { 7 7 "@noble/curves": "jsr:@noble/curves@^2.0.1", 8 8 "@noble/hashes": "jsr:@noble/hashes@^2.0.1", 9 - "@std/assert": "jsr:@std/assert@^1.0.14", 10 9 "multiformats": "npm:multiformats@^13.4.1" 11 10 }, 12 11 "test": {
+5 -1
deno.json
··· 5 5 "syntax", 6 6 "crypto", 7 7 "lexicon", 8 + "repo", 8 9 "xrpc", 9 10 "xrpc-server", 10 11 "lex-cli" 11 - ] 12 + ], 13 + "imports": { 14 + "@std/assert": "jsr:@std/assert@^1.0.14" 15 + } 12 16 }
+9 -16
deno.lock
··· 28 28 "jsr:@std/internal@^1.0.10": "1.0.10", 29 29 "jsr:@std/internal@^1.0.9": "1.0.10", 30 30 "jsr:@std/io@~0.224.9": "0.224.9", 31 - "jsr:@std/io@~0.225.2": "0.225.2", 32 31 "jsr:@std/path@1": "1.1.2", 33 32 "jsr:@std/path@^1.1.1": "1.1.2", 34 33 "jsr:@std/path@^1.1.2": "1.1.2", ··· 57 56 "jsr:@cliffy/internal", 58 57 "jsr:@std/encoding@~1.0.5", 59 58 "jsr:@std/fmt", 60 - "jsr:@std/io@~0.224.9" 59 + "jsr:@std/io" 61 60 ] 62 61 }, 63 62 "@cliffy/command@1.0.0-rc.8": { ··· 146 145 }, 147 146 "@std/io@0.224.9": { 148 147 "integrity": "4414664b6926f665102e73c969cfda06d2c4c59bd5d0c603fd4f1b1c840d6ee3" 149 - }, 150 - "@std/io@0.225.2": { 151 - "integrity": "3c740cd4ee4c082e6cfc86458f47e2ab7cb353dc6234d5e9b1f91a2de5f4d6c7", 152 - "dependencies": [ 153 - "jsr:@std/bytes@^1.0.5" 154 - ] 155 148 }, 156 149 "@std/path@1.1.2": { 157 150 "integrity": "c0b13b97dfe06546d5e16bf3966b1cadf92e1cc83e56ba5476ad8b498d9e3038", ··· 403 396 "https://deno.land/x/deno_fmt@0.1.5/src/sha256.ts": "eea8272cd53072763ad5fa8e632a572676cde41e027485116a8f84a49a669bf8" 404 397 }, 405 398 "workspace": { 399 + "dependencies": [ 400 + "jsr:@std/assert@^1.0.14" 401 + ], 406 402 "members": { 407 403 "bytes": { 408 404 "dependencies": [ 409 - "jsr:@std/assert@^1.0.14", 410 405 "npm:multiformats@^13.4.1" 411 406 ] 412 407 }, ··· 414 409 "dependencies": [ 415 410 "jsr:@logtape/file@^1.2.0-dev.344+834f24a9", 416 411 "jsr:@logtape/logtape@^1.2.0-dev.344+834f24a9", 417 - "jsr:@std/assert@^1.0.14", 418 412 "jsr:@std/cbor@~0.1.8", 419 413 "jsr:@std/crypto@^1.0.5", 420 414 "jsr:@std/encoding@^1.0.10", 421 415 "jsr:@std/fs@^1.0.19", 422 - "jsr:@std/io@~0.225.2", 423 416 "jsr:@std/streams@^1.0.12", 424 417 "jsr:@zod/zod@^4.1.11", 425 418 "npm:@ipld/dag-cbor@^9.2.5", ··· 430 423 "dependencies": [ 431 424 "jsr:@noble/curves@^2.0.1", 432 425 "jsr:@noble/hashes@^2.0.1", 433 - "jsr:@std/assert@^1.0.14", 434 426 "npm:multiformats@^13.4.1" 435 427 ] 436 428 }, ··· 447 439 }, 448 440 "lexicon": { 449 441 
"dependencies": [ 450 - "jsr:@std/assert@^1.0.14", 451 442 "npm:multiformats@^13.4.1", 452 443 "npm:zod@^4.1.11" 453 444 ] 454 445 }, 455 - "syntax": { 446 + "repo": { 456 447 "dependencies": [ 457 - "jsr:@std/assert@^1.0.14" 448 + "jsr:@std/encoding@^1.0.10", 449 + "npm:@ipld/dag-cbor@^9.2.5", 450 + "npm:multiformats@^13.4.1", 451 + "npm:zod@^4.1.11" 458 452 ] 459 453 }, 460 454 "xrpc": { ··· 465 459 "xrpc-server": { 466 460 "dependencies": [ 467 461 "jsr:@hono/hono@^4.9.8", 468 - "jsr:@std/assert@^1.0.14", 469 462 "jsr:@std/cbor@~0.1.8", 470 463 "jsr:@std/encoding@^1.0.10", 471 464 "jsr:@zod/zod@^4.1.11",
-1
lexicon/deno.json
··· 4 4 "exports": "./mod.ts", 5 5 "license": "MIT", 6 6 "imports": { 7 - "@std/assert": "jsr:@std/assert@^1.0.14", 8 7 "multiformats": "npm:multiformats@^13.4.1", 9 8 "zod": "npm:zod@^4.1.11" 10 9 },
+126
repo/block-map.ts
··· 1 + import { CID } from "multiformats/cid"; 2 + import { equals } from "@atp/bytes"; 3 + import { dataToCborBlock } from "@atp/common"; 4 + import { lexToIpld, type LexValue } from "@atp/lexicon"; 5 + 6 + export class BlockMap implements Iterable<[cid: CID, bytes: Uint8Array]> { 7 + private map: Map<string, Uint8Array> = new Map(); 8 + 9 + constructor(entries?: Iterable<readonly [cid: CID, bytes: Uint8Array]>) { 10 + if (entries) { 11 + for (const [cid, bytes] of entries) { 12 + this.set(cid, bytes); 13 + } 14 + } 15 + } 16 + 17 + async add(value: LexValue): Promise<CID> { 18 + const block = await dataToCborBlock(lexToIpld(value)); 19 + this.set(block.cid, block.bytes); 20 + return block.cid; 21 + } 22 + 23 + set(cid: CID, bytes: Uint8Array): BlockMap { 24 + this.map.set(cid.toString(), bytes); 25 + return this; 26 + } 27 + 28 + get(cid: CID): Uint8Array | undefined { 29 + return this.map.get(cid.toString()); 30 + } 31 + 32 + delete(cid: CID): BlockMap { 33 + this.map.delete(cid.toString()); 34 + return this; 35 + } 36 + 37 + getMany(cids: CID[]): { blocks: BlockMap; missing: CID[] } { 38 + const missing: CID[] = []; 39 + const blocks = new BlockMap(); 40 + for (const cid of cids) { 41 + const got = this.map.get(cid.toString()); 42 + if (got) { 43 + blocks.set(cid, got); 44 + } else { 45 + missing.push(cid); 46 + } 47 + } 48 + return { blocks, missing }; 49 + } 50 + 51 + has(cid: CID): boolean { 52 + return this.map.has(cid.toString()); 53 + } 54 + 55 + clear(): void { 56 + this.map.clear(); 57 + } 58 + 59 + forEach(cb: (bytes: Uint8Array, cid: CID) => void): void { 60 + for (const [cid, bytes] of this) cb(bytes, cid); 61 + } 62 + 63 + entries(): Entry[] { 64 + return Array.from(this, toEntry); 65 + } 66 + 67 + cids(): CID[] { 68 + return Array.from(this.keys()); 69 + } 70 + 71 + addMap(toAdd: BlockMap): BlockMap { 72 + for (const [cid, bytes] of toAdd) this.set(cid, bytes); 73 + return this; 74 + } 75 + 76 + get size(): number { 77 + return this.map.size; 78 + 
} 79 + 80 + get byteSize(): number { 81 + let size = 0; 82 + for (const bytes of this.values()) size += bytes.length; 83 + return size; 84 + } 85 + 86 + equals(other: BlockMap): boolean { 87 + if (this.size !== other.size) { 88 + return false; 89 + } 90 + for (const [cid, bytes] of this) { 91 + const otherBytes = other.get(cid); 92 + if (!otherBytes) return false; 93 + if (!equals(bytes, otherBytes)) { 94 + return false; 95 + } 96 + } 97 + return true; 98 + } 99 + 100 + *keys(): Generator<CID, void, unknown> { 101 + for (const key of this.map.keys()) { 102 + yield CID.parse(key); 103 + } 104 + } 105 + 106 + *values(): Generator<Uint8Array, void, unknown> { 107 + yield* this.map.values(); 108 + } 109 + 110 + *[Symbol.iterator](): Generator<[CID, Uint8Array], void, unknown> { 111 + for (const [key, value] of this.map) { 112 + yield [CID.parse(key), value]; 113 + } 114 + } 115 + } 116 + 117 + function toEntry([cid, bytes]: readonly [CID, Uint8Array]): Entry { 118 + return { cid, bytes }; 119 + } 120 + 121 + type Entry = { 122 + cid: CID; 123 + bytes: Uint8Array; 124 + }; 125 + 126 + export default BlockMap;
+254
repo/car.ts
··· 1 + import * as cbor from "@ipld/dag-cbor"; 2 + import type { CID } from "multiformats/cid"; 3 + import * as ui8 from "@atp/bytes"; 4 + import { encodeVarint } from "@std/encoding/varint"; 5 + import { 6 + type CarHeader, 7 + check, 8 + def, 9 + parseCidFromBytes, 10 + streamToBuffer, 11 + verifyCidForBytes, 12 + } from "@atp/common"; 13 + import { BlockMap } from "./block-map.ts"; 14 + import type { CarBlock } from "./types.ts"; 15 + 16 + // Custom varint decoding to work around std library bug 17 + const decodeVarintCustom = (bytes: Uint8Array): number => { 18 + let result = 0; 19 + let shift = 0; 20 + 21 + for (let i = 0; i < bytes.length; i++) { 22 + const byte = bytes[i]; 23 + result |= (byte & 0x7F) << shift; 24 + 25 + // If MSB is 0, this is the last byte 26 + if ((byte & 0x80) === 0) { 27 + return result; 28 + } 29 + 30 + shift += 7; 31 + } 32 + 33 + throw new Error("Invalid varint: no terminating byte found"); 34 + }; 35 + 36 + export async function* writeCarStream( 37 + root: CID | null, 38 + blocks: AsyncIterable<CarBlock>, 39 + ): AsyncIterable<Uint8Array> { 40 + const headerObj = { 41 + version: 1, 42 + roots: root ? 
[root] : [], 43 + }; 44 + const header = new Uint8Array(cbor.encode(headerObj)); 45 + const [ui8] = encodeVarint(header.byteLength); 46 + yield ui8; 47 + yield header; 48 + for await (const block of blocks) { 49 + const [blockUi8] = encodeVarint( 50 + block.cid.bytes.byteLength + block.bytes.byteLength, 51 + ); 52 + yield blockUi8; 53 + yield block.cid.bytes; 54 + yield block.bytes; 55 + } 56 + } 57 + 58 + export const blocksToCarFile = ( 59 + root: CID | null, 60 + blocks: BlockMap, 61 + ): Promise<Uint8Array> => { 62 + const carStream = blocksToCarStream(root, blocks); 63 + return streamToBuffer(carStream); 64 + }; 65 + 66 + export const blocksToCarStream = ( 67 + root: CID | null, 68 + blocks: BlockMap, 69 + ): AsyncIterable<Uint8Array> => { 70 + return writeCarStream(root, iterateBlocks(blocks)); 71 + }; 72 + 73 + async function* iterateBlocks(blocks: BlockMap) { 74 + for (const entry of blocks.entries()) { 75 + yield { cid: entry.cid, bytes: entry.bytes }; 76 + } 77 + } 78 + 79 + export type ReadCarOptions = { 80 + /** 81 + * When true, does not verify CID-to-content mapping within CAR. 
82 + */ 83 + skipCidVerification?: boolean; 84 + }; 85 + 86 + export const readCar = async ( 87 + bytes: Uint8Array, 88 + opts?: ReadCarOptions, 89 + ): Promise<{ roots: CID[]; blocks: BlockMap }> => { 90 + const { roots, blocks } = await readCarStream([bytes], opts); 91 + const blockMap = new BlockMap(); 92 + for await (const block of blocks) { 93 + blockMap.set(block.cid, block.bytes); 94 + } 95 + return { roots, blocks: blockMap }; 96 + }; 97 + 98 + export const readCarWithRoot = async ( 99 + bytes: Uint8Array, 100 + opts?: ReadCarOptions, 101 + ): Promise<{ root: CID; blocks: BlockMap }> => { 102 + const { roots, blocks } = await readCar(bytes, opts); 103 + if (roots.length !== 1) { 104 + throw new Error(`Expected one root, got ${roots.length}`); 105 + } 106 + const root = roots[0]; 107 + return { 108 + root, 109 + blocks, 110 + }; 111 + }; 112 + export type CarBlockIterable = AsyncGenerator<CarBlock, void, unknown> & { 113 + dump: () => Promise<void>; 114 + }; 115 + 116 + export const readCarStream = async ( 117 + car: Iterable<Uint8Array> | AsyncIterable<Uint8Array>, 118 + opts?: ReadCarOptions, 119 + ): Promise<{ 120 + roots: CID[]; 121 + blocks: CarBlockIterable; 122 + }> => { 123 + const reader = new BufferedReader(car); 124 + try { 125 + const headerSize = await reader.readVarint(); 126 + if (headerSize === null) { 127 + throw new Error("Could not parse CAR header"); 128 + } 129 + const headerBytes = await reader.read(headerSize); 130 + const headerData = cbor.decode(headerBytes); 131 + const header = check.assure(def.carHeader.schema, headerData) as CarHeader; 132 + return { 133 + roots: header.roots as CID[], 134 + blocks: readCarBlocksIter(reader, opts), 135 + }; 136 + } catch (err) { 137 + await reader.close(); 138 + throw err; 139 + } 140 + }; 141 + 142 + const readCarBlocksIter = ( 143 + reader: BufferedReader, 144 + opts?: ReadCarOptions, 145 + ): CarBlockIterable => { 146 + let generator = readCarBlocksIterGenerator(reader); 147 + if 
(!opts?.skipCidVerification) { 148 + generator = verifyIncomingCarBlocks(generator); 149 + } 150 + return Object.assign(generator, { 151 + async dump() { 152 + // try/finally to ensure that reader.close is called even if blocks.return throws. 153 + try { 154 + // Prevent the iterator from being started after this method is called. 155 + await generator.return(); 156 + } finally { 157 + // @NOTE the "finally" block of the async generator won't be called 158 + // if the iteration was never started so we need to manually close here. 159 + await reader.close(); 160 + } 161 + }, 162 + }); 163 + }; 164 + 165 + async function* readCarBlocksIterGenerator( 166 + reader: BufferedReader, 167 + ): AsyncGenerator<CarBlock, void, unknown> { 168 + try { 169 + while (!reader.isDone) { 170 + const blockSize = await reader.readVarint(); 171 + if (blockSize === null) { 172 + break; 173 + } 174 + const blockBytes = await reader.read(blockSize); 175 + const cid = parseCidFromBytes(blockBytes.subarray(0, 36)); 176 + const bytes = blockBytes.subarray(36); 177 + yield { cid, bytes }; 178 + } 179 + } finally { 180 + await reader.close(); 181 + } 182 + } 183 + 184 + export async function* verifyIncomingCarBlocks( 185 + car: AsyncIterable<CarBlock>, 186 + ): AsyncGenerator<CarBlock, void, unknown> { 187 + for await (const block of car) { 188 + await verifyCidForBytes(block.cid, block.bytes); 189 + yield block; 190 + } 191 + } 192 + 193 + class BufferedReader { 194 + buffer: Uint8Array = new Uint8Array(); 195 + iterator: Iterator<Uint8Array> | AsyncIterator<Uint8Array>; 196 + isDone = false; 197 + 198 + constructor(stream: Iterable<Uint8Array> | AsyncIterable<Uint8Array>) { 199 + this.iterator = Symbol.asyncIterator in stream 200 + ? 
stream[Symbol.asyncIterator]() 201 + : stream[Symbol.iterator](); 202 + } 203 + 204 + async read(bytesToRead: number): Promise<Uint8Array> { 205 + await this.readUntilBuffered(bytesToRead); 206 + const value = this.buffer.subarray(0, bytesToRead); 207 + this.buffer = this.buffer.subarray(bytesToRead); 208 + return value; 209 + } 210 + 211 + async readVarint(): Promise<number | null> { 212 + let done = false; 213 + const bytes: Uint8Array[] = []; 214 + while (!done) { 215 + const byte = await this.read(1); 216 + if (byte.byteLength === 0) { 217 + if (bytes.length > 0) { 218 + throw new Error("could not parse varint"); 219 + } else { 220 + return null; 221 + } 222 + } 223 + bytes.push(byte); 224 + if (byte[0] < 128) { 225 + done = true; 226 + } 227 + } 228 + const concatted = ui8.concat(bytes); 229 + const number = decodeVarintCustom(concatted); 230 + return number; 231 + } 232 + 233 + private async readUntilBuffered(bytesToRead: number) { 234 + if (this.isDone) { 235 + return; 236 + } 237 + while (this.buffer.length < bytesToRead) { 238 + const next = await this.iterator.next(); 239 + if (next.done) { 240 + this.isDone = true; 241 + return; 242 + } 243 + this.buffer = ui8.concat([this.buffer, next.value]); 244 + } 245 + } 246 + 247 + async close(): Promise<void> { 248 + if (!this.isDone && this.iterator.return) { 249 + await this.iterator.return(); 250 + } 251 + this.isDone = true; 252 + this.buffer = new Uint8Array(); 253 + } 254 + }
+49
repo/cid-set.ts
··· 1 + import { CID } from "multiformats"; 2 + 3 + export class CidSet { 4 + private set: Set<string>; 5 + 6 + constructor(arr: CID[] = []) { 7 + const strArr = arr.map((c) => c.toString()); 8 + this.set = new Set(strArr); 9 + } 10 + 11 + add(cid: CID): CidSet { 12 + this.set.add(cid.toString()); 13 + return this; 14 + } 15 + 16 + addSet(toMerge: CidSet): CidSet { 17 + toMerge.toList().map((c) => this.add(c)); 18 + return this; 19 + } 20 + 21 + subtractSet(toSubtract: CidSet): CidSet { 22 + toSubtract.toList().map((c) => this.delete(c)); 23 + return this; 24 + } 25 + 26 + delete(cid: CID) { 27 + this.set.delete(cid.toString()); 28 + return this; 29 + } 30 + 31 + has(cid: CID): boolean { 32 + return this.set.has(cid.toString()); 33 + } 34 + 35 + size(): number { 36 + return this.set.size; 37 + } 38 + 39 + clear(): CidSet { 40 + this.set.clear(); 41 + return this; 42 + } 43 + 44 + toList(): CID[] { 45 + return [...this.set].map((c) => CID.parse(c)); 46 + } 47 + } 48 + 49 + export default CidSet;
+117
repo/data-diff.ts
··· 1 + import type { CID } from "multiformats"; 2 + import { BlockMap } from "./block-map.ts"; 3 + import { CidSet } from "./cid-set.ts"; 4 + import { type MST, mstDiff, type NodeEntry } from "./mst/index.ts"; 5 + 6 + export class DataDiff { 7 + adds: Record<string, DataAdd> = {}; 8 + updates: Record<string, DataUpdate> = {}; 9 + deletes: Record<string, DataDelete> = {}; 10 + 11 + newMstBlocks: BlockMap = new BlockMap(); 12 + newLeafCids: CidSet = new CidSet(); 13 + removedCids: CidSet = new CidSet(); 14 + 15 + static of(curr: MST, prev: MST | null): Promise<DataDiff> { 16 + return mstDiff(curr, prev); 17 + } 18 + 19 + async nodeAdd(node: NodeEntry) { 20 + if (node.isLeaf()) { 21 + this.leafAdd(node.key, node.value); 22 + } else { 23 + const data = await node.serialize(); 24 + this.treeAdd(data.cid, data.bytes); 25 + } 26 + } 27 + 28 + async nodeDelete(node: NodeEntry) { 29 + if (node.isLeaf()) { 30 + const key = node.key; 31 + const cid = node.value; 32 + this.deletes[key] = { key, cid }; 33 + this.removedCids.add(cid); 34 + } else { 35 + const cid = await node.getPointer(); 36 + this.treeDelete(cid); 37 + } 38 + } 39 + 40 + leafAdd(key: string, cid: CID) { 41 + this.adds[key] = { key, cid }; 42 + if (this.removedCids.has(cid)) { 43 + this.removedCids.delete(cid); 44 + } else { 45 + this.newLeafCids.add(cid); 46 + } 47 + } 48 + 49 + leafUpdate(key: string, prev: CID, cid: CID) { 50 + if (prev.equals(cid)) return; 51 + this.updates[key] = { key, prev, cid }; 52 + this.removedCids.add(prev); 53 + this.newLeafCids.add(cid); 54 + } 55 + 56 + leafDelete(key: string, cid: CID) { 57 + this.deletes[key] = { key, cid }; 58 + if (this.newLeafCids.has(cid)) { 59 + this.newLeafCids.delete(cid); 60 + } else { 61 + this.removedCids.add(cid); 62 + } 63 + } 64 + 65 + treeAdd(cid: CID, bytes: Uint8Array) { 66 + if (this.removedCids.has(cid)) { 67 + this.removedCids.delete(cid); 68 + } else { 69 + this.newMstBlocks.set(cid, bytes); 70 + } 71 + } 72 + 73 + treeDelete(cid: CID) { 74 
+ if (this.newMstBlocks.has(cid)) { 75 + this.newMstBlocks.delete(cid); 76 + } else { 77 + this.removedCids.add(cid); 78 + } 79 + } 80 + 81 + addList(): DataAdd[] { 82 + return Object.values(this.adds); 83 + } 84 + 85 + updateList(): DataUpdate[] { 86 + return Object.values(this.updates); 87 + } 88 + 89 + deleteList(): DataDelete[] { 90 + return Object.values(this.deletes); 91 + } 92 + 93 + updatedKeys(): string[] { 94 + const keys = [ 95 + ...Object.keys(this.adds), 96 + ...Object.keys(this.updates), 97 + ...Object.keys(this.deletes), 98 + ]; 99 + return [...new Set(keys)]; 100 + } 101 + } 102 + 103 + export type DataAdd = { 104 + key: string; 105 + cid: CID; 106 + }; 107 + 108 + export type DataUpdate = { 109 + key: string; 110 + prev: CID; 111 + cid: CID; 112 + }; 113 + 114 + export type DataDelete = { 115 + key: string; 116 + cid: CID; 117 + };
+17
repo/deno.json
··· 1 + { 2 + "name": "@atp/repo", 3 + "version": "0.1.0-alpha.1", 4 + "exports": "./mod.ts", 5 + "license": "MIT", 6 + "imports": { 7 + "@ipld/dag-cbor": "npm:@ipld/dag-cbor@^9.2.5", 8 + "@std/encoding": "jsr:@std/encoding@^1.0.10", 9 + "multiformats": "npm:multiformats@^13.4.1", 10 + "zod": "npm:zod@^4.1.11" 11 + }, 12 + "test": { 13 + "permissions": { 14 + "env": true 15 + } 16 + } 17 + }
+43
repo/error.ts
··· 1 + import { CID } from 'multiformats/cid' 2 + 3 + export class MissingBlockError extends Error { 4 + constructor( 5 + public cid: CID, 6 + def?: string, 7 + ) { 8 + let msg = `block not found: ${cid.toString()}` 9 + if (def) { 10 + msg += `, expected type: ${def}` 11 + } 12 + super(msg) 13 + } 14 + } 15 + 16 + export class MissingBlocksError extends Error { 17 + constructor( 18 + public context: string, 19 + public cids: CID[], 20 + ) { 21 + const cidStr = cids.map((c) => c.toString()) 22 + super(`missing ${context} blocks: ${cidStr}`) 23 + } 24 + } 25 + 26 + export class MissingCommitBlocksError extends Error { 27 + constructor( 28 + public commit: CID, 29 + public cids: CID[], 30 + ) { 31 + const cidStr = cids.map((c) => c.toString()) 32 + super(`missing blocks for commit ${commit.toString()}: ${cidStr}`) 33 + } 34 + } 35 + 36 + export class UnexpectedObjectError extends Error { 37 + constructor( 38 + public cid: CID, 39 + public def: string, 40 + ) { 41 + super(`unexpected object at ${cid.toString()}, expected: ${def}`) 42 + } 43 + }
+7
repo/logger.ts
··· 1 + import { subsystemLogger } from "@atp/common"; 2 + 3 + export const logger: ReturnType<typeof subsystemLogger> = subsystemLogger( 4 + "repo", 5 + ); 6 + 7 + export default logger;
+11
repo/mod.ts
··· 1 + export * from "./block-map.ts"; 2 + export * from "./cid-set.ts"; 3 + export * from "./repo.ts"; 4 + export * from "./mst/index.ts"; 5 + export * from "./parse.ts"; 6 + export * from "./storage/index.ts"; 7 + export * from "./sync/index.ts"; 8 + export * from "./types.ts"; 9 + export * from "./data-diff.ts"; 10 + export * from "./car.ts"; 11 + export * from "./util.ts";
+114
repo/mst/diff.ts
··· 1 + import { DataDiff } from "../data-diff.ts"; 2 + import type { MST } from "./mst.ts"; 3 + import { MstWalker } from "./walker.ts"; 4 + 5 + export const nullDiff = async (tree: MST): Promise<DataDiff> => { 6 + const diff = new DataDiff(); 7 + for await (const entry of tree.walk()) { 8 + await diff.nodeAdd(entry); 9 + } 10 + return diff; 11 + }; 12 + 13 + export const mstDiff = async ( 14 + curr: MST, 15 + prev: MST | null, 16 + ): Promise<DataDiff> => { 17 + await curr.getPointer(); 18 + if (prev === null) { 19 + return nullDiff(curr); 20 + } 21 + 22 + await prev.getPointer(); 23 + const diff = new DataDiff(); 24 + 25 + const leftWalker = new MstWalker(prev); 26 + const rightWalker = new MstWalker(curr); 27 + while (!leftWalker.status.done || !rightWalker.status.done) { 28 + // if one walker is finished, continue walking the other & logging all nodes 29 + if (leftWalker.status.done && !rightWalker.status.done) { 30 + await diff.nodeAdd(rightWalker.status.curr); 31 + await rightWalker.advance(); 32 + continue; 33 + } else if (!leftWalker.status.done && rightWalker.status.done) { 34 + await diff.nodeDelete(leftWalker.status.curr); 35 + await leftWalker.advance(); 36 + continue; 37 + } 38 + if (leftWalker.status.done || rightWalker.status.done) break; 39 + const left = leftWalker.status.curr; 40 + const right = rightWalker.status.curr; 41 + if (left === null || right === null) break; 42 + 43 + // if both pointers are leaves, record an update & advance both or record the lowest key and advance that pointer 44 + if (left.isLeaf() && right.isLeaf()) { 45 + if (left.key === right.key) { 46 + if (!left.value.equals(right.value)) { 47 + diff.leafUpdate(left.key, left.value, right.value); 48 + } 49 + await leftWalker.advance(); 50 + await rightWalker.advance(); 51 + } else if (left.key < right.key) { 52 + diff.leafDelete(left.key, left.value); 53 + await leftWalker.advance(); 54 + } else { 55 + diff.leafAdd(right.key, right.value); 56 + await rightWalker.advance(); 57 
+ } 58 + continue; 59 + } 60 + 61 + // next, ensure that we're on the same layer 62 + // if one walker is at a higher layer than the other, we need to do one of two things 63 + // if the higher walker is pointed at a tree, step into that tree to try to catch up with the lower 64 + // if the higher walker is pointed at a leaf, then advance the lower walker to try to catch up the higher 65 + if (leftWalker.layer() > rightWalker.layer()) { 66 + if (left.isLeaf()) { 67 + await diff.nodeAdd(right); 68 + await rightWalker.advance(); 69 + } else { 70 + await diff.nodeDelete(left); 71 + await leftWalker.stepInto(); 72 + } 73 + continue; 74 + } else if (leftWalker.layer() < rightWalker.layer()) { 75 + if (right.isLeaf()) { 76 + await diff.nodeDelete(left); 77 + await leftWalker.advance(); 78 + } else { 79 + await diff.nodeAdd(right); 80 + await rightWalker.stepInto(); 81 + } 82 + continue; 83 + } 84 + 85 + // if we're on the same level, and both pointers are trees, do a comparison 86 + // if they're the same, step over. if they're different, step in to find the subdiff 87 + if (left.isTree() && right.isTree()) { 88 + if (left.pointer.equals(right.pointer)) { 89 + await leftWalker.stepOver(); 90 + await rightWalker.stepOver(); 91 + } else { 92 + await diff.nodeAdd(right); 93 + await diff.nodeDelete(left); 94 + await leftWalker.stepInto(); 95 + await rightWalker.stepInto(); 96 + } 97 + continue; 98 + } 99 + 100 + // finally, if one pointer is a tree and the other is a leaf, simply step into the tree 101 + if (left.isLeaf() && right.isTree()) { 102 + await diff.nodeAdd(right); 103 + await rightWalker.stepInto(); 104 + continue; 105 + } else if (left.isTree() && right.isLeaf()) { 106 + await diff.nodeDelete(left); 107 + await leftWalker.stepInto(); 108 + continue; 109 + } 110 + 111 + throw new Error("Unidentifiable case in diff walk"); 112 + } 113 + return diff; 114 + };
+4
repo/mst/index.ts
··· 1 + export * from "./mst.ts"; 2 + export * from "./diff.ts"; 3 + export * from "./walker.ts"; 4 + export * as mstUtil from "./util.ts";
+892
repo/mst/mst.ts
··· 1 + import type { CID } from "multiformats"; 2 + import { z } from "zod"; 3 + import { cidForCbor, dataToCborBlock, schema as common } from "@atp/common"; 4 + import { BlockMap } from "../block-map.ts"; 5 + import { CidSet } from "../cid-set.ts"; 6 + import { MissingBlockError, MissingBlocksError } from "../error.ts"; 7 + import * as parse from "../parse.ts"; 8 + import type { ReadableBlockstore } from "../storage/index.ts"; 9 + import type { CarBlock } from "../types.ts"; 10 + import * as util from "./util.ts"; 11 + 12 + /** 13 + * This is an implementation of a Merkle Search Tree (MST) 14 + * The data structure is described here: https://hal.inria.fr/hal-02303490/document 15 + * The MST is an ordered, insert-order-independent, deterministic tree. 16 + * Keys are laid out in alphabetic order. 17 + * The key insight of an MST is that each key is hashed and starting 0s are counted 18 + * to determine which layer it falls on (5 zeros for ~32 fanout). 19 + * This is a merkle tree, so each subtree is referred to by it's hash (CID). 20 + * When a leaf is changed, ever tree on the path to that leaf is changed as well, 21 + * thereby updating the root hash. 22 + * 23 + * For atproto, we use SHA-256 as the key hashing algorithm, and ~4 fanout 24 + * (2-bits of zero per layer). 25 + */ 26 + 27 + /** 28 + * A couple notes on CBOR encoding: 29 + * 30 + * There are never two neighboring subtrees. 31 + * Therefore, we can represent a node as an array of 32 + * leaves & pointers to their right neighbor (possibly null), 33 + * along with a pointer to the left-most subtree (also possibly null). 34 + * 35 + * Most keys in a subtree will have overlap. 
36 + * We do compression on prefixes by describing keys as: 37 + * - the length of the prefix that it shares in common with the preceding key 38 + * - the rest of the string 39 + * 40 + * For example: 41 + * If the first leaf in a tree is `bsky/posts/abcdefg` and the second is `bsky/posts/abcdehi` 42 + * Then the first will be described as `prefix: 0, key: 'bsky/posts/abcdefg'`, 43 + * and the second will be described as `prefix: 16, key: 'hi'.` 44 + */ 45 + const subTreePointer = z.nullable(common.cid); 46 + const treeEntry = z.object({ 47 + p: z.number(), // prefix count of ascii chars that this key shares with the prev key 48 + k: common.bytes, // the rest of the key outside the shared prefix 49 + v: common.cid, // value 50 + t: subTreePointer, // next subtree (to the right of leaf) 51 + }); 52 + const nodeData = z.object({ 53 + l: subTreePointer, // left-most subtree 54 + e: z.array(treeEntry), //entries 55 + }); 56 + export type NodeData = z.infer<typeof nodeData>; 57 + 58 + export const nodeDataDef = { 59 + name: "mst node", 60 + schema: nodeData, 61 + }; 62 + 63 + export type NodeEntry = MST | Leaf; 64 + 65 + export type MstOpts = { 66 + layer: number; 67 + }; 68 + 69 + export class MST { 70 + storage: ReadableBlockstore; 71 + entries: NodeEntry[] | null; 72 + layer: number | null; 73 + pointer: CID; 74 + outdatedPointer = false; 75 + 76 + constructor( 77 + storage: ReadableBlockstore, 78 + pointer: CID, 79 + entries: NodeEntry[] | null, 80 + layer: number | null, 81 + ) { 82 + this.storage = storage; 83 + this.entries = entries; 84 + this.layer = layer; 85 + this.pointer = pointer; 86 + } 87 + 88 + static async create( 89 + storage: ReadableBlockstore, 90 + entries: NodeEntry[] = [], 91 + opts?: Partial<MstOpts>, 92 + ): Promise<MST> { 93 + const pointer = await util.cidForEntries(entries); 94 + const { layer = null } = opts || {}; 95 + return new MST(storage, pointer, entries, layer); 96 + } 97 + 98 + static async fromData( 99 + storage: 
ReadableBlockstore, 100 + data: NodeData, 101 + opts?: Partial<MstOpts>, 102 + ): Promise<MST> { 103 + const { layer = null } = opts || {}; 104 + const entries = util.deserializeNodeData(storage, data, opts); 105 + const pointer = await cidForCbor(data); 106 + return new MST(storage, pointer, entries, layer); 107 + } 108 + 109 + // this is really a *lazy* load, doesn't actually touch storage 110 + static load( 111 + storage: ReadableBlockstore, 112 + cid: CID, 113 + opts?: Partial<MstOpts>, 114 + ): MST { 115 + const { layer = null } = opts || {}; 116 + return new MST(storage, cid, null, layer); 117 + } 118 + 119 + // Immutability 120 + // ------------------- 121 + 122 + // We never mutate an MST, we just return a new MST with updated values 123 + newTree(entries: NodeEntry[]): MST { 124 + const mst = new MST(this.storage, this.pointer, entries, this.layer); 125 + mst.outdatedPointer = true; 126 + return mst; 127 + } 128 + 129 + // Getters (lazy load) 130 + // ------------------- 131 + 132 + // We don't want to load entries of every subtree, just the ones we need 133 + getEntries(): NodeEntry[] { 134 + if (this.entries) return [...this.entries]; 135 + if (this.pointer) { 136 + const data = this.storage.readObj(this.pointer, nodeDataDef); 137 + const firstLeaf = data.e[0]; 138 + const layer = firstLeaf !== undefined 139 + ? 
util.leadingZerosOnHash(firstLeaf.k as Uint8Array) 140 + : undefined; 141 + this.entries = util.deserializeNodeData(this.storage, data, { 142 + layer, 143 + }); 144 + 145 + return this.entries; 146 + } 147 + throw new Error("No entries or CID provided"); 148 + } 149 + 150 + // We don't hash the node on every mutation for performance reasons 151 + // Instead we keep track of whether the pointer is outdated and only (recursively) calculate when needed 152 + async getPointer(): Promise<CID> { 153 + if (!this.outdatedPointer) return this.pointer; 154 + const { cid } = await this.serialize(); 155 + this.pointer = cid; 156 + this.outdatedPointer = false; 157 + return this.pointer; 158 + } 159 + 160 + async serialize(): Promise<{ cid: CID; bytes: Uint8Array }> { 161 + let entries = this.getEntries(); 162 + const outdated = entries.filter( 163 + (e) => e.isTree() && e.outdatedPointer, 164 + ) as MST[]; 165 + if (outdated.length > 0) { 166 + await Promise.all(outdated.map((e) => e.getPointer())); 167 + entries = this.getEntries(); 168 + } 169 + const data = util.serializeNodeData(entries); 170 + const block = await dataToCborBlock(data); 171 + return { 172 + cid: block.cid, 173 + bytes: block.bytes, 174 + }; 175 + } 176 + 177 + // In most cases, we get the layer of a node from a hint on creation 178 + // In the case of the topmost node in the tree, we look for a key in the node & determine the layer 179 + // In the case where we don't find one, we recurse down until we do. 
180 + // If we still can't find one, then we have an empty tree and the node is layer 0 181 + async getLayer(): Promise<number> { 182 + this.layer = await this.attemptGetLayer(); 183 + if (this.layer === null) this.layer = 0; 184 + return this.layer; 185 + } 186 + 187 + async attemptGetLayer(): Promise<number | null> { 188 + if (this.layer !== null) return this.layer; 189 + const entries = this.getEntries(); 190 + let layer = util.layerForEntries(entries); 191 + if (layer === null) { 192 + for (const entry of entries) { 193 + if (entry.isTree()) { 194 + const childLayer = await entry.attemptGetLayer(); 195 + if (childLayer !== null) { 196 + layer = childLayer + 1; 197 + break; 198 + } 199 + } 200 + } 201 + } 202 + if (layer !== null) this.layer = layer; 203 + return layer; 204 + } 205 + 206 + // Core functionality 207 + // ------------------- 208 + 209 + // Return the necessary blocks to persist the MST to repo storage 210 + async getUnstoredBlocks(): Promise<{ root: CID; blocks: BlockMap }> { 211 + const blocks = new BlockMap(); 212 + const pointer = await this.getPointer(); 213 + const alreadyHas = this.storage.has(pointer); 214 + if (alreadyHas) return { root: pointer, blocks }; 215 + const entries = this.getEntries(); 216 + const data = util.serializeNodeData(entries); 217 + await blocks.add(data); 218 + for (const entry of entries) { 219 + if (entry.isTree()) { 220 + const subtree = await entry.getUnstoredBlocks(); 221 + blocks.addMap(subtree.blocks); 222 + } 223 + } 224 + return { root: pointer, blocks: blocks }; 225 + } 226 + 227 + // Adds a new leaf for the given key/value pair 228 + // Throws if a leaf with that key already exists 229 + async add(key: string, value: CID, knownZeros?: number): Promise<MST> { 230 + util.ensureValidMstKey(key); 231 + const keyZeros = knownZeros ?? 
(util.leadingZerosOnHash(key)); 232 + const layer = await this.getLayer(); 233 + const newLeaf = new Leaf(key, value); 234 + if (keyZeros === layer) { 235 + // it belongs in this layer 236 + const index = this.findGtOrEqualLeafIndex(key); 237 + const found = this.atIndex(index); 238 + if (found?.isLeaf() && found.key === key) { 239 + throw new Error(`There is already a value at key: ${key}`); 240 + } 241 + const prevNode = this.atIndex(index - 1); 242 + if (!prevNode || prevNode.isLeaf()) { 243 + // if entry before is a leaf, (or we're on far left) we can just splice in 244 + return this.spliceIn(newLeaf, index); 245 + } else { 246 + // else we try to split the subtree around the key 247 + const splitSubTree = await prevNode.splitAround(key); 248 + return this.replaceWithSplit( 249 + index - 1, 250 + splitSubTree[0], 251 + newLeaf, 252 + splitSubTree[1], 253 + ); 254 + } 255 + } else if (keyZeros < layer) { 256 + // it belongs on a lower layer 257 + const index = this.findGtOrEqualLeafIndex(key); 258 + const prevNode = this.atIndex(index - 1); 259 + if (prevNode && prevNode.isTree()) { 260 + // if entry before is a tree, we add it to that tree 261 + const newSubtree = await prevNode.add(key, value, keyZeros); 262 + return this.updateEntry(index - 1, newSubtree); 263 + } else { 264 + const subTree = await this.createChild(); 265 + const newSubTree = await subTree.add(key, value, keyZeros); 266 + return this.spliceIn(newSubTree, index); 267 + } 268 + } else { 269 + // it belongs on a higher layer & we must push the rest of the tree down 270 + const split = await this.splitAround(key); 271 + // if the newly added key has >=2 more leading zeros than the current highest layer 272 + // then we need to add in structural nodes in between as well 273 + let left: MST | null = split[0]; 274 + let right: MST | null = split[1]; 275 + const layer = await this.getLayer(); 276 + const extraLayersToAdd = keyZeros - layer; 277 + // intentionally starting at 1, since first layer is 
taken care of by split 278 + for (let i = 1; i < extraLayersToAdd; i++) { 279 + if (left !== null) { 280 + left = await left.createParent(); 281 + } 282 + if (right !== null) { 283 + right = await right.createParent(); 284 + } 285 + } 286 + const updated: NodeEntry[] = []; 287 + if (left) updated.push(left); 288 + updated.push(new Leaf(key, value)); 289 + if (right) updated.push(right); 290 + const newRoot = await MST.create(this.storage, updated, { 291 + layer: keyZeros, 292 + }); 293 + newRoot.outdatedPointer = true; 294 + return newRoot; 295 + } 296 + } 297 + 298 + // Gets the value at the given key 299 + get(key: string): CID | null { 300 + const index = this.findGtOrEqualLeafIndex(key); 301 + const found = this.atIndex(index); 302 + if (found && found.isLeaf() && found.key === key) { 303 + return found.value; 304 + } 305 + const prev = this.atIndex(index - 1); 306 + if (prev && prev.isTree()) { 307 + return prev.get(key); 308 + } 309 + return null; 310 + } 311 + 312 + // Edits the value at the given key 313 + // Throws if the given key does not exist 314 + async update(key: string, value: CID): Promise<MST> { 315 + util.ensureValidMstKey(key); 316 + const index = this.findGtOrEqualLeafIndex(key); 317 + const found = this.atIndex(index); 318 + if (found && found.isLeaf() && found.key === key) { 319 + return this.updateEntry(index, new Leaf(key, value)); 320 + } 321 + const prev = this.atIndex(index - 1); 322 + if (prev && prev.isTree()) { 323 + const updatedTree = await prev.update(key, value); 324 + return this.updateEntry(index - 1, updatedTree); 325 + } 326 + throw new Error(`Could not find a record with key: ${key}`); 327 + } 328 + 329 + // Deletes the value at the given key 330 + async delete(key: string): Promise<MST> { 331 + const altered = await this.deleteRecurse(key); 332 + return altered.trimTop(); 333 + } 334 + 335 + async deleteRecurse(key: string): Promise<MST> { 336 + const index = this.findGtOrEqualLeafIndex(key); 337 + const found = 
this.atIndex(index); 338 + // if found, remove it on this level 339 + if (found?.isLeaf() && found.key === key) { 340 + const prev = this.atIndex(index - 1); 341 + const next = this.atIndex(index + 1); 342 + if (prev?.isTree() && next?.isTree()) { 343 + const merged = await prev.appendMerge(next); 344 + return this.newTree([ 345 + ...(this.slice(0, index - 1)), 346 + merged, 347 + ...(this.slice(index + 2)), 348 + ]); 349 + } else { 350 + return this.removeEntry(index); 351 + } 352 + } 353 + // else recurse down to find it 354 + const prev = this.atIndex(index - 1); 355 + if (prev?.isTree()) { 356 + const subtree = await prev.deleteRecurse(key); 357 + const subTreeEntries = subtree.getEntries(); 358 + if (subTreeEntries.length === 0) { 359 + return this.removeEntry(index - 1); 360 + } else { 361 + return this.updateEntry(index - 1, subtree); 362 + } 363 + } else { 364 + throw new Error(`Could not find a record with key: ${key}`); 365 + } 366 + } 367 + 368 + // Simple Operations 369 + // ------------------- 370 + 371 + // update entry in place 372 + updateEntry(index: number, entry: NodeEntry): MST { 373 + const update = [ 374 + ...(this.slice(0, index)), 375 + entry, 376 + ...(this.slice(index + 1)), 377 + ]; 378 + return this.newTree(update); 379 + } 380 + 381 + // remove entry at index 382 + removeEntry(index: number): MST { 383 + const updated = [ 384 + ...(this.slice(0, index)), 385 + ...(this.slice(index + 1)), 386 + ]; 387 + return this.newTree(updated); 388 + } 389 + 390 + // append entry to end of the node 391 + append(entry: NodeEntry): MST { 392 + const entries = this.getEntries(); 393 + return this.newTree([...entries, entry]); 394 + } 395 + 396 + // prepend entry to start of the node 397 + prepend(entry: NodeEntry): MST { 398 + const entries = this.getEntries(); 399 + return this.newTree([entry, ...entries]); 400 + } 401 + 402 + // returns entry at index 403 + atIndex(index: number): NodeEntry | null { 404 + const entries = this.getEntries(); 405 + 
return entries[index] ?? null; 406 + } 407 + 408 + // returns a slice of the node (like array.slice) 409 + slice( 410 + start?: number | undefined, 411 + end?: number | undefined, 412 + ): NodeEntry[] { 413 + const entries = this.getEntries(); 414 + return entries.slice(start, end); 415 + } 416 + 417 + // inserts entry at index 418 + spliceIn(entry: NodeEntry, index: number): MST { 419 + const update = [ 420 + ...this.slice(0, index), 421 + entry, 422 + ...this.slice(index), 423 + ]; 424 + return this.newTree(update); 425 + } 426 + 427 + // replaces an entry with [ Maybe(tree), Leaf, Maybe(tree) ] 428 + replaceWithSplit( 429 + index: number, 430 + left: MST | null, 431 + leaf: Leaf, 432 + right: MST | null, 433 + ): MST { 434 + const update = this.slice(0, index); 435 + if (left) update.push(left); 436 + update.push(leaf); 437 + if (right) update.push(right); 438 + update.push(...this.slice(index + 1)); 439 + return this.newTree(update); 440 + } 441 + 442 + // if the topmost node in the tree only points to another tree, trim the top and return the subtree 443 + trimTop(): MST { 444 + let entries: NodeEntry[]; 445 + try { 446 + entries = this.getEntries(); 447 + } catch (err) { 448 + if (err instanceof MissingBlockError) { 449 + return this; 450 + } else { 451 + throw err; 452 + } 453 + } 454 + if (entries.length === 1 && entries[0].isTree()) { 455 + return entries[0].trimTop(); 456 + } else { 457 + return this; 458 + } 459 + } 460 + 461 + // Subtree & Splits 462 + // ------------------- 463 + 464 + // Recursively splits a sub tree around a given key 465 + async splitAround(key: string): Promise<[MST | null, MST | null]> { 466 + const index = this.findGtOrEqualLeafIndex(key); 467 + // split tree around key 468 + const leftData = this.slice(0, index); 469 + const rightData = this.slice(index); 470 + let left = this.newTree(leftData); 471 + let right = this.newTree(rightData); 472 + 473 + // if the far right of the left side is a subtree, 474 + // we need to split it 
on the key as well 475 + const lastInLeft = leftData[leftData.length - 1]; 476 + if (lastInLeft?.isTree()) { 477 + left = left.removeEntry(leftData.length - 1); 478 + const split = await lastInLeft.splitAround(key); 479 + if (split[0]) { 480 + left = left.append(split[0]); 481 + } 482 + if (split[1]) { 483 + right = right.prepend(split[1]); 484 + } 485 + } 486 + 487 + return [ 488 + (left.getEntries()).length > 0 ? left : null, 489 + (right.getEntries()).length > 0 ? right : null, 490 + ]; 491 + } 492 + 493 + // The simple merge case where every key in the right tree is greater than every key in the left tree 494 + // (used primarily for deletes) 495 + async appendMerge(toMerge: MST): Promise<MST> { 496 + if ((await this.getLayer()) !== (await toMerge.getLayer())) { 497 + throw new Error( 498 + "Trying to merge two nodes from different layers of the MST", 499 + ); 500 + } 501 + const thisEntries = this.getEntries(); 502 + const toMergeEntries = toMerge.getEntries(); 503 + const lastInLeft = thisEntries[thisEntries.length - 1]; 504 + const firstInRight = toMergeEntries[0]; 505 + if (lastInLeft?.isTree() && firstInRight?.isTree()) { 506 + const merged = await lastInLeft.appendMerge(firstInRight); 507 + return this.newTree([ 508 + ...thisEntries.slice(0, thisEntries.length - 1), 509 + merged, 510 + ...toMergeEntries.slice(1), 511 + ]); 512 + } else { 513 + return this.newTree([...thisEntries, ...toMergeEntries]); 514 + } 515 + } 516 + 517 + // Create relatives 518 + // ------------------- 519 + 520 + async createChild(): Promise<MST> { 521 + const layer = await this.getLayer(); 522 + return MST.create(this.storage, [], { 523 + layer: layer - 1, 524 + }); 525 + } 526 + 527 + async createParent(): Promise<MST> { 528 + const layer = await this.getLayer(); 529 + const parent = await MST.create(this.storage, [this], { 530 + layer: layer + 1, 531 + }); 532 + parent.outdatedPointer = true; 533 + return parent; 534 + } 535 + 536 + // Finding insertion points 537 + // 
------------------- 538 + 539 + // finds index of first leaf node that is greater than or equal to the value 540 + findGtOrEqualLeafIndex(key: string): number { 541 + const entries = this.getEntries(); 542 + const maybeIndex = entries.findIndex( 543 + (entry) => entry.isLeaf() && entry.key >= key, 544 + ); 545 + // if we can't find, we're on the end 546 + return maybeIndex >= 0 ? maybeIndex : entries.length; 547 + } 548 + 549 + // List operations (partial tree traversal) 550 + // ------------------- 551 + 552 + // @TODO write tests for these 553 + 554 + // Walk tree starting at key 555 + async *walkFrom(key: string): AsyncIterable<NodeEntry> { 556 + yield this; 557 + const index = this.findGtOrEqualLeafIndex(key); 558 + const entries = this.getEntries(); 559 + const found = entries[index]; 560 + if (found && found.isLeaf() && found.key === key) { 561 + yield found; 562 + } else { 563 + const prev = entries[index - 1]; 564 + if (prev) { 565 + if (prev.isLeaf() && prev.key === key) { 566 + yield prev; 567 + } else if (prev.isTree()) { 568 + yield* prev.walkFrom(key); 569 + } 570 + } 571 + } 572 + 573 + for (let i = index; i < entries.length; i++) { 574 + const entry = entries[i]; 575 + if (entry.isLeaf()) { 576 + yield entry; 577 + } else { 578 + yield* entry.walkFrom(key); 579 + } 580 + } 581 + } 582 + 583 + async *walkLeavesFrom(key: string): AsyncIterable<Leaf> { 584 + for await (const node of this.walkFrom(key)) { 585 + if (node.isLeaf()) { 586 + yield node; 587 + } 588 + } 589 + } 590 + 591 + async list( 592 + count = Number.MAX_SAFE_INTEGER, 593 + after?: string, 594 + before?: string, 595 + ): Promise<Leaf[]> { 596 + const vals: Leaf[] = []; 597 + for await (const leaf of this.walkLeavesFrom(after || "")) { 598 + if (leaf.key === after) continue; 599 + if (vals.length >= count) break; 600 + if (before && leaf.key >= before) break; 601 + vals.push(leaf); 602 + } 603 + return vals; 604 + } 605 + 606 + async listWithPrefix( 607 + prefix: string, 608 + count = 
Number.MAX_SAFE_INTEGER, 609 + ): Promise<Leaf[]> { 610 + const vals: Leaf[] = []; 611 + for await (const leaf of this.walkLeavesFrom(prefix)) { 612 + if (vals.length >= count || !leaf.key.startsWith(prefix)) break; 613 + vals.push(leaf); 614 + } 615 + return vals; 616 + } 617 + 618 + // Full tree traversal 619 + // ------------------- 620 + 621 + // Walk full tree & emit nodes, consumer can bail at any point by returning false 622 + async *walk(): AsyncIterable<NodeEntry> { 623 + yield this; 624 + const entries = this.getEntries(); 625 + for (const entry of entries) { 626 + if (entry.isTree()) { 627 + for await (const e of entry.walk()) { 628 + yield e; 629 + } 630 + } else { 631 + yield entry; 632 + } 633 + } 634 + } 635 + 636 + // Walk full tree & emit nodes, consumer can bail at any point by returning false 637 + async paths(): Promise<NodeEntry[][]> { 638 + const entries = this.getEntries(); 639 + let paths: NodeEntry[][] = []; 640 + for (const entry of entries) { 641 + if (entry.isLeaf()) { 642 + paths.push([entry]); 643 + } 644 + if (entry.isTree()) { 645 + const subPaths = await entry.paths(); 646 + paths = [...paths, ...subPaths.map((p) => [entry, ...p])]; 647 + } 648 + } 649 + return paths; 650 + } 651 + 652 + // Walks tree & returns all nodes 653 + async allNodes(): Promise<NodeEntry[]> { 654 + const nodes: NodeEntry[] = []; 655 + for await (const entry of this.walk()) { 656 + nodes.push(entry); 657 + } 658 + return nodes; 659 + } 660 + 661 + // Walks tree & returns all cids 662 + async allCids(): Promise<CidSet> { 663 + const cids = new CidSet(); 664 + const entries = this.getEntries(); 665 + for (const entry of entries) { 666 + if (entry.isLeaf()) { 667 + cids.add(entry.value); 668 + } else { 669 + const subtreeCids = await entry.allCids(); 670 + cids.addSet(subtreeCids); 671 + } 672 + } 673 + cids.add(await this.getPointer()); 674 + return cids; 675 + } 676 + 677 + // Walks tree & returns all leaves 678 + async leaves() { 679 + const leaves: Leaf[] = 
[]; 680 + for await (const entry of this.walk()) { 681 + if (entry.isLeaf()) leaves.push(entry); 682 + } 683 + return leaves; 684 + } 685 + 686 + // Returns total leaf count 687 + async leafCount(): Promise<number> { 688 + const leaves = await this.leaves(); 689 + return leaves.length; 690 + } 691 + 692 + // Reachable tree traversal 693 + // ------------------- 694 + 695 + // Walk reachable branches of tree & emit nodes, consumer can bail at any point by returning false 696 + async *walkReachable(): AsyncIterable<NodeEntry> { 697 + yield this; 698 + const entries = this.getEntries(); 699 + for (const entry of entries) { 700 + if (entry.isTree()) { 701 + try { 702 + for await (const e of entry.walkReachable()) { 703 + yield e; 704 + } 705 + } catch (err) { 706 + if (err instanceof MissingBlockError) { 707 + continue; 708 + } else { 709 + throw err; 710 + } 711 + } 712 + } else { 713 + yield entry; 714 + } 715 + } 716 + } 717 + 718 + async reachableLeaves(): Promise<Leaf[]> { 719 + const leaves: Leaf[] = []; 720 + for await (const entry of this.walkReachable()) { 721 + if (entry.isLeaf()) leaves.push(entry); 722 + } 723 + return leaves; 724 + } 725 + 726 + // Sync Protocol 727 + 728 + async *carBlockStream(): AsyncIterable<CarBlock> { 729 + const leaves = new CidSet(); 730 + let toFetch = new CidSet(); 731 + toFetch.add(await this.getPointer()); 732 + while (toFetch.size() > 0) { 733 + const nextLayer = new CidSet(); 734 + const fetched = this.storage.getBlocks(toFetch.toList()); 735 + if (fetched.missing.length > 0) { 736 + throw new MissingBlocksError("mst node", fetched.missing); 737 + } 738 + for (const cid of toFetch.toList()) { 739 + const found = parse.getAndParseByDef( 740 + fetched.blocks, 741 + cid, 742 + nodeDataDef, 743 + ); 744 + yield { cid, bytes: found.bytes }; 745 + const entries = util.deserializeNodeData(this.storage, found.obj); 746 + 747 + for (const entry of entries) { 748 + if (entry.isLeaf()) { 749 + leaves.add(entry.value); 750 + } else { 751 
+ nextLayer.add(await entry.getPointer()); 752 + } 753 + } 754 + } 755 + toFetch = nextLayer; 756 + } 757 + const leafData = this.storage.getBlocks(leaves.toList()); 758 + if (leafData.missing.length > 0) { 759 + throw new MissingBlocksError("mst leaf", leafData.missing); 760 + } 761 + 762 + for (const leaf of leafData.blocks.entries()) { 763 + yield leaf; 764 + } 765 + } 766 + 767 + async cidsForPath(key: string): Promise<CID[]> { 768 + const cids: CID[] = [await this.getPointer()]; 769 + const index = this.findGtOrEqualLeafIndex(key); 770 + const found = this.atIndex(index); 771 + if (found && found.isLeaf() && found.key === key) { 772 + return [...cids, found.value]; 773 + } 774 + const prev = this.atIndex(index - 1); 775 + if (prev && prev.isTree()) { 776 + return [...cids, ...(await prev.cidsForPath(key))]; 777 + } 778 + return cids; 779 + } 780 + 781 + // A covering proof is all MST nodes (leaves excluded) needed to prove the value of a given leaf 782 + // and its siblings to its immediate right and left (if applicable) 783 + // We simply find the immediately preceeding node and then walk from that node until we reach the 784 + // first key that is greater than the requested key (the right sibling) 785 + async getCoveringProof(key: string): Promise<BlockMap> { 786 + const [self, left, right] = await Promise.all([ 787 + this.proofForKey(key), 788 + this.proofForLeftSib(key), 789 + this.proofForRightSib(key), 790 + ]); 791 + return self.addMap(left).addMap(right); 792 + } 793 + 794 + async proofForKey(key: string): Promise<BlockMap> { 795 + const index = this.findGtOrEqualLeafIndex(key); 796 + const found = this.atIndex(index); 797 + let blocks: BlockMap; 798 + if (found && found.isLeaf() && found.key === key) { 799 + blocks = new BlockMap(); 800 + } else { 801 + const prev = this.atIndex(index - 1); 802 + if (!prev || prev.isLeaf()) { 803 + return new BlockMap(); 804 + } else { 805 + blocks = await prev.proofForKey(key); 806 + } 807 + } 808 + const serialized 
= await this.serialize(); 809 + return blocks.set(serialized.cid, serialized.bytes); 810 + } 811 + 812 + async proofForLeftSib(key: string): Promise<BlockMap> { 813 + const index = this.findGtOrEqualLeafIndex(key); 814 + const prev = this.atIndex(index - 1); 815 + let blocks: BlockMap; 816 + if (!prev || prev.isLeaf()) { 817 + blocks = new BlockMap(); 818 + } else { 819 + blocks = await prev.proofForLeftSib(key); 820 + } 821 + const serialized = await this.serialize(); 822 + return blocks.set(serialized.cid, serialized.bytes); 823 + } 824 + 825 + async proofForRightSib(key: string): Promise<BlockMap> { 826 + const index = this.findGtOrEqualLeafIndex(key); 827 + let found = this.atIndex(index); 828 + if (!found) { 829 + found = this.atIndex(index - 1); 830 + } 831 + let blocks: BlockMap; 832 + if (!found) { 833 + // shouldn't ever hit, null case 834 + blocks = new BlockMap(); 835 + } else if (found.isTree()) { 836 + blocks = await found.proofForRightSib(key); 837 + // recurse down 838 + } else { 839 + const node = found.key === key 840 + ? 
this.atIndex(index + 1) 841 + : this.atIndex(index - 1); 842 + if (!node || node.isLeaf()) { 843 + blocks = new BlockMap(); 844 + } else { 845 + blocks = await node.proofForRightSib(key); 846 + } 847 + } 848 + const serialized = await this.serialize(); 849 + return blocks.set(serialized.cid, serialized.bytes); 850 + } 851 + 852 + // Matching Leaf interface 853 + // ------------------- 854 + 855 + isTree(): this is MST { 856 + return true; 857 + } 858 + 859 + isLeaf(): this is Leaf { 860 + return false; 861 + } 862 + 863 + async equals(other: NodeEntry): Promise<boolean> { 864 + if (other.isLeaf()) return false; 865 + const thisPointer = await this.getPointer(); 866 + const otherPointer = await other.getPointer(); 867 + return thisPointer.equals(otherPointer); 868 + } 869 + } 870 + 871 + export class Leaf { 872 + constructor( 873 + public key: string, 874 + public value: CID, 875 + ) {} 876 + 877 + isTree(): this is MST { 878 + return false; 879 + } 880 + 881 + isLeaf(): this is Leaf { 882 + return true; 883 + } 884 + 885 + equals(entry: NodeEntry): boolean { 886 + if (entry.isLeaf()) { 887 + return this.key === entry.key && this.value.equals(entry.value); 888 + } else { 889 + return false; 890 + } 891 + } 892 + }
+151
repo/mst/util.ts
··· 1 + import type { CID } from "multiformats"; 2 + import * as bytes from "@atp/bytes"; 3 + import { cidForCbor } from "@atp/common"; 4 + import { sha256 } from "@atp/crypto"; 5 + import type { ReadableBlockstore } from "../storage/index.ts"; 6 + import { 7 + Leaf, 8 + MST, 9 + type MstOpts, 10 + type NodeData, 11 + type NodeEntry, 12 + } from "./mst.ts"; 13 + 14 + export const leadingZerosOnHash = (key: string | Uint8Array) => { 15 + const hash = sha256(key); 16 + let leadingZeros = 0; 17 + for (let i = 0; i < hash.length; i++) { 18 + const byte = hash[i]; 19 + if (byte < 64) leadingZeros++; 20 + if (byte < 16) leadingZeros++; 21 + if (byte < 4) leadingZeros++; 22 + if (byte === 0) { 23 + leadingZeros++; 24 + } else { 25 + break; 26 + } 27 + } 28 + return leadingZeros; 29 + }; 30 + 31 + export const layerForEntries = ( 32 + entries: NodeEntry[], 33 + ): number | null => { 34 + const firstLeaf = entries.find((entry) => entry.isLeaf()); 35 + if (!firstLeaf || firstLeaf.isTree()) return null; 36 + return leadingZerosOnHash(firstLeaf.key); 37 + }; 38 + 39 + export const deserializeNodeData = ( 40 + storage: ReadableBlockstore, 41 + data: NodeData, 42 + opts?: Partial<MstOpts>, 43 + ): NodeEntry[] => { 44 + const { layer } = opts || {}; 45 + const entries: NodeEntry[] = []; 46 + if (data.l !== null) { 47 + entries.push( 48 + MST.load(storage, data.l as CID, { 49 + layer: layer ? layer - 1 : undefined, 50 + }), 51 + ); 52 + } 53 + let lastKey = ""; 54 + for (const entry of data.e) { 55 + const keyStr = bytes.toString(entry.k as Uint8Array, "ascii"); 56 + const key = lastKey.slice(0, entry.p) + keyStr; 57 + ensureValidMstKey(key); 58 + entries.push(new Leaf(key, entry.v as CID)); 59 + lastKey = key; 60 + if (entry.t !== null) { 61 + entries.push( 62 + MST.load(storage, entry.t as CID, { 63 + layer: layer ? 
layer - 1 : undefined, 64 + }), 65 + ); 66 + } 67 + } 68 + return entries; 69 + }; 70 + 71 + export const serializeNodeData = (entries: NodeEntry[]): NodeData => { 72 + const data: NodeData = { 73 + l: null, 74 + e: [], 75 + }; 76 + let i = 0; 77 + if (entries[0]?.isTree()) { 78 + i++; 79 + data.l = entries[0].pointer; 80 + } 81 + let lastKey = ""; 82 + while (i < entries.length) { 83 + const leaf = entries[i]; 84 + const next = entries[i + 1]; 85 + if (!leaf.isLeaf()) { 86 + throw new Error("Not a valid node: two subtrees next to each other"); 87 + } 88 + i++; 89 + let subtree: CID | null = null; 90 + if (next?.isTree()) { 91 + subtree = next.pointer; 92 + i++; 93 + } 94 + ensureValidMstKey(leaf.key); 95 + const prefixLen = countPrefixLen(lastKey, leaf.key); 96 + data.e.push({ 97 + p: prefixLen, 98 + k: bytes.fromString(leaf.key.slice(prefixLen), "ascii"), 99 + v: leaf.value, 100 + t: subtree, 101 + }); 102 + 103 + lastKey = leaf.key; 104 + } 105 + return data; 106 + }; 107 + 108 + export const countPrefixLen = (a: string, b: string): number => { 109 + let i; 110 + for (i = 0; i < a.length; i++) { 111 + if (a[i] !== b[i]) { 112 + break; 113 + } 114 + } 115 + return i; 116 + }; 117 + 118 + export const cidForEntries = (entries: NodeEntry[]): Promise<CID> => { 119 + const data = serializeNodeData(entries); 120 + return cidForCbor(data); 121 + }; 122 + 123 + export const isValidMstKey = (str: string): boolean => { 124 + const split = str.split("/"); 125 + return ( 126 + str.length <= 1024 && 127 + split.length === 2 && 128 + split[0].length > 0 && 129 + split[1].length > 0 && 130 + isValidChars(split[0]) && 131 + isValidChars(split[1]) 132 + ); 133 + }; 134 + 135 + export const validCharsRegex = /^[a-zA-Z0-9_~\-:.]*$/; 136 + 137 + export const isValidChars = (str: string): boolean => { 138 + return str.match(validCharsRegex) !== null; 139 + }; 140 + 141 + export const ensureValidMstKey = (str: string) => { 142 + if (!isValidMstKey(str)) { 143 + throw new 
InvalidMstKeyError(str); 144 + } 145 + }; 146 + 147 + export class InvalidMstKeyError extends Error { 148 + constructor(public key: string) { 149 + super(`Not a valid MST key: ${key}`); 150 + } 151 + }
+118
repo/mst/walker.ts
import type { MST, NodeEntry } from "./mst.ts";

// Terminal state: the traversal has been exhausted.
type WalkerStatusDone = {
  done: true;
};

// In-progress state: a cursor over one node's entries.
type WalkerStatusProgress = {
  done: false;
  curr: NodeEntry; // the entry the walker currently points at
  walking: MST | null; // walking set to null if `curr` is the root of the tree
  index: number; // position of `curr` within `walking`'s entries
};

type WalkerStatus = WalkerStatusDone | WalkerStatusProgress;

// Manual cursor over an MST: callers drive the walk one step at a time,
// choosing to step into subtrees or skip over them (used e.g. for diffing).
export class MstWalker {
  // saved statuses of ancestor nodes, popped when a node's entries are exhausted
  stack: WalkerStatus[] = [];
  status: WalkerStatus;

  constructor(public root: MST) {
    this.status = {
      done: false,
      curr: root,
      walking: null,
      index: 0,
    };
  }

  // return the current layer of the node you are walking
  // NOTE(review): falls back to 0 when `layer` hasn't been resolved yet — confirm
  // callers only rely on this after layers are known
  layer(): number {
    if (this.status.done) {
      throw new Error("Walk is done");
    }
    if (this.status.walking) {
      return this.status.walking.layer ?? 0;
    }
    // if curr is the root of the tree, add 1
    if (this.status.curr.isTree()) {
      return (this.status.curr.layer ?? 0) + 1;
    }
    throw new Error("Could not identify layer of walk");
  }

  // move to the next node in the subtree, skipping over the subtree
  async stepOver(): Promise<void> {
    if (this.status.done) return;
    // if stepping over the root of the node, we're done
    if (this.status.walking === null) {
      this.status = { done: true };
      return;
    }
    const entries = this.status.walking.getEntries();
    this.status.index++;
    const next = entries[this.status.index];
    if (!next) {
      // exhausted this node: pop back up to the parent and step over it too
      const popped = this.stack.pop();
      if (!popped) {
        this.status = { done: true };
        return;
      } else {
        this.status = popped;
        await this.stepOver();
        return;
      }
    } else {
      this.status.curr = next;
    }
  }

  // step into a subtree, throws if currently pointed at a leaf
  stepInto(): void {
    if (this.status.done) return;
    // edge case for very start of walk
    if (this.status.walking === null) {
      if (!this.status.curr.isTree()) {
        throw new Error("The root of the tree cannot be a leaf");
      }
      const next = this.status.curr.atIndex(0);
      if (!next) {
        // empty root: nothing to walk
        this.status = { done: true };
      } else {
        this.status = {
          done: false,
          walking: this.status.curr,
          curr: next,
          index: 0,
        };
      }
      return;
    }
    if (!this.status.curr.isTree()) {
      throw new Error("No tree at pointer, cannot step into");
    }

    const next = this.status.curr.atIndex(0);
    if (!next) {
      throw new Error(
        "Tried to step into a node with 0 entries which is invalid",
      );
    }

    // save a snapshot of the parent's cursor before mutating it in place
    // (the spread copy must happen before the mutations below)
    this.stack.push({ ...this.status });
    this.status.walking = this.status.curr;
    this.status.curr = next;
    this.status.index = 0;
  }

  // advance the pointer to the next node in the tree,
  // stepping into the current node if necessary
  async advance(): Promise<void> {
    if (this.status.done) return;
    if (this.status.curr.isLeaf()) {
      await this.stepOver();
    } else {
      this.stepInto();
    }
  }
}
+44
repo/parse.ts
··· 1 + import { CID } from "multiformats/cid"; 2 + import { cborDecode, check } from "@atp/common"; 3 + import { RepoRecord } from "@atp/lexicon"; 4 + import { BlockMap } from "./block-map.ts"; 5 + import { MissingBlockError, UnexpectedObjectError } from "./error.ts"; 6 + import { cborToLexRecord } from "./util.ts"; 7 + 8 + export const getAndParseRecord = ( 9 + blocks: BlockMap, 10 + cid: CID, 11 + ): { record: RepoRecord; bytes: Uint8Array } => { 12 + const bytes = blocks.get(cid); 13 + if (!bytes) { 14 + throw new MissingBlockError(cid, "record"); 15 + } 16 + const record = cborToLexRecord(bytes); 17 + return { record, bytes }; 18 + }; 19 + 20 + export const getAndParseByDef = <T>( 21 + blocks: BlockMap, 22 + cid: CID, 23 + def: check.Def<T>, 24 + ): { obj: T; bytes: Uint8Array } => { 25 + const bytes = blocks.get(cid); 26 + if (!bytes) { 27 + throw new MissingBlockError(cid, def.name); 28 + } 29 + return parseObjByDef(bytes, cid, def); 30 + }; 31 + 32 + export const parseObjByDef = <T>( 33 + bytes: Uint8Array, 34 + cid: CID, 35 + def: check.Def<T>, 36 + ): { obj: T; bytes: Uint8Array } => { 37 + const obj = cborDecode(bytes); 38 + const res = def.schema.safeParse(obj); 39 + if (res.success) { 40 + return { obj: res.data, bytes }; 41 + } else { 42 + throw new UnexpectedObjectError(cid, def.name); 43 + } 44 + };
+87
repo/readable-repo.ts
··· 1 + import { CID } from "multiformats/cid"; 2 + import { RepoRecord } from "@atp/lexicon"; 3 + import { MissingBlocksError } from "./error.ts"; 4 + import log from "./logger.ts"; 5 + import { MST } from "./mst/index.ts"; 6 + import * as parse from "./parse.ts"; 7 + import { ReadableBlockstore } from "./storage/index.ts"; 8 + import { Commit, def, RepoContents } from "./types.ts"; 9 + import * as util from "./util.ts"; 10 + 11 + type Params = { 12 + storage: ReadableBlockstore; 13 + data: MST; 14 + commit: Commit; 15 + cid: CID; 16 + }; 17 + 18 + export class ReadableRepo { 19 + storage: ReadableBlockstore; 20 + data: MST; 21 + commit: Commit; 22 + cid: CID; 23 + 24 + constructor(params: Params) { 25 + this.storage = params.storage; 26 + this.data = params.data; 27 + this.commit = params.commit; 28 + this.cid = params.cid; 29 + } 30 + 31 + static load(storage: ReadableBlockstore, commitCid: CID) { 32 + const commit = storage.readObj(commitCid, def.versionedCommit); 33 + const data = MST.load(storage, (commit as { data: CID }).data); 34 + log.info("loaded repo for", { did: commit.did }); 35 + return new ReadableRepo({ 36 + storage, 37 + data, 38 + commit: util.ensureV3Commit(commit), 39 + cid: commitCid, 40 + }); 41 + } 42 + 43 + get did(): string { 44 + return this.commit.did; 45 + } 46 + 47 + get version(): number { 48 + return this.commit.version; 49 + } 50 + 51 + async *walkRecords(from?: string): AsyncIterable<{ 52 + collection: string; 53 + rkey: string; 54 + cid: CID; 55 + record: RepoRecord; 56 + }> { 57 + for await (const leaf of this.data.walkLeavesFrom(from ?? 
"")) { 58 + const { collection, rkey } = util.parseDataKey(leaf.key); 59 + const record = await this.storage.readRecord(leaf.value); 60 + yield { collection, rkey, cid: leaf.value, record }; 61 + } 62 + } 63 + 64 + async getRecord(collection: string, rkey: string): Promise<unknown | null> { 65 + const dataKey = collection + "/" + rkey; 66 + const cid = await this.data.get(dataKey); 67 + if (!cid) return null; 68 + return this.storage.readObj(cid, def.unknown); 69 + } 70 + 71 + async getContents(): Promise<RepoContents> { 72 + const entries = await this.data.list(); 73 + const cids = entries.map((e: { key: string; value: CID }) => e.value); 74 + const { blocks, missing } = await this.storage.getBlocks(cids); 75 + if (missing.length > 0) { 76 + throw new MissingBlocksError("getContents record", missing); 77 + } 78 + const contents: RepoContents = {}; 79 + for (const entry of entries) { 80 + const { collection, rkey } = util.parseDataKey(entry.key); 81 + contents[collection] ??= {}; 82 + const parsed = await parse.getAndParseRecord(blocks, entry.value); 83 + contents[collection][rkey] = parsed.record; 84 + } 85 + return contents; 86 + } 87 + }
+233
repo/repo.ts
import { CID } from "multiformats/cid";
import { dataToCborBlock, TID } from "@atp/common";
import * as crypto from "@atp/crypto";
import { lexToIpld } from "@atp/lexicon";
import { BlockMap } from "./block-map.ts";
import { CidSet } from "./cid-set.ts";
import { DataDiff } from "./data-diff.ts";
import log from "./logger.ts";
import { MST } from "./mst/index.ts";
import { ReadableRepo } from "./readable-repo.ts";
import { RepoStorage } from "./storage/index.ts";
import {
  Commit,
  CommitData,
  def,
  RecordCreateOp,
  RecordWriteOp,
  WriteOpAction,
} from "./types.ts";
import * as util from "./util.ts";

type Params = {
  storage: RepoStorage;
  data: MST;
  commit: Commit;
  cid: CID;
};

// Writable repository: extends the read-only view with commit formatting,
// signing, and application against a writable RepoStorage.
export class Repo extends ReadableRepo {
  override storage: RepoStorage;

  constructor(params: Params) {
    super(params);
    this.storage = params.storage;
  }

  // Build the first commit for a brand-new repo: seed an MST with
  // `initialWrites`, sign a v3 commit over it, and package everything as
  // CommitData (since/prev are null — there is no prior commit).
  static async formatInitCommit(
    storage: RepoStorage,
    did: string,
    keypair: crypto.Keypair,
    initialWrites: RecordCreateOp[] = [],
    revOverride?: string,
  ): Promise<CommitData> {
    const newBlocks = new BlockMap();

    let data = await MST.create(storage);
    for (const record of initialWrites) {
      // record leaf blocks go straight into newBlocks; MST node blocks
      // are collected afterwards via the diff against the empty tree
      const cid = await newBlocks.add(record.record);
      const dataKey = util.formatDataKey(record.collection, record.rkey);
      data = await data.add(dataKey, cid);
    }
    const dataCid = await data.getPointer();
    const diff = await DataDiff.of(data, null);
    newBlocks.addMap(diff.newMstBlocks);

    const rev = revOverride ?? TID.nextStr();
    const commit = await util.signCommit(
      {
        did,
        version: 3,
        rev,
        prev: null, // added for backwards compatibility with v2
        data: dataCid,
      },
      keypair,
    );
    const commitCid = await newBlocks.add(commit);
    return {
      cid: commitCid,
      rev,
      since: null,
      prev: null,
      newBlocks,
      relevantBlocks: newBlocks,
      removedCids: diff.removedCids,
    };
  }

  // Persist a prepared commit and reload the repo at its new head.
  static async createFromCommit(
    storage: RepoStorage,
    commit: CommitData,
  ): Promise<Repo> {
    await storage.applyCommit(commit);
    return Repo.load(storage, commit.cid);
  }

  // Convenience: format and apply an init commit in one call.
  static async create(
    storage: RepoStorage,
    did: string,
    keypair: crypto.Keypair,
    initialWrites: RecordCreateOp[] = [],
  ): Promise<Repo> {
    const commit = await Repo.formatInitCommit(
      storage,
      did,
      keypair,
      initialWrites,
    );
    return Repo.createFromCommit(storage, commit);
  }

  // Load a writable repo; falls back to the storage's stored root when no
  // cid is given. Throws if neither is available.
  static override load(storage: RepoStorage, cid?: CID) {
    const commitCid = cid || (storage.getRoot());
    if (!commitCid) {
      throw new Error("No cid provided and none in storage");
    }
    const commit = storage.readObj(commitCid, def.versionedCommit);
    const data = MST.load(storage, (commit as { data: CID }).data);
    log.info("loaded repo for", { did: commit.did });
    return new Repo({
      storage,
      data,
      commit: util.ensureV3Commit(commit),
      cid: commitCid,
    });
  }

  // Apply write ops to an in-memory copy of the MST, then assemble a signed
  // commit plus the block/proof bookkeeping the sync protocol needs.
  // NOTE: the ordering below matters — the diff must be taken after all
  // writes, and leaf blocks are merged into newBlocks after MST blocks.
  async formatCommit(
    toWrite: RecordWriteOp | RecordWriteOp[],
    keypair: crypto.Keypair,
  ): Promise<CommitData> {
    const writes = Array.isArray(toWrite) ? toWrite : [toWrite];
    const leaves = new BlockMap();

    let data = this.data;
    for (const write of writes) {
      if (write.action === WriteOpAction.Create) {
        const cid = await leaves.add(write.record);
        const dataKey = write.collection + "/" + write.rkey;
        data = await data.add(dataKey, cid);
      } else if (write.action === WriteOpAction.Update) {
        const cid = await leaves.add(write.record);
        const dataKey = write.collection + "/" + write.rkey;
        data = await data.update(dataKey, cid);
      } else if (write.action === WriteOpAction.Delete) {
        const dataKey = write.collection + "/" + write.rkey;
        data = await data.delete(dataKey);
      }
    }

    const dataCid = await data.getPointer();
    const diff = await DataDiff.of(data, this.data);
    const newBlocks = diff.newMstBlocks;
    const removedCids = diff.removedCids;

    // covering proofs: MST nodes needed to verify each written path
    const proofs = await Promise.all(
      writes.map((op) =>
        data.getCoveringProof(util.formatDataKey(op.collection, op.rkey))
      ),
    );
    const relevantBlocks = new BlockMap();
    for (const proof of proofs) relevantBlocks.addMap(proof);

    // only ship leaf blocks the diff says are actually new
    const addedLeaves = leaves.getMany(diff.newLeafCids.toList());
    if (addedLeaves.missing.length > 0) {
      throw new Error(`Missing leaf blocks: ${addedLeaves.missing}`);
    }
    newBlocks.addMap(addedLeaves.blocks);
    relevantBlocks.addMap(addedLeaves.blocks);

    const rev = TID.nextStr(this.commit.rev);
    const commit = await util.signCommit(
      {
        did: this.did,
        version: 3,
        rev,
        prev: null, // added for backwards compatibility with v2
        data: dataCid,
      },
      keypair,
    );
    const commitBlock = await dataToCborBlock(lexToIpld(commit));
    // only record the commit block (and retire the old one) if the commit
    // cid actually changed
    if (!commitBlock.cid.equals(this.cid)) {
      newBlocks.set(commitBlock.cid, commitBlock.bytes);
      relevantBlocks.set(commitBlock.cid, commitBlock.bytes);

      removedCids.add(this.cid);
    }

    return {
      cid: commitBlock.cid,
      rev,
      since: this.commit.rev,
      prev: this.cid,
      newBlocks,
      relevantBlocks,
      removedCids,
    };
  }

  // Persist a formatted commit and return the repo at the new head.
  async applyCommit(commitData: CommitData): Promise<Repo> {
    await this.storage.applyCommit(commitData);
    return Repo.load(this.storage, commitData.cid);
  }

  // Format + apply in one step.
  async applyWrites(
    toWrite: RecordWriteOp | RecordWriteOp[],
    keypair: crypto.Keypair,
  ): Promise<Repo> {
    const commit = await this.formatCommit(toWrite, keypair);
    return this.applyCommit(commit);
  }

  // Re-sign the current data root under a new rev (e.g. after key rotation)
  // without changing repo contents.
  async formatResignCommit(rev: string, keypair: crypto.Keypair) {
    const commit = await util.signCommit(
      {
        did: this.did,
        version: 3,
        rev,
        prev: null, // added for backwards compatibility with v2
        data: this.commit.data,
      },
      keypair,
    );
    const newBlocks = new BlockMap();
    const commitCid = await newBlocks.add(commit);
    return {
      cid: commitCid,
      rev,
      since: null,
      prev: null,
      newBlocks,
      relevantBlocks: newBlocks,
      removedCids: new CidSet([this.cid]),
    };
  }

  async resignCommit(rev: string, keypair: crypto.Keypair) {
    const formatted = await this.formatResignCommit(rev, keypair);
    return this.applyCommit(formatted);
  }
}

export default Repo;
+4
repo/storage/index.ts
··· 1 + export * from "./readable-blockstore.ts"; 2 + export * from "./memory-blockstore.ts"; 3 + export * from "./sync-storage.ts"; 4 + export * from "./types.ts";
+74
repo/storage/memory-blockstore.ts
··· 1 + import type { CID } from "multiformats/cid"; 2 + import { BlockMap } from "../block-map.ts"; 3 + import type { CommitData } from "../types.ts"; 4 + import { ReadableBlockstore } from "./readable-blockstore.ts"; 5 + import type { RepoStorage } from "./types.ts"; 6 + 7 + export class MemoryBlockstore extends ReadableBlockstore 8 + implements RepoStorage { 9 + blocks: BlockMap; 10 + root: CID | null = null; 11 + rev: string | null = null; 12 + 13 + constructor(blocks?: BlockMap) { 14 + super(); 15 + this.blocks = new BlockMap(); 16 + if (blocks) { 17 + this.blocks.addMap(blocks); 18 + } 19 + } 20 + 21 + getRoot(): CID | null { 22 + return this.root; 23 + } 24 + 25 + getBytes(cid: CID): Uint8Array | null { 26 + return this.blocks.get(cid) || null; 27 + } 28 + 29 + has(cid: CID): boolean { 30 + return this.blocks.has(cid); 31 + } 32 + 33 + getBlocks(cids: CID[]): { blocks: BlockMap; missing: CID[] } { 34 + return this.blocks.getMany(cids); 35 + } 36 + 37 + putBlock(cid: CID, block: Uint8Array): void { 38 + this.blocks.set(cid, block); 39 + } 40 + 41 + putMany(blocks: BlockMap): void { 42 + this.blocks.addMap(blocks); 43 + } 44 + 45 + updateRoot(cid: CID, rev: string): void { 46 + this.root = cid; 47 + this.rev = rev; 48 + } 49 + 50 + applyCommit(commit: CommitData): void { 51 + this.root = commit.cid; 52 + const rmCids = commit.removedCids.toList(); 53 + for (const cid of rmCids) { 54 + this.blocks.delete(cid); 55 + } 56 + commit.newBlocks.forEach((bytes, cid) => { 57 + this.blocks.set(cid, bytes); 58 + }); 59 + } 60 + 61 + sizeInBytes(): number { 62 + let total = 0; 63 + this.blocks.forEach((bytes) => { 64 + total += bytes.byteLength; 65 + }); 66 + return total; 67 + } 68 + 69 + destroy(): void { 70 + this.blocks.clear(); 71 + } 72 + } 73 + 74 + export default MemoryBlockstore;
+58
repo/storage/readable-blockstore.ts
··· 1 + import type { CID } from "multiformats/cid"; 2 + import type { check } from "@atp/common"; 3 + import type { RepoRecord } from "@atp/lexicon"; 4 + import type { BlockMap } from "../block-map.ts"; 5 + import { MissingBlockError } from "../error.ts"; 6 + import * as parse from "../parse.ts"; 7 + import { cborToLexRecord } from "../util.ts"; 8 + 9 + export abstract class ReadableBlockstore { 10 + abstract getBytes(cid: CID): Uint8Array | null; 11 + abstract has(cid: CID): boolean; 12 + abstract getBlocks( 13 + cids: CID[], 14 + ): { blocks: BlockMap; missing: CID[] }; 15 + 16 + attemptRead<T>( 17 + cid: CID, 18 + def: check.Def<T>, 19 + ): { obj: T; bytes: Uint8Array } | null { 20 + const bytes = this.getBytes(cid); 21 + if (!bytes) return null; 22 + return parse.parseObjByDef(bytes, cid, def); 23 + } 24 + 25 + readObjAndBytes<T>( 26 + cid: CID, 27 + def: check.Def<T>, 28 + ): { obj: T; bytes: Uint8Array } { 29 + const read = this.attemptRead(cid, def); 30 + if (!read) { 31 + throw new MissingBlockError(cid, def.name); 32 + } 33 + return read; 34 + } 35 + 36 + readObj<T>(cid: CID, def: check.Def<T>): T { 37 + const obj = this.readObjAndBytes(cid, def); 38 + return obj.obj; 39 + } 40 + 41 + attemptReadRecord(cid: CID): RepoRecord | null { 42 + try { 43 + return this.readRecord(cid); 44 + } catch { 45 + return null; 46 + } 47 + } 48 + 49 + readRecord(cid: CID): RepoRecord { 50 + const bytes = this.getBytes(cid); 51 + if (!bytes) { 52 + throw new MissingBlockError(cid); 53 + } 54 + return cborToLexRecord(bytes); 55 + } 56 + } 57 + 58 + export default ReadableBlockstore;
+35
repo/storage/sync-storage.ts
··· 1 + import type { CID } from "multiformats/cid"; 2 + import type { BlockMap } from "../block-map.ts"; 3 + import { ReadableBlockstore } from "./readable-blockstore.ts"; 4 + 5 + export class SyncStorage extends ReadableBlockstore { 6 + constructor( 7 + public staged: ReadableBlockstore, 8 + public saved: ReadableBlockstore, 9 + ) { 10 + super(); 11 + } 12 + 13 + getBytes(cid: CID): Uint8Array | null { 14 + const got = this.staged.getBytes(cid); 15 + if (got) return got; 16 + return this.saved.getBytes(cid); 17 + } 18 + 19 + getBlocks(cids: CID[]): { blocks: BlockMap; missing: CID[] } { 20 + const fromStaged = this.staged.getBlocks(cids); 21 + const fromSaved = this.saved.getBlocks(fromStaged.missing); 22 + const blocks = fromStaged.blocks; 23 + blocks.addMap(fromSaved.blocks); 24 + return { 25 + blocks, 26 + missing: fromSaved.missing, 27 + }; 28 + } 29 + 30 + has(cid: CID): boolean { 31 + return (this.staged.has(cid)) || (this.saved.has(cid)); 32 + } 33 + } 34 + 35 + export default SyncStorage;
+46
repo/storage/types.ts
··· 1 + import type { CID } from "multiformats/cid"; 2 + import type { check } from "@atp/common"; 3 + import type { RepoRecord } from "@atp/lexicon"; 4 + import type { BlockMap } from "../block-map.ts"; 5 + import type { CommitData } from "../types.ts"; 6 + 7 + export interface RepoStorage { 8 + // Writable 9 + getRoot(): CID | null; 10 + putBlock(cid: CID, block: Uint8Array, rev: string): void; 11 + putMany(blocks: BlockMap, rev: string): void; 12 + updateRoot(cid: CID, rev: string): void; 13 + applyCommit(commit: CommitData): void; 14 + 15 + // Readable 16 + getBytes(cid: CID): Uint8Array | null; 17 + has(cid: CID): boolean; 18 + getBlocks(cids: CID[]): { blocks: BlockMap; missing: CID[] }; 19 + attemptRead<T>( 20 + cid: CID, 21 + def: check.Def<T>, 22 + ): { obj: T; bytes: Uint8Array } | null; 23 + readObjAndBytes<T>( 24 + cid: CID, 25 + def: check.Def<T>, 26 + ): { obj: T; bytes: Uint8Array }; 27 + readObj<T>(cid: CID, def: check.Def<T>): T; 28 + attemptReadRecord(cid: CID): RepoRecord | null; 29 + readRecord(cid: CID): RepoRecord; 30 + } 31 + 32 + export interface BlobStore { 33 + putTemp(bytes: Uint8Array | ReadableStream): Promise<string>; 34 + makePermanent(key: string, cid: CID): Promise<void>; 35 + putPermanent(cid: CID, bytes: Uint8Array | ReadableStream): Promise<void>; 36 + quarantine(cid: CID): Promise<void>; 37 + unquarantine(cid: CID): Promise<void>; 38 + getBytes(cid: CID): Uint8Array; 39 + getStream(cid: CID): Promise<ReadableStream>; 40 + hasTemp(key: string): Promise<boolean>; 41 + hasStored(cid: CID): Promise<boolean>; 42 + delete(cid: CID): Promise<void>; 43 + deleteMany(cid: CID[]): Promise<void>; 44 + } 45 + 46 + export class BlobNotFoundError extends Error {}
+207
repo/sync/consumer.ts
import type { CID } from "multiformats/cid";
import type { BlockMap } from "../block-map.ts";
import { readCarWithRoot } from "../car.ts";
import { DataDiff } from "../data-diff.ts";
import { MST } from "../mst/index.ts";
import { ReadableRepo } from "../readable-repo.ts";
import {
  MemoryBlockstore,
  type ReadableBlockstore,
  SyncStorage,
} from "../storage/index.ts";
import {
  def,
  type RecordCidClaim,
  type RecordClaim,
  type VerifiedDiff,
  type VerifiedRepo,
} from "../types.ts";
import * as util from "../util.ts";

// Verify a full repo delivered as CAR bytes (see verifyRepo).
export const verifyRepoCar = async (
  carBytes: Uint8Array,
  did?: string,
  signingKey?: string,
): Promise<VerifiedRepo> => {
  const car = await readCarWithRoot(carBytes);
  return verifyRepo(car.blocks, car.root, did, signingKey);
};

// Verify a full repo: treat it as a diff against an empty repo, then require
// that every resulting write is a create.
export const verifyRepo = async (
  blocks: BlockMap,
  head: CID,
  did?: string,
  signingKey?: string,
  opts?: { ensureLeaves?: boolean },
): Promise<VerifiedRepo> => {
  const diff = await verifyDiff(null, blocks, head, did, signingKey, opts);
  const creates = util.ensureCreates(diff.writes);
  return {
    creates,
    commit: diff.commit,
  };
};

// Verify a diff delivered as CAR bytes (see verifyDiff).
export const verifyDiffCar = async (
  repo: ReadableRepo | null,
  carBytes: Uint8Array,
  did?: string,
  signingKey?: string,
  opts?: { ensureLeaves?: boolean },
): Promise<VerifiedDiff> => {
  const car = await readCarWithRoot(carBytes);
  return verifyDiff(repo, car.blocks, car.root, did, signingKey, opts);
};

// Verify an update against a (possibly null) known repo: check the new root's
// did/signature, diff the new MST against the old one, and assemble the
// CommitData a caller would need to persist the update.
export const verifyDiff = async (
  repo: ReadableRepo | null,
  updateBlocks: BlockMap,
  updateRoot: CID,
  did?: string,
  signingKey?: string,
  opts?: { ensureLeaves?: boolean },
): Promise<VerifiedDiff> => {
  const { ensureLeaves = true } = opts ?? {};
  // layer the update's blocks over the existing repo's storage so the new
  // MST can be walked even where it reuses old nodes
  const stagedStorage = new MemoryBlockstore(updateBlocks);
  const updateStorage = repo
    ? new SyncStorage(stagedStorage, repo.storage)
    : stagedStorage;
  const updated = verifyRepoRoot(
    updateStorage as ReadableBlockstore,
    updateRoot,
    did,
    signingKey,
  );
  const diff = await DataDiff.of(updated.data, repo?.data ?? null);
  const writes = await util.diffToWriteDescripts(diff);
  const newBlocks = diff.newMstBlocks;
  // leaf (record) blocks must come from the update itself
  const leaves = updateBlocks.getMany(diff.newLeafCids.toList());
  if (leaves.missing.length > 0 && ensureLeaves) {
    throw new Error(`missing leaf blocks: ${leaves.missing}`);
  }
  newBlocks.addMap(leaves.blocks);
  const removedCids = diff.removedCids;
  const commitCid = await newBlocks.add(updated.commit);
  // ensure the commit cid actually changed
  if (repo) {
    if (commitCid.equals(repo.cid)) {
      newBlocks.delete(commitCid);
    } else {
      removedCids.add(repo.cid);
    }
  }
  return {
    writes,
    commit: {
      cid: updated.cid,
      rev: updated.commit.rev,
      prev: repo?.cid ?? null,
      since: repo?.commit.rev ?? null,
      newBlocks,
      relevantBlocks: newBlocks,
      removedCids,
    },
  };
};

// @NOTE only verifies the root, not the repo contents
const verifyRepoRoot = (
  storage: ReadableBlockstore,
  head: CID,
  did?: string,
  signingKey?: string,
): ReadableRepo => {
  const repo = ReadableRepo.load(storage, head);
  if (did !== undefined && repo.did !== did) {
    throw new RepoVerificationError(`Invalid repo did: ${repo.did}`);
  }
  if (signingKey !== undefined) {
    const validSig = util.verifyCommitSig(repo.commit, signingKey);
    if (!validSig) {
      throw new RepoVerificationError(
        `Invalid signature on commit: ${repo.cid.toString()}`,
      );
    }
  }
  return repo;
};

// Check a set of {collection, rkey, cid} claims against a proof CAR: a claim
// with cid === null asserts absence; otherwise the claimed cid must match
// the MST's value for that key.
export const verifyProofs = async (
  proofs: Uint8Array,
  claims: RecordCidClaim[],
  did: string,
  didKey: string,
): Promise<{ verified: RecordCidClaim[]; unverified: RecordCidClaim[] }> => {
  const car = await readCarWithRoot(proofs);
  const blockstore = new MemoryBlockstore(car.blocks);
  const commit = blockstore.readObj(car.root, def.commit);
  if (commit.did !== did) {
    throw new RepoVerificationError(`Invalid repo did: ${commit.did}`);
  }
  const validSig = util.verifyCommitSig(commit, didKey);
  if (!validSig) {
    throw new RepoVerificationError(
      `Invalid signature on commit: ${car.root.toString()}`,
    );
  }
  const mst = MST.load(blockstore, (commit as { data: CID }).data);
  const verified: RecordCidClaim[] = [];
  const unverified: RecordCidClaim[] = [];
  for (const claim of claims) {
    const found = await mst.get(
      util.formatDataKey(claim.collection, claim.rkey),
    );
    const record = found ? blockstore.readObj(found, def.map) : null;
    if (claim.cid === null) {
      if (record === null) {
        verified.push(claim);
      } else {
        unverified.push(claim);
      }
    } else {
      // NOTE(review): `found` may be null here — this relies on
      // CID.equals returning false for a nullish argument; confirm
      // against the multiformats version in use.
      if (claim.cid.equals(found)) {
        verified.push(claim);
      } else {
        unverified.push(claim);
      }
    }
  }
  return { verified, unverified };
};

// Verify a proof CAR's commit, then return every record reachable from its
// MST (leaves whose blocks fail to read are silently skipped).
export const verifyRecords = async (
  proofs: Uint8Array,
  did: string,
  signingKey: string,
): Promise<RecordClaim[]> => {
  const car = await readCarWithRoot(proofs);
  const blockstore = new MemoryBlockstore(car.blocks);
  const commit = blockstore.readObj(car.root, def.commit);
  if (commit.did !== did) {
    throw new RepoVerificationError(`Invalid repo did: ${commit.did}`);
  }
  const validSig = util.verifyCommitSig(commit, signingKey);
  if (!validSig) {
    throw new RepoVerificationError(
      `Invalid signature on commit: ${car.root.toString()}`,
    );
  }
  const mst = MST.load(blockstore, (commit as { data: CID }).data);

  const records: RecordClaim[] = [];
  const leaves = await mst.reachableLeaves();
  for (const leaf of leaves) {
    const { collection, rkey } = util.parseDataKey(leaf.key);
    const record = blockstore.attemptReadRecord(leaf.value);
    if (record) {
      records.push({
        collection,
        rkey,
        record,
      });
    }
  }
  return records;
};

// Thrown when a repo/diff/proof fails did or signature verification.
export class RepoVerificationError extends Error {}
+2
repo/sync/index.ts
··· 1 + export * from "./consumer.ts"; 2 + export * from "./provider.ts";
+64
repo/sync/provider.ts
··· 1 + import type { CID } from "multiformats/cid"; 2 + import { writeCarStream } from "../car.ts"; 3 + import { CidSet } from "../cid-set.ts"; 4 + import { MissingBlocksError } from "../error.ts"; 5 + import { MST } from "../mst/index.ts"; 6 + import type { ReadableBlockstore, RepoStorage } from "../storage/index.ts"; 7 + import { def, type RecordPath } from "../types.ts"; 8 + import * as util from "../util.ts"; 9 + 10 + // Full Repo 11 + // ------------- 12 + 13 + export const getFullRepo = ( 14 + storage: RepoStorage, 15 + commitCid: CID, 16 + ): AsyncIterable<Uint8Array> => { 17 + return writeCarStream(commitCid, iterateFullRepo(storage, commitCid)); 18 + }; 19 + 20 + async function* iterateFullRepo(storage: RepoStorage, commitCid: CID) { 21 + const commit = storage.readObjAndBytes(commitCid, def.commit); 22 + yield { cid: commitCid, bytes: commit.bytes }; 23 + const mst = MST.load(storage, commit.obj.data as CID); 24 + for await (const block of mst.carBlockStream()) { 25 + yield block; 26 + } 27 + } 28 + 29 + // Narrow slices 30 + // ------------- 31 + 32 + export const getRecords = ( 33 + storage: ReadableBlockstore, 34 + commitCid: CID, 35 + paths: RecordPath[], 36 + ): AsyncIterable<Uint8Array> => { 37 + return writeCarStream( 38 + commitCid, 39 + iterateRecordBlocks(storage, commitCid, paths), 40 + ); 41 + }; 42 + 43 + async function* iterateRecordBlocks( 44 + storage: ReadableBlockstore, 45 + commitCid: CID, 46 + paths: RecordPath[], 47 + ) { 48 + const commit = storage.readObjAndBytes(commitCid, def.commit); 49 + yield { cid: commitCid, bytes: commit.bytes }; 50 + const mst = MST.load(storage, commit.obj.data as CID); 51 + const cidsForPaths = await Promise.all( 52 + paths.map((p) => mst.cidsForPath(util.formatDataKey(p.collection, p.rkey))), 53 + ); 54 + const allCids = cidsForPaths.reduce((acc, cur) => { 55 + return acc.addSet(new CidSet(cur)); 56 + }, new CidSet()); 57 + const found = storage.getBlocks(allCids.toList()); 58 + if (found.missing.length 
> 0) { 59 + throw new MissingBlocksError("writeRecordsToCarStream", found.missing); 60 + } 61 + for (const block of found.blocks.entries()) { 62 + yield block; 63 + } 64 + }
+156
repo/tests/_keys.ts
··· 1 + export const A0 = 'A0/501344' 2 + export const A1 = 'A1/700567' 3 + export const A2 = 'A2/239654' 4 + export const A3 = 'A3/570745' 5 + export const A4 = 'A4/231700' 6 + export const A5 = 'A5/343219' 7 + export const B0 = 'B0/436099' 8 + export const B1 = 'B1/293486' 9 + export const B2 = 'B2/303249' 10 + export const B3 = 'B3/690557' 11 + export const B4 = 'B4/522003' 12 + export const B5 = 'B5/528640' 13 + export const C0 = 'C0/535043' 14 + export const C1 = 'C1/970596' 15 + export const C2 = 'C2/953910' 16 + export const C3 = 'C3/016643' 17 + export const C4 = 'C4/687126' 18 + export const C5 = 'C5/136391' 19 + export const D0 = 'D0/360671' 20 + export const D1 = 'D1/637976' 21 + export const D2 = 'D2/915466' 22 + export const D3 = 'D3/722333' 23 + export const D4 = 'D4/816246' 24 + export const D5 = 'D5/611412' 25 + export const E0 = 'E0/922708' 26 + export const E1 = 'E1/710014' 27 + export const E2 = 'E2/413113' 28 + export const E3 = 'E3/226890' 29 + export const E4 = 'E4/349347' 30 + export const E5 = 'E5/574759' 31 + export const F0 = 'F0/606463' 32 + export const F1 = 'F1/415452' 33 + export const F2 = 'F2/410478' 34 + export const F3 = 'F3/000172' 35 + export const F4 = 'F4/438093' 36 + export const F5 = 'F5/131765' 37 + export const G0 = 'G0/714257' 38 + export const G1 = 'G1/254594' 39 + export const G2 = 'G2/536869' 40 + export const G3 = 'G3/188348' 41 + export const G4 = 'G4/627086' 42 + export const G5 = 'G5/436727' 43 + export const H0 = 'H0/740256' 44 + export const H1 = 'H1/113887' 45 + export const H2 = 'H2/783135' 46 + export const H3 = 'H3/911996' 47 + export const H4 = 'H4/413212' 48 + export const H5 = 'H5/205035' 49 + export const I0 = 'I0/123247' 50 + export const I1 = 'I1/186251' 51 + export const I2 = 'I2/455864' 52 + export const I3 = 'I3/874267' 53 + export const I4 = 'I4/700662' 54 + export const I5 = 'I5/355687' 55 + export const J0 = 'J0/651505' 56 + export const J1 = 'J1/747356' 57 + export const J2 = 'J2/880562' 58 + 
export const J3 = 'J3/337247' 59 + export const J4 = 'J4/333302' 60 + export const J5 = 'J5/802321' 61 + export const K0 = 'K0/513509' 62 + export const K1 = 'K1/512199' 63 + export const K2 = 'K2/998695' 64 + export const K3 = 'K3/030175' 65 + export const K4 = 'K4/843537' 66 + export const K5 = 'K5/621841' 67 + export const L0 = 'L0/110539' 68 + export const L1 = 'L1/902119' 69 + export const L2 = 'L2/433601' 70 + export const L3 = 'L3/578589' 71 + export const L4 = 'L4/179159' 72 + export const L5 = 'L5/411430' 73 + export const M0 = 'M0/233209' 74 + export const M1 = 'M1/807305' 75 + export const M2 = 'M2/593452' 76 + export const M3 = 'M3/412948' 77 + export const M4 = 'M4/230935' 78 + export const M5 = 'M5/340624' 79 + export const N0 = 'N0/719700' 80 + export const N1 = 'N1/322330' 81 + export const N2 = 'N2/554905' 82 + export const N3 = 'N3/279414' 83 + export const N4 = 'N4/223549' 84 + export const N5 = 'N5/106430' 85 + export const O0 = 'O0/439753' 86 + export const O1 = 'O1/184934' 87 + export const O2 = 'O2/163117' 88 + export const O3 = 'O3/801944' 89 + export const O4 = 'O4/769058' 90 + export const O5 = 'O5/682431' 91 + export const P0 = 'P0/312289' 92 + export const P1 = 'P1/708697' 93 + export const P2 = 'P2/085809' 94 + export const P3 = 'P3/664012' 95 + export const P4 = 'P4/515888' 96 + export const P5 = 'P5/973781' 97 + export const Q0 = 'Q0/322861' 98 + export const Q1 = 'Q1/010908' 99 + export const Q2 = 'Q2/786194' 100 + export const Q3 = 'Q3/614951' 101 + export const Q4 = 'Q4/915803' 102 + export const Q5 = 'Q5/475163' 103 + export const R0 = 'R0/874630' 104 + export const R1 = 'R1/430647' 105 + export const R2 = 'R2/767178' 106 + export const R3 = 'R3/943288' 107 + export const R4 = 'R4/582084' 108 + export const R5 = 'R5/501429' 109 + export const S0 = 'S0/275258' 110 + export const S1 = 'S1/739500' 111 + export const S2 = 'S2/449586' 112 + export const S3 = 'S3/891280' 113 + export const S4 = 'S4/156946' 114 + export const S5 = 
'S5/027482' 115 + export const T0 = 'T0/515259' 116 + export const T1 = 'T1/898487' 117 + export const T2 = 'T2/102538' 118 + export const T3 = 'T3/666778' 119 + export const T4 = 'T4/976512' 120 + export const T5 = 'T5/843268' 121 + export const U0 = 'U0/948132' 122 + export const U1 = 'U1/844531' 123 + export const U2 = 'U2/428499' 124 + export const U3 = 'U3/676721' 125 + export const U4 = 'U4/746122' 126 + export const U5 = 'U5/758627' 127 + export const V0 = 'V0/625208' 128 + export const V1 = 'V1/894664' 129 + export const V2 = 'V2/449156' 130 + export const V3 = 'V3/800489' 131 + export const V4 = 'V4/715514' 132 + export const V5 = 'V5/777528' 133 + export const W0 = 'W0/543856' 134 + export const W1 = 'W1/610464' 135 + export const W2 = 'W2/735768' 136 + export const W3 = 'W3/268620' 137 + export const W4 = 'W4/542722' 138 + export const W5 = 'W5/515212' 139 + export const X0 = 'X0/358710' 140 + export const X1 = 'X1/930435' 141 + export const X2 = 'X2/229496' 142 + export const X3 = 'X3/642411' 143 + export const X4 = 'X4/792973' 144 + export const X5 = 'X5/997958' 145 + export const Y0 = 'Y0/224673' 146 + export const Y1 = 'Y1/383255' 147 + export const Y2 = 'Y2/036710' 148 + export const Y3 = 'Y3/941868' 149 + export const Y4 = 'Y4/989940' 150 + export const Y5 = 'Y5/885038' 151 + export const Z0 = 'Z0/660512' 152 + export const Z1 = 'Z1/946648' 153 + export const Z2 = 'Z2/470347' 154 + export const Z3 = 'Z3/744138' 155 + export const Z4 = 'Z4/679676' 156 + export const Z5 = 'Z5/904085'
+254
repo/tests/_util.ts
··· 1 + import fs from "node:fs"; 2 + import { CID } from "multiformats"; 3 + import { dataToCborBlock, TID } from "@atp/common"; 4 + import type * as crypto from "@atp/crypto"; 5 + import { type Keypair, randomBytes } from "@atp/crypto"; 6 + import { 7 + BlockMap, 8 + type CollectionContents, 9 + type Commit, 10 + type CommitData, 11 + DataDiff, 12 + type RecordPath, 13 + type RecordWriteOp, 14 + type RepoContents, 15 + WriteOpAction, 16 + } from "../mod.ts"; 17 + import type { MST } from "../mst/index.ts"; 18 + import { Repo } from "../repo.ts"; 19 + import type { RepoStorage } from "../storage/index.ts"; 20 + 21 + type IdMapping = Record<string, CID>; 22 + 23 + export const randomCid = async (storage?: RepoStorage): Promise<CID> => { 24 + const block = await dataToCborBlock({ test: randomStr(50) }); 25 + if (storage) { 26 + // @ts-expect-error FIXME remove this comment (and fix the TS error) 27 + await storage.putBlock(block.cid, block.bytes); 28 + } 29 + return block.cid; 30 + }; 31 + 32 + export const generateBulkDataKeys = async ( 33 + count: number, 34 + blockstore?: RepoStorage, 35 + ): Promise<IdMapping> => { 36 + const obj: IdMapping = {}; 37 + for (let i = 0; i < count; i++) { 38 + const key = `com.example.record/${TID.nextStr()}`; 39 + obj[key] = await randomCid(blockstore); 40 + } 41 + return obj; 42 + }; 43 + 44 + export const keysFromMapping = (mapping: IdMapping): TID[] => { 45 + return Object.keys(mapping).map((id) => TID.fromStr(id)); 46 + }; 47 + 48 + export const keysFromMappings = (mappings: IdMapping[]): TID[] => { 49 + return mappings.map(keysFromMapping).flat(); 50 + }; 51 + 52 + export const randomStr = (len: number): string => { 53 + let result = ""; 54 + const CHARS = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; 55 + for (let i = 0; i < len; i++) { 56 + result += CHARS.charAt(Math.floor(Math.random() * CHARS.length)); 57 + } 58 + return result; 59 + }; 60 + 61 + export const shuffle = <T>(arr: T[]): T[] => { 62 + const 
toShuffle = [...arr]; 63 + const shuffled: T[] = []; 64 + while (toShuffle.length > 0) { 65 + const index = Math.floor(Math.random() * toShuffle.length); 66 + shuffled.push(toShuffle[index]); 67 + toShuffle.splice(index, 1); 68 + } 69 + return shuffled; 70 + }; 71 + 72 + export const generateObject = (): Record<string, string> => { 73 + return { 74 + name: randomStr(100), 75 + }; 76 + }; 77 + 78 + // Mass repo mutations & checking 79 + // ------------------------------- 80 + 81 + export const testCollections = ["com.example.posts", "com.example.likes"]; 82 + 83 + export const fillRepo = async ( 84 + repo: Repo, 85 + keypair: crypto.Keypair, 86 + itemsPerCollection: number, 87 + ): Promise<{ repo: Repo; data: RepoContents }> => { 88 + const repoData: RepoContents = {}; 89 + const writes: RecordWriteOp[] = []; 90 + for (const collName of testCollections) { 91 + const collData: CollectionContents = {}; 92 + for (let i = 0; i < itemsPerCollection; i++) { 93 + const object = generateObject(); 94 + const rkey = TID.nextStr(); 95 + collData[rkey] = object; 96 + writes.push({ 97 + action: WriteOpAction.Create, 98 + collection: collName, 99 + rkey, 100 + record: object, 101 + }); 102 + } 103 + repoData[collName] = collData; 104 + } 105 + const updated = await repo.applyWrites(writes, keypair); 106 + return { 107 + repo: updated, 108 + data: repoData, 109 + }; 110 + }; 111 + 112 + export const formatEdit = async ( 113 + repo: Repo, 114 + prevData: RepoContents, 115 + keypair: crypto.Keypair, 116 + params: { 117 + adds?: number; 118 + updates?: number; 119 + deletes?: number; 120 + }, 121 + ): Promise<{ commit: CommitData; data: RepoContents }> => { 122 + const { adds = 0, updates = 0, deletes = 0 } = params; 123 + const repoData: RepoContents = {}; 124 + const writes: RecordWriteOp[] = []; 125 + for (const collName of testCollections) { 126 + const collData = { ...(prevData[collName] ?? 
{}) }; 127 + const shuffled = shuffle(Object.entries(collData)); 128 + 129 + for (let i = 0; i < adds; i++) { 130 + const object = generateObject(); 131 + const rkey = TID.nextStr(); 132 + collData[rkey] = object; 133 + writes.push({ 134 + action: WriteOpAction.Create, 135 + collection: collName, 136 + rkey, 137 + record: object, 138 + }); 139 + } 140 + 141 + const toUpdate = shuffled.slice(0, updates); 142 + for (let i = 0; i < toUpdate.length; i++) { 143 + const object = generateObject(); 144 + const rkey = toUpdate[i][0]; 145 + collData[rkey] = object; 146 + writes.push({ 147 + action: WriteOpAction.Update, 148 + collection: collName, 149 + rkey, 150 + record: object, 151 + }); 152 + } 153 + 154 + const toDelete = shuffled.slice(updates, deletes); 155 + for (let i = 0; i < toDelete.length; i++) { 156 + const rkey = toDelete[i][0]; 157 + delete collData[rkey]; 158 + writes.push({ 159 + action: WriteOpAction.Delete, 160 + collection: collName, 161 + rkey, 162 + }); 163 + } 164 + repoData[collName] = collData; 165 + } 166 + const commit = await repo.formatCommit(writes, keypair); 167 + return { 168 + commit, 169 + data: repoData, 170 + }; 171 + }; 172 + 173 + export const pathsForOps = (ops: RecordWriteOp[]): RecordPath[] => 174 + ops.map((op) => ({ collection: op.collection, rkey: op.rkey })); 175 + 176 + export const saveMst = async (storage: RepoStorage, mst: MST): Promise<CID> => { 177 + const diff = await mst.getUnstoredBlocks(); 178 + // @ts-expect-error FIXME remove this comment (and fix the TS error) 179 + await storage.putMany(diff.blocks); 180 + return diff.root; 181 + }; 182 + 183 + // Creating repo 184 + // ------------------- 185 + export const addBadCommit = async ( 186 + repo: Repo, 187 + keypair: Keypair, 188 + ): Promise<Repo> => { 189 + const obj = generateObject(); 190 + const newBlocks = new BlockMap(); 191 + const cid = await newBlocks.add(obj); 192 + const updatedData = await repo.data.add( 193 + `com.example.test/${TID.next()}`, 194 + cid, 
195 + ); 196 + const dataCid = await updatedData.getPointer(); 197 + const diff = await DataDiff.of(updatedData, repo.data); 198 + newBlocks.addMap(diff.newMstBlocks); 199 + // we generate a bad sig by signing some other data 200 + const rev = TID.nextStr(repo.commit.rev); 201 + const commit: Commit = { 202 + ...repo.commit, 203 + rev, 204 + data: dataCid, 205 + sig: await keypair.sign(randomBytes(256)), 206 + }; 207 + const commitCid = await newBlocks.add(commit); 208 + 209 + // @ts-expect-error FIXME remove this comment (and fix the TS error) 210 + await repo.storage.applyCommit({ 211 + cid: commitCid, 212 + rev, 213 + prev: repo.cid, 214 + newBlocks, 215 + removedCids: diff.removedCids, 216 + }); 217 + return await Repo.load(repo.storage, commitCid); 218 + }; 219 + 220 + // Logging 221 + // ---------------- 222 + 223 + export const writeMstLog = async (filename: string, tree: MST) => { 224 + let log = ""; 225 + for await (const entry of tree.walk()) { 226 + if (entry.isLeaf()) continue; 227 + const layer = await entry.getLayer(); 228 + log += `Layer ${layer}: ${entry.pointer}\n`; 229 + log += "--------------\n"; 230 + const entries = await entry.getEntries(); 231 + for (const e of entries) { 232 + if (e.isLeaf()) { 233 + log += `Key: ${e.key} (${e.value})\n`; 234 + } else { 235 + log += `Subtree: ${e.pointer}\n`; 236 + } 237 + } 238 + log += "\n\n"; 239 + } 240 + fs.writeFileSync(filename, log); 241 + }; 242 + 243 + export const saveMstEntries = (filename: string, entries: [string, CID][]) => { 244 + const writable = entries.map(([key, val]) => [key, val.toString()]); 245 + fs.writeFileSync(filename, JSON.stringify(writable)); 246 + }; 247 + 248 + export const loadMstEntries = (filename: string): [string, CID][] => { 249 + const contents = fs.readFileSync(filename); 250 + const parsed = JSON.parse(contents.toString()); 251 + return parsed.map(( 252 + [key, value]: [string, string], 253 + ) => [key, CID.parse(value)]); 254 + };
+28
repo/tests/car-file-fixtures.json
··· 1 + [ 2 + { 3 + "root": "bafyreiapldaco7m23c7qzc4w42r7kxmcswm64nkindtuh4vwztrpoe7m5m", 4 + "blocks": [ 5 + { 6 + "cid": "bafyreiapldaco7m23c7qzc4w42r7kxmcswm64nkindtuh4vwztrpoe7m5m", 7 + "bytes": "oWR0ZXN0ZHJvb3Q" 8 + }, 9 + { 10 + "cid": "bafyreieteuyxvbvjbvuhsuo54qx6r3tnjxtp3ub6kb66rdjml3murxjcsy", 11 + "bytes": "oWR0ZXN0WQEAM32TVA//xgHS9Gtukp0vw6whQ+TnlwF9czt5A7Dxz/URSbryc9Pdw7HESX+jC2oPI/6rwKbhSml2kxJo4MaUeIg/HWI9ixxALw5gIF/I0JC3ejXVAu1Pw6bI9RWa5TgIvAnSow0pJ6jbaaWHlxCpqqHCNHUYbIC14D9k+RK3yS0h2g+O+gRUETQt8t4jOKxEhD037cYEuJCD+fWzFoLkEkrPdUWeqlFQxGt6bflCYjZFgiZKFUo72afR3XM16+jOlhOl+EtuqFcijYJ6MIB53qI8P8HMC2RVH0Mv8UYWLcWatl+CNLykEjesnMar5CvZT8j4w5EyEiS09iD4r6bljg" 12 + }, 13 + { 14 + "cid": "bafyreiahvdwcywzm35id7hajal5l73p4bnoyaowgckeatd6sbxsacoo5jq", 15 + "bytes": "oWR0ZXN0WID81CSnkdKi0OBexCStL5gex6Fkehtg7R9Jaww7LFuJC/6Or9Cdb58I+6T9Kjqhf1bf5t5WLbTGt8HOlf1Ysl3wqfdxdxnRZYjyng8YjEmH2Twkc//moluskzywxfOwhRXsXI7SYt36OUkS8AKDzmhTijOG2XWSa2QvU3iSSjP8zw" 16 + }, 17 + { 18 + "cid": "bafyreictawwxwjuto6csptbtktbbgwkrryqoakgyb3qovakgiqjzmvzi6a", 19 + "bytes": "oWR0ZXN0RBnEJSE" 20 + }, 21 + { 22 + "cid": "bafyreierfkr6cwv2ux5hk6hp5qh5fhgzyr4yicad5m3pyu3ndaatyqucxu", 23 + "bytes": "oWR0ZXN0WQH09xLXHen1UTIPEap18egWK6OiRVQ4oLBqjfBVQGdiSsmsIn8uXDM6wp63kCrYWLvXoI0dREVW6+RUjoQIbmDhiKbEUTuLbcFiJnwD5gSlkXdUEwO4UeLfvcAsDJXel5lLaOlOeRTfA3Wf+rEEpr/ccCuwOATBPZBjPrneFVQu8UdsvTu5W144tGxp/ptBsMBqM3yLYkYYHNRrjcAI7iFiszsfKzo28GyM199Jko11mcVhNQ8KHZS72jbsWW/HyfJsL1M/dn6sDCfIaro66mTLRddSQaheQL4NBW5FEgLUBO2VDuBT9fVIl2IwQcijZAOakjLkS1sY2SyUmdsicqGRalnOrVlC5iWQcwHDXLzm9GWz/vfuoF+jzWsdpo6cjT5MIN8uXpzoZRSjH1+UXFxCzFhXkDwL/xJUq8u/0OFGp0mzDc7RLO3gC7X/ENaVPtz/wQaZ3q00EeHvDuiaxHdKIesf/+IkbqS1XjKtaHemB511MFiVf7l2OcqsUU12A5VMreqWPwbAHLgFRDPWiLS0D7yEF3KJX1IupxBmidQMT+SlmCp7FZMdJeHJ3pzpbQv+EoKSXja7it2D2uYsMcTn2DGhlVYsCcQd8PVNKZmPXuC7D82N4sh8p2XVW4LbsDfNTOrgHLX7zRX31VNa47w5Uc03Wuk" 24 + } 25 + ], 26 + "car": 
"OqJlcm9vdHOB2CpYJQABcRIgD1jAJ32a2L8Mi5bmo/VdgpWZ7jVIaOdD8rbM4vcT7OtndmVyc2lvbgEvAXESIA9YwCd9mti/DIuW5qP1XYKVme41SGjnQ/K2zOL3E+zroWR0ZXN0ZHJvb3StAgFxEiCTJTF6hqkNaHlR3eQv6O5tTeb90D5QfeiNLF7ZSN0ilqFkdGVzdFkBADN9k1QP/8YB0vRrbpKdL8OsIUPk55cBfXM7eQOw8c/1EUm68nPT3cOxxEl/owtqDyP+q8Cm4UppdpMSaODGlHiIPx1iPYscQC8OYCBfyNCQt3o11QLtT8OmyPUVmuU4CLwJ0qMNKSeo22mlh5cQqaqhwjR1GGyAteA/ZPkSt8ktIdoPjvoEVBE0LfLeIzisRIQ9N+3GBLiQg/n1sxaC5BJKz3VFnqpRUMRrem35QmI2RYImShVKO9mn0d1zNevozpYTpfhLbqhXIo2CejCAed6iPD/BzAtkVR9DL/FGFi3FmrZfgjS8pBI3rJzGq+Qr2U/I+MORMhIktPYg+K+m5Y6sAQFxEiAHqOwsWyzfUD+cCQL6v+38C12AOsYSiAmP0g3kATndTKFkdGVzdFiA/NQkp5HSotDgXsQkrS+YHsehZHobYO0fSWsMOyxbiQv+jq/QnW+fCPuk/So6oX9W3+beVi20xrfBzpX9WLJd8Kn3cXcZ0WWI8p4PGIxJh9k8JHP/5qJbrJM8sMXzsIUV7FyO0mLd+jlJEvACg85oU4ozhtl1kmtkL1N4kkoz/M8vAXESIFMFrXsmk3eFJ8wzVMITWVGOIOAo2A7g6oFGRBOWVyjwoWR0ZXN0RBnEJSGhBAFxEiCRKqPhWrql+nV47+wP0pzZxHmECAPrNvxTbRgBPEKCvaFkdGVzdFkB9PcS1x3p9VEyDxGqdfHoFiujokVUOKCwao3wVUBnYkrJrCJ/LlwzOsKet5Aq2Fi716CNHURFVuvkVI6ECG5g4YimxFE7i23BYiZ8A+YEpZF3VBMDuFHi373ALAyV3peZS2jpTnkU3wN1n/qxBKa/3HArsDgEwT2QYz653hVULvFHbL07uVteOLRsaf6bQbDAajN8i2JGGBzUa43ACO4hYrM7Hys6NvBsjNffSZKNdZnFYTUPCh2Uu9o27Flvx8nybC9TP3Z+rAwnyGq6Oupky0XXUkGoXkC+DQVuRRIC1ATtlQ7gU/X1SJdiMEHIo2QDmpIy5EtbGNkslJnbInKhkWpZzq1ZQuYlkHMBw1y85vRls/737qBfo81rHaaOnI0+TCDfLl6c6GUUox9flFxcQsxYV5A8C/8SVKvLv9DhRqdJsw3O0Szt4Au1/xDWlT7c/8EGmd6tNBHh7w7omsR3SiHrH//iJG6ktV4yrWh3pgeddTBYlX+5djnKrFFNdgOVTK3qlj8GwBy4BUQz1oi0tA+8hBdyiV9SLqcQZonUDE/kpZgqexWTHSXhyd6c6W0L/hKCkl42u4rdg9rmLDHE59gxoZVWLAnEHfD1TSmZj17guw/NjeLIfKdl1VuC27A3zUzq4By1+80V99VTWuO8OVHNN1rp" 27 + } 28 + ]
+103
repo/tests/car_test.ts
··· 1 + import { CID } from "multiformats/cid"; 2 + import * as ui8 from "@atp/bytes"; 3 + import { dataToCborBlock, streamToBytes, wait } from "@atp/common"; 4 + import { type CarBlock, readCarStream, writeCarStream } from "../mod.ts"; 5 + import fixtures from "./car-file-fixtures.json" with { type: "json" }; 6 + import { assertEquals, assertRejects } from "@std/assert"; 7 + 8 + for (const fixture of fixtures) { 9 + Deno.test("correctly writes car files", async () => { 10 + const root = CID.parse(fixture.root); 11 + async function* blockIter() { 12 + for (const block of fixture.blocks) { 13 + const cid = CID.parse(block.cid); 14 + const bytes = ui8.fromString(block.bytes, "base64"); 15 + yield { cid, bytes }; 16 + } 17 + } 18 + const carStream = writeCarStream(root, blockIter()); 19 + const car = await streamToBytes(carStream); 20 + const carB64 = ui8.toString(car, "base64"); 21 + assertEquals(carB64, fixture.car); 22 + }); 23 + 24 + Deno.test("correctly reads carfiles", async () => { 25 + const carStream = [ui8.fromString(fixture.car, "base64")]; 26 + const { roots, blocks } = await readCarStream(carStream); 27 + assertEquals(roots.length, 1); 28 + assertEquals(roots[0].toString(), fixture.root); 29 + const carBlocks: CarBlock[] = []; 30 + for await (const block of blocks) { 31 + carBlocks.push(block); 32 + } 33 + assertEquals(carBlocks.length, fixture.blocks.length); 34 + for (let i = 0; i < carBlocks.length; i++) { 35 + assertEquals(carBlocks[i].cid.toString(), fixture.blocks[i].cid); 36 + assertEquals( 37 + ui8.toString(carBlocks[i].bytes, "base64"), 38 + fixture.blocks[i].bytes, 39 + ); 40 + } 41 + }); 42 + } 43 + 44 + Deno.test("writeCar propagates errors", async () => { 45 + const iterate = async () => { 46 + async function* blockIterator() { 47 + await wait(1); 48 + const block = await dataToCborBlock({ test: 1 }); 49 + yield block; 50 + throw new Error("Oops!"); 51 + } 52 + const iter = writeCarStream(null, blockIterator()); 53 + for await (const _bytes 
of iter) { 54 + // no-op 55 + } 56 + }; 57 + await assertRejects(() => iterate(), "Oops!"); 58 + }); 59 + 60 + Deno.test("verifies CIDs", async () => { 61 + const block0 = await dataToCborBlock({ block: 0 }); 62 + const block1 = await dataToCborBlock({ block: 1 }); 63 + const block2 = await dataToCborBlock({ block: 2 }); 64 + const block3 = await dataToCborBlock({ block: 3 }); 65 + const badBlock = await dataToCborBlock({ block: "bad" }); 66 + const blockIter = async function* () { 67 + yield block0; 68 + yield block1; 69 + yield block2; 70 + yield { cid: block3.cid, bytes: badBlock.bytes }; 71 + }; 72 + const flush = async function (iter: AsyncIterable<unknown>) { 73 + for await (const _ of iter) { 74 + // no-op 75 + } 76 + }; 77 + const badCar = await readCarStream(writeCarStream(block0.cid, blockIter())); 78 + await assertRejects(() => flush(badCar.blocks), "Not a valid CID for bytes"); 79 + }); 80 + 81 + Deno.test("skips CID verification", async () => { 82 + const block0 = await dataToCborBlock({ block: 0 }); 83 + const block1 = await dataToCborBlock({ block: 1 }); 84 + const block2 = await dataToCborBlock({ block: 2 }); 85 + const block3 = await dataToCborBlock({ block: 3 }); 86 + const badBlock = await dataToCborBlock({ block: "bad" }); 87 + const blockIter = async function* () { 88 + yield block0; 89 + yield block1; 90 + yield block2; 91 + yield { cid: block3.cid, bytes: badBlock.bytes }; 92 + }; 93 + const flush = async function (iter: AsyncIterable<unknown>) { 94 + for await (const _ of iter) { 95 + // no-op 96 + } 97 + }; 98 + const badCar = await readCarStream( 99 + writeCarStream(block0.cid, blockIter()), 100 + { skipCidVerification: true }, 101 + ); 102 + assertEquals(await flush(badCar.blocks), undefined); 103 + });
+89
repo/tests/commit-data_test.ts
··· 1 + import { Secp256k1Keypair } from "@atp/crypto"; 2 + import { blocksToCarFile, Repo, verifyProofs, WriteOpAction } from "../mod.ts"; 3 + import { MemoryBlockstore } from "../storage/index.ts"; 4 + import { assertRejects } from "@std/assert"; 5 + import { assertEquals } from "@std/assert/equals"; 6 + 7 + // @NOTE this test uses a fully deterministic tree structure 8 + Deno.test("includes all relevant blocks for proof in commit data", async () => { 9 + const did = "did:example:alice"; 10 + const collection = "com.atproto.test"; 11 + const record = { 12 + test: 123, 13 + }; 14 + 15 + const blockstore = new MemoryBlockstore(); 16 + const keypair = Secp256k1Keypair.create(); 17 + let repo = await Repo.create(blockstore, did, keypair); 18 + 19 + const keys: string[] = []; 20 + for (let i = 0; i < 50; i++) { 21 + const rkey = `key-${i}`; 22 + keys.push(rkey); 23 + repo = await repo.applyWrites( 24 + [ 25 + { 26 + action: WriteOpAction.Create, 27 + collection, 28 + rkey, 29 + record, 30 + }, 31 + ], 32 + keypair, 33 + ); 34 + } 35 + 36 + // this test demonstrates the test case: 37 + // specifically in the case of deleting the first key, there is a "rearranged block" that is necessary 38 + // in the proof path but _is not_ in newBlocks (as it already existed in the repository) 39 + { 40 + const commit = await repo.formatCommit( 41 + { 42 + action: WriteOpAction.Delete, 43 + collection, 44 + rkey: keys[0], 45 + }, 46 + keypair, 47 + ); 48 + const car = await blocksToCarFile(commit.cid, commit.newBlocks); 49 + const proofAttempt = verifyProofs( 50 + car, 51 + [ 52 + { 53 + collection, 54 + rkey: keys[0], 55 + cid: null, 56 + }, 57 + ], 58 + did, 59 + keypair.did(), 60 + ); 61 + await assertRejects(() => proofAttempt, "block not found"); 62 + } 63 + 64 + for (const rkey of keys) { 65 + const commit = await repo.formatCommit( 66 + { 67 + action: WriteOpAction.Delete, 68 + collection, 69 + rkey, 70 + }, 71 + keypair, 72 + ); 73 + const car = await 
blocksToCarFile(commit.cid, commit.relevantBlocks); 74 + const proofRes = await verifyProofs( 75 + car, 76 + [ 77 + { 78 + collection, 79 + rkey: rkey, 80 + cid: null, 81 + }, 82 + ], 83 + did, 84 + keypair.did(), 85 + ); 86 + assertEquals(proofRes.unverified.length, 0); 87 + repo = await repo.applyCommit(commit); 88 + } 89 + });
+96
repo/tests/commit-proof-fixtures.json
··· 1 + [ 2 + { 3 + "comment": "two deep split", 4 + "leafValue": "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 5 + "keys": [ 6 + "A0/501344", 7 + "B1/293486", 8 + "C0/535043", 9 + "E0/922708", 10 + "F1/415452", 11 + "G0/714257" 12 + ], 13 + "adds": ["D2/915466"], 14 + "dels": [], 15 + "rootBeforeCommit": "bafyreibthlzzn3rwvmomwf4dz6utt7yeh5eyn6qwbumvjfv35gwanh7ovq", 16 + "rootAfterCommit": "bafyreidb6bxxylhmlzs4a6ruhcunv3fd32o6i5phlzkmjk6arletj2ua6a", 17 + "blocksInProof": [ 18 + "bafyreidb6bxxylhmlzs4a6ruhcunv3fd32o6i5phlzkmjk6arletj2ua6a", 19 + "bafyreifjsxnultnc3tbvnrawqpmrk6d76ymcstwcr5e3hn6u472nasb2xq", 20 + "bafyreibzch5k5j5xkg6dcwmur2p6jqwavyjhdtvifr6g2gnccwhixibzsi", 21 + "bafyreiamcu5ud3j4ovclrgq2sdyev5oajsmpnl2fdu5ffgpfint64n2jme", 22 + "bafyreidxvw3sbdg4t5b2mbtozitnyu7kjien2zcrtgdj4ssgmjb72mzawe" 23 + ] 24 + }, 25 + { 26 + "comment": "two deep leafless split", 27 + "leafValue": "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 28 + "keys": ["A0/501344", "B0/436099", "D0/360671", "E0/922708"], 29 + "adds": ["C2/953910"], 30 + "dels": [], 31 + "rootBeforeCommit": "bafyreid7jnvjg7mr4akmyf7rtaz47duex2l47rz36nvs4i7yjnpuhfmehe", 32 + "rootAfterCommit": "bafyreih2ry5gae5r4m47unhhuw4w2qjdhe6oprw3w2uico2tzbflwi74eu", 33 + "blocksInProof": [ 34 + "bafyreih2ry5gae5r4m47unhhuw4w2qjdhe6oprw3w2uico2tzbflwi74eu", 35 + "bafyreiag5ata5gtynbpef26l4kus2uz4nshuo526h275oljwlm5dwsvhqm", 36 + "bafyreiaybgpm7ahyiy7fko7c4czjokhzajvimot6lfi6mxqzw2bzwoddn4", 37 + "bafyreiheqxxydll4b4zlyemmegb7q3chs7aacczuotpxkqils6bufnsyse", 38 + "bafyreigkijiuasyl5x4f2j3kxzou2vsdyc3vockx63r6bvgoip4ybhj2sa" 39 + ] 40 + }, 41 + { 42 + "comment": "add on edge with neighbor two layers down", 43 + "leafValue": "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 44 + "keys": ["A0/501344", "B2/303249", "C0/535043"], 45 + "adds": ["D2/915466"], 46 + "dels": [], 47 + "rootBeforeCommit": "bafyreifoy7ierkqljk37wozudqhqjuuahjnubqvd3qprx5ocwcfrx5v3hm", 48 + 
"rootAfterCommit": "bafyreid2i3nxmsvv3ifb53nlkjh3qaymygrrxuno6z22gctzdme5lbptky", 49 + "blocksInProof": [ 50 + "bafyreid2i3nxmsvv3ifb53nlkjh3qaymygrrxuno6z22gctzdme5lbptky", 51 + "bafyreiagiwrefvm27hvgryirykp7reqcpz56v6txzksgbargjlibtpsqwu", 52 + "bafyreiewdvzcopoza6bdntvhmvdfqeolql6sckkiu75jpvfnwwnfi57jia" 53 + ] 54 + }, 55 + { 56 + "comment": "merge and split in multi-op commit", 57 + "leafValue": "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 58 + "keys": ["A0/501344", "B2/303249", "D2/915466", "E0/922708"], 59 + "adds": ["C2/953910"], 60 + "dels": ["B2/303249", "D2/915466"], 61 + "rootBeforeCommit": "bafyreielnllkafudlseizljjx32rkkivlgxziqayhctgbxncw2srrox7ny", 62 + "rootAfterCommit": "bafyreih6464tr7ue67qgllhiekgfmwiz45zuthrv72gwi2tjpuu5dbxt3a", 63 + "blocksInProof": [ 64 + "bafyreih6464tr7ue67qgllhiekgfmwiz45zuthrv72gwi2tjpuu5dbxt3a", 65 + "bafyreihexby6fnhajsjzzqkmegqpqt2lrr3rpesyl6kt3t3xppid7tuvfy", 66 + "bafyreiciix65xuk62hu6ew6jdy3m2swqstvnuhuwcwffidk3nduf7eaoh4", 67 + "bafyreieneexkszoung4zc5jzkjukjbbxm74ukz6mylydj7q2v42zqp6vmy", 68 + "bafyreidxvw3sbdg4t5b2mbtozitnyu7kjien2zcrtgdj4ssgmjb72mzawe" 69 + ] 70 + }, 71 + { 72 + "comment": "complex multi-op commit", 73 + "leafValue": "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 74 + "keys": [ 75 + "B0/436099", 76 + "C2/953910", 77 + "D0/360671", 78 + "E2/413113", 79 + "F0/606463", 80 + "H0/740256" 81 + ], 82 + "adds": ["A2/239654", "G2/536869"], 83 + "dels": ["C2/953910"], 84 + "rootBeforeCommit": "bafyreiej4jqggfhidabjfrjgogdwed5eglhnboepxscbwfrss4uclnrrmi", 85 + "rootAfterCommit": "bafyreifykpu67c4w4ynkx4lvjfjwxdofax6gx7j2wxrl6ewt3yslezcb6i", 86 + "blocksInProof": [ 87 + "bafyreifykpu67c4w4ynkx4lvjfjwxdofax6gx7j2wxrl6ewt3yslezcb6i", 88 + "bafyreig5pe2hdnhfbqleo6yyipkw3tdiju7tlm4sqp7btsiicxe4tex5de", 89 + "bafyreievjgro75jk6ma3xwuqvalsydtzgvbbaduhazbvajvslaf3l6kcxu", 90 + "bafyreieax6243224jnbout6ynursux2dvt6fabonofdu47dxupkxvmflvu", 91 + 
"bafyreie44qmlnwlyeh6ubb2eocfko6st7gmbarplmcci6c7ilx24vh4iym", 92 + "bafyreihlhqn4quwcgbum5g4wzkini2c42j7zi5dsjdgkzm55jxyvebndue", 93 + "bafyreiggcbzkb2wgenvyfhkh2nggf7pohb7uzjm6bs7hixhjxw2xpmnq6u" 94 + ] 95 + } 96 + ]
+64
repo/tests/commit-proofs_test.ts
··· 1 + import { CID } from "multiformats"; 2 + import { MST } from "../mst/index.ts"; 3 + import { BlockMap, MemoryBlockstore } from "../mod.ts"; 4 + import fixtures from "./commit-proof-fixtures.json" with { type: "json" }; 5 + import { assert, assertEquals } from "@std/assert"; 6 + 7 + for (const fixture of fixtures) { 8 + Deno.test(fixture.comment, async () => { 9 + const { leafValue, keys, adds, dels } = fixture; 10 + const leaf = CID.parse(leafValue); 11 + 12 + const storage = new MemoryBlockstore(); 13 + let mst = await MST.create(storage); 14 + for (const key of keys) { 15 + mst = await mst.add(key, leaf); 16 + } 17 + 18 + const rootBeforeCommit = await mst.getPointer(); 19 + assertEquals(rootBeforeCommit.toString(), fixture.rootBeforeCommit); 20 + 21 + for (const key of adds) { 22 + mst = await mst.add(key, leaf); 23 + } 24 + for (const key of dels) { 25 + mst = await mst.delete(key); 26 + } 27 + const rootAfterCommit = await mst.getPointer(); 28 + assertEquals(rootAfterCommit.toString(), fixture.rootAfterCommit); 29 + const proofs = await Promise.all( 30 + [...adds, ...dels].map((key) => mst.getCoveringProof(key)), 31 + ); 32 + const proof = proofs.reduce( 33 + (acc, cur) => acc.addMap(cur), 34 + new BlockMap(), 35 + ); 36 + const blocksInProof = fixture.blocksInProof.map((cid) => CID.parse(cid)); 37 + for (const cid of blocksInProof) { 38 + assert(proof.has(cid)); 39 + } 40 + 41 + const invertAdds = adds.map((k) => (mst: MST) => mst.delete(k)); 42 + const invertDels = dels.map((k) => (mst: MST) => mst.add(k, leaf)); 43 + const invertOrders = permutations([...invertAdds, ...invertDels]); 44 + 45 + const proofStorage = new MemoryBlockstore(proof); 46 + for (const order of invertOrders) { 47 + let proofMst = MST.load(proofStorage, rootAfterCommit); 48 + for (const fn of order) { 49 + proofMst = await fn(proofMst); 50 + } 51 + const rootAfterInvert = await proofMst.getPointer(); 52 + assertEquals(rootAfterInvert.toString(), fixture.rootBeforeCommit); 53 + } 
54 + }); 55 + } 56 + 57 + function permutations<T>(arr: T[]): T[][] { 58 + if (arr.length <= 1) return [arr]; 59 + 60 + return arr.reduce((perms: T[][], item: T, i: number) => { 61 + const rest = [...arr.slice(0, i), ...arr.slice(i + 1)]; 62 + return perms.concat(permutations(rest).map((p) => [item, ...p])); 63 + }, []); 64 + }
+231
repo/tests/covering-proofs_test.ts
··· 1 + import { CID } from "multiformats"; 2 + import { BlockMap } from "../mod.ts"; 3 + import { MST } from "../mst/index.ts"; 4 + import { MemoryBlockstore } from "../storage/index.ts"; 5 + import * as k from "./_keys.ts"; 6 + import { assert, assertEquals } from "@std/assert"; 7 + 8 + // @NOTE these tests are the exact same as the tests in commit-proof-fixtures.json but in code from 9 + // kept around currently because they are a bit easier to understand/work with 10 + // we should delete in the future once the fixtures are pulled into our test suite 11 + 12 + /** 13 + * * * 14 + * _________|________ ____|____ 15 + * | | | | | | | | 16 + * * b __*__ f * -> __*__ d __*__ 17 + * | | | | | | | | | | 18 + * a c e g * b * * f * 19 + * | | | | 20 + * a c e g 21 + */ 22 + Deno.test("two deep split ", async () => { 23 + const storage = new MemoryBlockstore(); 24 + const cid = CID.parse( 25 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 26 + ); 27 + 28 + let mst = await MST.create(storage); 29 + mst = await mst.add(k.A0, cid); 30 + mst = await mst.add(k.B1, cid); 31 + mst = await mst.add(k.C0, cid); 32 + mst = await mst.add(k.E0, cid); 33 + mst = await mst.add(k.F1, cid); 34 + mst = await mst.add(k.G0, cid); 35 + 36 + const rootBeforeCommit = await mst.getPointer(); 37 + 38 + mst = await mst.add(k.D2, cid); 39 + const proof = await mst.getCoveringProof(k.D2); 40 + 41 + const proofStorage = new MemoryBlockstore(proof); 42 + let proofMst = MST.load(proofStorage, await mst.getPointer()); 43 + proofMst = await proofMst.delete(k.D2); 44 + const rootAfterInvert = await proofMst.getPointer(); 45 + assertEquals(rootAfterInvert.equals(rootBeforeCommit), true); 46 + }); 47 + 48 + /** 49 + * * * 50 + * _____|_____ ____|____ 51 + * | | | | | | | 52 + * a b d e -> * c * 53 + * | | 54 + * __*__ __*__ 55 + * | | | | 56 + * a b d e 57 + */ 58 + Deno.test("two deep leafless splits ", async () => { 59 + const storage = new MemoryBlockstore(); 60 + const cid = CID.parse( 
61 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 62 + ); 63 + 64 + let mst = await MST.create(storage); 65 + mst = await mst.add(k.A0, cid); 66 + mst = await mst.add(k.B0, cid); 67 + mst = await mst.add(k.D0, cid); 68 + mst = await mst.add(k.E0, cid); 69 + 70 + const rootBeforeCommit = await mst.getPointer(); 71 + 72 + mst = await mst.add(k.C2, cid); 73 + const proof = await mst.getCoveringProof(k.C2); 74 + 75 + const proofStorage = new MemoryBlockstore(proof); 76 + let proofMst = MST.load(proofStorage, await mst.getPointer()); 77 + proofMst = await proofMst.delete(k.C2); 78 + const rootAfterInvert = await proofMst.getPointer(); 79 + assert(rootAfterInvert.equals(rootBeforeCommit)); 80 + }); 81 + 82 + /** 83 + * * * 84 + * ____|____ ____|____ 85 + * | b | | | | | 86 + * * * -> * b * d 87 + * | | | | 88 + * a c a c 89 + */ 90 + Deno.test("add on edge with neighbor two layers down", async () => { 91 + const storage = new MemoryBlockstore(); 92 + const cid = CID.parse( 93 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 94 + ); 95 + 96 + let mst = await MST.create(storage); 97 + mst = await mst.add(k.A0, cid); 98 + mst = await mst.add(k.B2, cid); 99 + mst = await mst.add(k.C0, cid); 100 + 101 + const rootBeforeCommit = await mst.getPointer(); 102 + 103 + mst = await mst.add(k.D2, cid); 104 + const proof = await mst.getCoveringProof(k.D2); 105 + 106 + const proofStorage = new MemoryBlockstore(proof); 107 + let proofMst = MST.load(proofStorage, await mst.getPointer()); 108 + proofMst = await proofMst.delete(k.D2); 109 + const rootAfterInvert = await proofMst.getPointer(); 110 + assert(rootAfterInvert.equals(rootBeforeCommit)); 111 + }); 112 + 113 + /** 114 + * * * 115 + * _____|_____ _____|_____ 116 + * | | | | | | | 117 + * * b d * -> * c * 118 + * | | | | 119 + * a e a e 120 + */ 121 + Deno.test("merge and split in multi op commit", async () => { 122 + const storage = new MemoryBlockstore(); 123 + const cid = CID.parse( 124 + 
"bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 125 + ); 126 + 127 + let mst = await MST.create(storage); 128 + mst = await mst.add(k.A0, cid); 129 + mst = await mst.add(k.B2, cid); 130 + mst = await mst.add(k.D2, cid); 131 + mst = await mst.add(k.E0, cid); 132 + 133 + const rootBeforeCommit = await mst.getPointer(); 134 + 135 + mst = await mst.delete(k.B2); 136 + mst = await mst.delete(k.D2); 137 + mst = await mst.add(k.C2, cid); 138 + 139 + const proofs = await Promise.all([ 140 + mst.getCoveringProof(k.B2), 141 + mst.getCoveringProof(k.D2), 142 + mst.getCoveringProof(k.C2), 143 + ]); 144 + const proof = proofs.reduce((acc, cur) => acc.addMap(cur), new BlockMap()); 145 + const proofStorage = new MemoryBlockstore(proof); 146 + 147 + const addB = (mst: MST) => mst.add(k.B2, cid); 148 + const addD = (mst: MST) => mst.add(k.D2, cid); 149 + const delC = (mst: MST) => mst.delete(k.C2); 150 + 151 + const testOrder = async (fns: ((mst: MST) => Promise<MST>)[]) => { 152 + let proofMst = MST.load(proofStorage, await mst.getPointer()); 153 + for (const fn of fns) { 154 + proofMst = await fn(proofMst); 155 + } 156 + const rootAfterInvert = await proofMst.getPointer(); 157 + assert(rootAfterInvert.equals(rootBeforeCommit)); 158 + }; 159 + 160 + // test that the operations work in any order 161 + await testOrder([addB, addD, delC]); 162 + await testOrder([addB, delC, addD]); 163 + await testOrder([addD, addB, delC]); 164 + await testOrder([addD, delC, addB]); 165 + await testOrder([delC, addB, addD]); 166 + await testOrder([delC, addD, addB]); 167 + }); 168 + 169 + // This complex multi op commit includes: 170 + // - a two deep split 171 + // - a two deep merge 172 + // - an addition that requires knowledge of a leaf two deeper 173 + /** 174 + * * * 175 + * _____|_____ ______|_______ 176 + * | | | | | | | | | | | 177 + * * c * e * -> a * e * g * 178 + * | | _|_ | | | 179 + * * * | | _*_ * * 180 + * b d f h | | | | 181 + * b d f h 182 + */ 183 + 
Deno.test("complex multi-op commit", async () => { 184 + const storage = new MemoryBlockstore(); 185 + const cid = CID.parse( 186 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 187 + ); 188 + 189 + let mst = await MST.create(storage); 190 + mst = await mst.add(k.B0, cid); 191 + mst = await mst.add(k.C2, cid); 192 + mst = await mst.add(k.D0, cid); 193 + mst = await mst.add(k.E2, cid); 194 + mst = await mst.add(k.F0, cid); 195 + mst = await mst.add(k.H0, cid); 196 + 197 + const rootBeforeCommit = await mst.getPointer(); 198 + 199 + mst = await mst.add(k.A2, cid); 200 + mst = await mst.add(k.G2, cid); 201 + mst = await mst.delete(k.C2); 202 + 203 + const proofs = await Promise.all([ 204 + mst.getCoveringProof(k.A2), 205 + mst.getCoveringProof(k.G2), 206 + mst.getCoveringProof(k.C2), 207 + ]); 208 + const proof = proofs.reduce((acc, cur) => acc.addMap(cur), new BlockMap()); 209 + const proofStorage = new MemoryBlockstore(proof); 210 + 211 + const delA = (mst: MST) => mst.delete(k.A2); 212 + const delG = (mst: MST) => mst.delete(k.G2); 213 + const addC = (mst: MST) => mst.add(k.C2, cid); 214 + 215 + const testOrder = async (fns: ((mst: MST) => Promise<MST>)[]) => { 216 + let proofMst = MST.load(proofStorage, await mst.getPointer()); 217 + for (const fn of fns) { 218 + proofMst = await fn(proofMst); 219 + } 220 + const rootAfterInvert = await proofMst.getPointer(); 221 + assert(rootAfterInvert.equals(rootBeforeCommit)); 222 + }; 223 + 224 + // test that the operations work in any order 225 + await testOrder([delA, delG, addC]); 226 + await testOrder([delA, addC, delG]); 227 + await testOrder([delG, delA, addC]); 228 + await testOrder([delG, addC, delA]); 229 + await testOrder([addC, delA, delG]); 230 + await testOrder([addC, delG, delA]); 231 + });
+500
repo/tests/mst_test.ts
··· 1 + import { CID } from "multiformats"; 2 + import { assertEquals, assertRejects } from "@std/assert"; 3 + import { 4 + type DataAdd, 5 + type DataDelete, 6 + DataDiff, 7 + type DataUpdate, 8 + } from "../data-diff.ts"; 9 + import { MST } from "../mst/index.ts"; 10 + import { countPrefixLen, InvalidMstKeyError } from "../mst/util.ts"; 11 + import { MemoryBlockstore } from "../storage/index.ts"; 12 + import * as util from "./_util.ts"; 13 + 14 + let blockstore: MemoryBlockstore; 15 + let mst: MST; 16 + let mapping: Record<string, CID>; 17 + let shuffled: [string, CID][]; 18 + 19 + // Setup for main MST tests 20 + Deno.test("MST setup", async () => { 21 + blockstore = new MemoryBlockstore(); 22 + mst = await MST.create(blockstore); 23 + mapping = await util.generateBulkDataKeys(1000, blockstore); 24 + shuffled = util.shuffle(Object.entries(mapping)); 25 + }); 26 + 27 + Deno.test("MST adds records", async () => { 28 + for (const entry of shuffled) { 29 + mst = await mst.add(entry[0], entry[1]); 30 + } 31 + for (const entry of shuffled) { 32 + const got = await mst.get(entry[0]); 33 + assertEquals(entry[1].equals(got), true); 34 + } 35 + 36 + const totalSize = await mst.leafCount(); 37 + assertEquals(totalSize, 1000); 38 + }); 39 + 40 + Deno.test("MST edits records", async () => { 41 + let editedMst = mst; 42 + const toEdit = shuffled.slice(0, 100); 43 + 44 + const edited: [string, CID][] = []; 45 + for (const entry of toEdit) { 46 + const newCid = await util.randomCid(); 47 + editedMst = await editedMst.update(entry[0], newCid); 48 + edited.push([entry[0], newCid]); 49 + } 50 + 51 + for (const entry of edited) { 52 + const got = await editedMst.get(entry[0]); 53 + assertEquals(entry[1].equals(got), true); 54 + } 55 + 56 + const totalSize = await editedMst.leafCount(); 57 + assertEquals(totalSize, 1000); 58 + }); 59 + 60 + Deno.test("MST deletes records", async () => { 61 + let deletedMst = mst; 62 + const toDelete = shuffled.slice(0, 100); 63 + const theRest = 
shuffled.slice(100); 64 + for (const entry of toDelete) { 65 + deletedMst = await deletedMst.delete(entry[0]); 66 + } 67 + 68 + const totalSize = await deletedMst.leafCount(); 69 + assertEquals(totalSize, 900); 70 + 71 + for (const entry of toDelete) { 72 + const got = await deletedMst.get(entry[0]); 73 + assertEquals(got, null); 74 + } 75 + for (const entry of theRest) { 76 + const got = await deletedMst.get(entry[0]); 77 + assertEquals(entry[1].equals(got), true); 78 + } 79 + }); 80 + 81 + Deno.test("MST is order independent", async () => { 82 + const allNodes = await mst.allNodes(); 83 + 84 + let recreated = await MST.create(blockstore); 85 + const reshuffled = util.shuffle(Object.entries(mapping)); 86 + for (const entry of reshuffled) { 87 + recreated = await recreated.add(entry[0], entry[1]); 88 + } 89 + const allReshuffled = await recreated.allNodes(); 90 + 91 + assertEquals(allNodes.length, allReshuffled.length); 92 + for (let i = 0; i < allNodes.length; i++) { 93 + assertEquals(await allNodes[i].equals(allReshuffled[i]), true); 94 + } 95 + }); 96 + 97 + Deno.test("MST saves and loads from blockstore", async () => { 98 + const root = await util.saveMst(blockstore, mst); 99 + const loaded = MST.load(blockstore, root); 100 + const origNodes = await mst.allNodes(); 101 + const loadedNodes = await loaded.allNodes(); 102 + assertEquals(origNodes.length, loadedNodes.length); 103 + for (let i = 0; i < origNodes.length; i++) { 104 + assertEquals(await origNodes[i].equals(loadedNodes[i]), true); 105 + } 106 + }); 107 + 108 + Deno.test("MST diffs", async () => { 109 + let toDiff = mst; 110 + 111 + const toAdd = Object.entries( 112 + await util.generateBulkDataKeys(100, blockstore), 113 + ); 114 + const toEdit = shuffled.slice(500, 600); 115 + const toDel = shuffled.slice(400, 500); 116 + 117 + const expectedAdds: Record<string, DataAdd> = {}; 118 + const expectedUpdates: Record<string, DataUpdate> = {}; 119 + const expectedDels: Record<string, DataDelete> = {}; 120 + 
121 + for (const entry of toAdd) { 122 + toDiff = await toDiff.add(entry[0], entry[1]); 123 + expectedAdds[entry[0]] = { key: entry[0], cid: entry[1] }; 124 + } 125 + for (const entry of toEdit) { 126 + const updated = await util.randomCid(); 127 + toDiff = await toDiff.update(entry[0], updated); 128 + expectedUpdates[entry[0]] = { 129 + key: entry[0], 130 + prev: entry[1], 131 + cid: updated, 132 + }; 133 + } 134 + for (const entry of toDel) { 135 + toDiff = await toDiff.delete(entry[0]); 136 + expectedDels[entry[0]] = { key: entry[0], cid: entry[1] }; 137 + } 138 + 139 + const diff = await DataDiff.of(toDiff, mst); 140 + 141 + assertEquals(diff.addList().length, 100); 142 + assertEquals(diff.updateList().length, 100); 143 + assertEquals(diff.deleteList().length, 100); 144 + 145 + assertEquals(diff.adds, expectedAdds); 146 + assertEquals(diff.updates, expectedUpdates); 147 + assertEquals(diff.deletes, expectedDels); 148 + 149 + // ensure we correctly report all added CIDs 150 + for await (const entry of toDiff.walk()) { 151 + let cid: CID; 152 + if (entry.isTree()) { 153 + cid = await entry.getPointer(); 154 + } else { 155 + cid = entry.value; 156 + } 157 + const found = (blockstore.has(cid)) || 158 + diff.newMstBlocks.has(cid) || 159 + diff.newLeafCids.has(cid); 160 + assertEquals(found, true); 161 + } 162 + }); 163 + 164 + Deno.test("utils counts prefix length", () => { 165 + assertEquals(countPrefixLen("abc", "abc"), 3); 166 + assertEquals(countPrefixLen("", "abc"), 0); 167 + assertEquals(countPrefixLen("abc", ""), 0); 168 + assertEquals(countPrefixLen("ab", "abc"), 2); 169 + assertEquals(countPrefixLen("abc", "ab"), 2); 170 + assertEquals(countPrefixLen("abcde", "abc"), 3); 171 + assertEquals(countPrefixLen("abc", "abcde"), 3); 172 + assertEquals(countPrefixLen("abcde", "abc1"), 3); 173 + assertEquals(countPrefixLen("abcde", "abb"), 2); 174 + assertEquals(countPrefixLen("abcde", "qbb"), 0); 175 + assertEquals(countPrefixLen("", "asdf"), 0); 176 + 
assertEquals(countPrefixLen("abc", "abc\x00"), 3); 177 + assertEquals(countPrefixLen("abc\x00", "abc"), 3); 178 + }); 179 + 180 + // MST Interop Allowable Keys tests 181 + Deno.test("MST Allowable Keys rejects the empty key", async () => { 182 + const blockstore = new MemoryBlockstore(); 183 + const mst = await MST.create(blockstore); 184 + const cid1 = CID.parse( 185 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 186 + ); 187 + 188 + await assertRejects( 189 + async () => await mst.add("", cid1), 190 + InvalidMstKeyError, 191 + ); 192 + }); 193 + 194 + Deno.test("MST Allowable Keys rejects a key with no collection", async () => { 195 + const blockstore = new MemoryBlockstore(); 196 + const mst = await MST.create(blockstore); 197 + const cid1 = CID.parse( 198 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 199 + ); 200 + 201 + await assertRejects( 202 + async () => await mst.add("asdf", cid1), 203 + InvalidMstKeyError, 204 + ); 205 + }); 206 + 207 + Deno.test("MST Allowable Keys rejects a key with a nested collection", async () => { 208 + const blockstore = new MemoryBlockstore(); 209 + const mst = await MST.create(blockstore); 210 + const cid1 = CID.parse( 211 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 212 + ); 213 + 214 + await assertRejects( 215 + async () => await mst.add("nested/collection/asdf", cid1), 216 + InvalidMstKeyError, 217 + ); 218 + }); 219 + 220 + Deno.test("MST Allowable Keys rejects on empty coll or rkey", async () => { 221 + const blockstore = new MemoryBlockstore(); 222 + const mst = await MST.create(blockstore); 223 + const cid1 = CID.parse( 224 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 225 + ); 226 + 227 + await assertRejects( 228 + async () => await mst.add("coll/", cid1), 229 + InvalidMstKeyError, 230 + ); 231 + await assertRejects( 232 + async () => await mst.add("/rkey", cid1), 233 + InvalidMstKeyError, 234 + ); 235 + }); 236 + 237 + Deno.test("MST Allowable 
Keys rejects non-ascii chars", async () => { 238 + const blockstore = new MemoryBlockstore(); 239 + const mst = await MST.create(blockstore); 240 + const cid1 = CID.parse( 241 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 242 + ); 243 + 244 + await assertRejects( 245 + async () => await mst.add("coll/jalapeñoA", cid1), 246 + InvalidMstKeyError, 247 + ); 248 + await assertRejects( 249 + async () => await mst.add("coll/coöperative", cid1), 250 + InvalidMstKeyError, 251 + ); 252 + await assertRejects( 253 + async () => await mst.add("coll/abc💩", cid1), 254 + InvalidMstKeyError, 255 + ); 256 + }); 257 + 258 + Deno.test("MST Allowable Keys rejects ascii that we dont support", async () => { 259 + const blockstore = new MemoryBlockstore(); 260 + const mst = await MST.create(blockstore); 261 + const cid1 = CID.parse( 262 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 263 + ); 264 + 265 + const unsupportedKeys = [ 266 + "coll/key$", 267 + "coll/key%", 268 + "coll/key(", 269 + "coll/key)", 270 + "coll/key+", 271 + "coll/key=", 272 + "coll/@handle", 273 + "coll/any space", 274 + "coll/#extra", 275 + "coll/any+space", 276 + "coll/number[3]", 277 + "coll/number(3)", 278 + "coll/dHJ1ZQ==", 279 + 'coll/"quote"', 280 + ]; 281 + 282 + for (const key of unsupportedKeys) { 283 + await assertRejects( 284 + async () => await mst.add(key, cid1), 285 + InvalidMstKeyError, 286 + ); 287 + } 288 + }); 289 + 290 + Deno.test("MST Allowable Keys rejects keys over 1024 chars", async () => { 291 + const blockstore = new MemoryBlockstore(); 292 + const mst = await MST.create(blockstore); 293 + const cid1 = CID.parse( 294 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 295 + ); 296 + 297 + const longKey = 298 + 
"coll/asdofiupoiwqeurfpaosidfuapsodirupasoirupasoeiruaspeoriuaspeoriu2p3o4iu1pqw3oiuaspdfoiuaspdfoiuasdfpoiasdufpwoieruapsdofiuaspdfoiuasdpfoiausdfpoasidfupasodifuaspdofiuasdpfoiasudfpoasidfuapsodfiuasdpfoiausdfpoasidufpasodifuapsdofiuasdpofiuasdfpoaisdufpaoasdofiupoiwqeurfpaosidfuapsodirupasoirupasoeiruaspeoriuaspeoriu2p3o4iu1pqw3oiuaspdfoiuaspdfoiuasdfpoiasdufpwoieruapsdofiuaspdfoiuasdpfoiausdfpoasidfupasodifuaspdofiuasdpfoiasudfpoasidfuapsodfiuasdpfoiausdfpoasidufpasodifuapsdofiuasdpofiuasdfpoaisdufpaoasdofiupoiwqeurfpaosidfuapsodirupasoirupasoeiruaspeoriuaspeoriu2p3o4iu1pqw3oiuaspdfoiuaspdfoiuasdfpoiasdufpwoieruapsdofiuaspdfoiuasdpfoiausdfpoasidfupasodifuaspdofiuasdpfoiasudfpoasidfuapsodfiuasdpfoiausdfpoasidufpasodifuapsdofiuasdpofiuasdfpoaisdufpaoasdofiupoiwqeurfpaosidfuapsodirupasoirupasoeiruaspeoriuaspeoriu2p3o4iu1pqw3oiuaspdfoiuaspdfoiuasdfpoiasdufpwoieruapsdofiuaspdfoiuasdpfoiausdfpoasidfupasodifuaspdofiuasdpfoiasudfpoasidfuapsodfiuasdpfoiausdfpoasidufpasodifuapsdofiuasdpofiuasdfpoaisdufpaoasdofiupoiwqeurfpaosidfuapsodirupasoirupasoeiruaspeoriuaspeoriu2p3o4iu1pqw3oiuaspdfoiuaspdfoiuasdfpoiasdufpwoieruapsdofiuaspdfoiuasdpfoiausdfpoasidfupasodifuaspdofiuasdpfoiasudfpoasidfuapsodfiuasdpfoiausdfpoasidufpasodifuapsdofiuasdpofiuasdfpoaisdufpao"; 299 + 300 + await assertRejects( 301 + async () => await mst.add(longKey, cid1), 302 + InvalidMstKeyError, 303 + ); 304 + }); 305 + 306 + Deno.test("MST Allowable Keys allows valid keys", async () => { 307 + const blockstore = new MemoryBlockstore(); 308 + let mst = await MST.create(blockstore); 309 + const cid1 = CID.parse( 310 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 311 + ); 312 + 313 + const validKeys = [ 314 + "coll/3jui7kd54zh2y", 315 + "coll/self", 316 + "coll/example.com", 317 + "com.example/rkey", 318 + "coll/~1.2-3_", 319 + "coll/dHJ1ZQ", 320 + "coll/pre:fix", 321 + "coll/_", 322 + ]; 323 + 324 + for (const key of validKeys) { 325 + mst = await mst.add(key, cid1); 326 + } 327 + }); 328 + 
329 + // MST Interop Known Maps tests 330 + Deno.test('MST Known Maps computes "empty" tree root CID', async () => { 331 + const blockstore = new MemoryBlockstore(); 332 + const mst = await MST.create(blockstore); 333 + 334 + assertEquals(await mst.leafCount(), 0); 335 + assertEquals( 336 + (await mst.getPointer()).toString(), 337 + "bafyreie5737gdxlw5i64vzichcalba3z2v5n6icifvx5xytvske7mr3hpm", 338 + ); 339 + }); 340 + 341 + Deno.test('MST Known Maps computes "trivial" tree root CID', async () => { 342 + const blockstore = new MemoryBlockstore(); 343 + let mst = await MST.create(blockstore); 344 + const cid1 = CID.parse( 345 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 346 + ); 347 + 348 + mst = await mst.add("com.example.record/3jqfcqzm3fo2j", cid1); 349 + assertEquals(await mst.leafCount(), 1); 350 + assertEquals( 351 + (await mst.getPointer()).toString(), 352 + "bafyreibj4lsc3aqnrvphp5xmrnfoorvru4wynt6lwidqbm2623a6tatzdu", 353 + ); 354 + }); 355 + 356 + Deno.test('MST Known Maps computes "singlelayer2" tree root CID', async () => { 357 + const blockstore = new MemoryBlockstore(); 358 + let mst = await MST.create(blockstore); 359 + const cid1 = CID.parse( 360 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 361 + ); 362 + 363 + mst = await mst.add("com.example.record/3jqfcqzm3fx2j", cid1); 364 + assertEquals(await mst.leafCount(), 1); 365 + assertEquals(mst.layer, 2); 366 + assertEquals( 367 + (await mst.getPointer()).toString(), 368 + "bafyreih7wfei65pxzhauoibu3ls7jgmkju4bspy4t2ha2qdjnzqvoy33ai", 369 + ); 370 + }); 371 + 372 + Deno.test('MST Known Maps computes "simple" tree root CID', async () => { 373 + const blockstore = new MemoryBlockstore(); 374 + let mst = await MST.create(blockstore); 375 + const cid1 = CID.parse( 376 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 377 + ); 378 + 379 + mst = await mst.add("com.example.record/3jqfcqzm3fp2j", cid1); // level 0 380 + mst = await 
mst.add("com.example.record/3jqfcqzm3fr2j", cid1); // level 0 381 + mst = await mst.add("com.example.record/3jqfcqzm3fs2j", cid1); // level 1 382 + mst = await mst.add("com.example.record/3jqfcqzm3ft2j", cid1); // level 0 383 + mst = await mst.add("com.example.record/3jqfcqzm4fc2j", cid1); // level 0 384 + assertEquals(await mst.leafCount(), 5); 385 + assertEquals( 386 + (await mst.getPointer()).toString(), 387 + "bafyreicmahysq4n6wfuxo522m6dpiy7z7qzym3dzs756t5n7nfdgccwq7m", 388 + ); 389 + }); 390 + 391 + // MST Interop Edge Cases tests 392 + Deno.test("MST Edge Cases trims top of tree on delete", async () => { 393 + const blockstore = new MemoryBlockstore(); 394 + let mst = await MST.create(blockstore); 395 + const cid1 = CID.parse( 396 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 397 + ); 398 + 399 + const l1root = "bafyreifnqrwbk6ffmyaz5qtujqrzf5qmxf7cbxvgzktl4e3gabuxbtatv4"; 400 + const l0root = "bafyreie4kjuxbwkhzg2i5dljaswcroeih4dgiqq6pazcmunwt2byd725vi"; 401 + 402 + mst = await mst.add("com.example.record/3jqfcqzm3fn2j", cid1); // level 0 403 + mst = await mst.add("com.example.record/3jqfcqzm3fo2j", cid1); // level 0 404 + mst = await mst.add("com.example.record/3jqfcqzm3fp2j", cid1); // level 0 405 + mst = await mst.add("com.example.record/3jqfcqzm3fs2j", cid1); // level 1 406 + mst = await mst.add("com.example.record/3jqfcqzm3ft2j", cid1); // level 0 407 + mst = await mst.add("com.example.record/3jqfcqzm3fu2j", cid1); // level 0 408 + 409 + assertEquals(await mst.leafCount(), 6); 410 + assertEquals(mst.layer, 1); 411 + assertEquals((await mst.getPointer()).toString(), l1root); 412 + 413 + mst = await mst.delete("com.example.record/3jqfcqzm3fs2j"); // level 1 414 + assertEquals(await mst.leafCount(), 5); 415 + assertEquals(mst.layer, 0); 416 + assertEquals((await mst.getPointer()).toString(), l0root); 417 + }); 418 + 419 + Deno.test("MST Edge Cases handles insertion that splits two layers down", async () => { 420 + const blockstore = new 
MemoryBlockstore(); 421 + let mst = await MST.create(blockstore); 422 + const cid1 = CID.parse( 423 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 424 + ); 425 + 426 + const l1root = "bafyreiettyludka6fpgp33stwxfuwhkzlur6chs4d2v4nkmq2j3ogpdjem"; 427 + const l2root = "bafyreid2x5eqs4w4qxvc5jiwda4cien3gw2q6cshofxwnvv7iucrmfohpm"; 428 + 429 + mst = await mst.add("com.example.record/3jqfcqzm3fo2j", cid1); // A; level 0 430 + mst = await mst.add("com.example.record/3jqfcqzm3fp2j", cid1); // B; level 0 431 + mst = await mst.add("com.example.record/3jqfcqzm3fr2j", cid1); // C; level 0 432 + mst = await mst.add("com.example.record/3jqfcqzm3fs2j", cid1); // D; level 1 433 + mst = await mst.add("com.example.record/3jqfcqzm3ft2j", cid1); // E; level 0 434 + // GAP for F 435 + mst = await mst.add("com.example.record/3jqfcqzm3fz2j", cid1); // G; level 0 436 + mst = await mst.add("com.example.record/3jqfcqzm4fc2j", cid1); // H; level 0 437 + mst = await mst.add("com.example.record/3jqfcqzm4fd2j", cid1); // I; level 1 438 + mst = await mst.add("com.example.record/3jqfcqzm4ff2j", cid1); // J; level 0 439 + mst = await mst.add("com.example.record/3jqfcqzm4fg2j", cid1); // K; level 0 440 + mst = await mst.add("com.example.record/3jqfcqzm4fh2j", cid1); // L; level 0 441 + 442 + assertEquals(await mst.leafCount(), 11); 443 + assertEquals(mst.layer, 1); 444 + assertEquals((await mst.getPointer()).toString(), l1root); 445 + 446 + // insert F, which will push E out of the node with G+H to a new node under D 447 + mst = await mst.add("com.example.record/3jqfcqzm3fx2j", cid1); // F; level 2 448 + assertEquals(await mst.leafCount(), 12); 449 + assertEquals(mst.layer, 2); 450 + assertEquals((await mst.getPointer()).toString(), l2root); 451 + 452 + // remove F, which should push E back over with G+H 453 + mst = await mst.delete("com.example.record/3jqfcqzm3fx2j"); // F; level 2 454 + assertEquals(await mst.leafCount(), 11); 455 + assertEquals(mst.layer, 1); 456 + 
assertEquals((await mst.getPointer()).toString(), l1root); 457 + }); 458 + 459 + Deno.test("MST Edge Cases handles new layers that are two higher than existing", async () => { 460 + const blockstore = new MemoryBlockstore(); 461 + let mst = await MST.create(blockstore); 462 + const cid1 = CID.parse( 463 + "bafyreie5cvv4h45feadgeuwhbcutmh6t2ceseocckahdoe6uat64zmz454", 464 + ); 465 + 466 + const l0root = "bafyreidfcktqnfmykz2ps3dbul35pepleq7kvv526g47xahuz3rqtptmky"; 467 + const l2root = "bafyreiavxaxdz7o7rbvr3zg2liox2yww46t7g6hkehx4i4h3lwudly7dhy"; 468 + const l2root2 = "bafyreig4jv3vuajbsybhyvb7gggvpwh2zszwfyttjrj6qwvcsp24h6popu"; 469 + 470 + mst = await mst.add("com.example.record/3jqfcqzm3ft2j", cid1); // A; level 0 471 + mst = await mst.add("com.example.record/3jqfcqzm3fz2j", cid1); // C; level 0 472 + assertEquals(await mst.leafCount(), 2); 473 + assertEquals(mst.layer, 0); 474 + assertEquals((await mst.getPointer()).toString(), l0root); 475 + 476 + // insert B, which is two levels above 477 + mst = await mst.add("com.example.record/3jqfcqzm3fx2j", cid1); // B; level 2 478 + assertEquals(await mst.leafCount(), 3); 479 + assertEquals(mst.layer, 2); 480 + assertEquals((await mst.getPointer()).toString(), l2root); 481 + 482 + // remove B 483 + mst = await mst.delete("com.example.record/3jqfcqzm3fx2j"); // B; level 2 484 + assertEquals(await mst.leafCount(), 2); 485 + assertEquals(mst.layer, 0); 486 + assertEquals((await mst.getPointer()).toString(), l0root); 487 + 488 + // insert B (level=2) and D (level=1) 489 + mst = await mst.add("com.example.record/3jqfcqzm3fx2j", cid1); // B; level 2 490 + mst = await mst.add("com.example.record/3jqfcqzm4fd2j", cid1); // D; level 1 491 + assertEquals(await mst.leafCount(), 4); 492 + assertEquals(mst.layer, 2); 493 + assertEquals((await mst.getPointer()).toString(), l2root2); 494 + 495 + // remove D 496 + mst = await mst.delete("com.example.record/3jqfcqzm4fd2j"); // D; level 1 497 + assertEquals(await mst.leafCount(), 3); 498 
+ assertEquals(mst.layer, 2); 499 + assertEquals((await mst.getPointer()).toString(), l2root); 500 + });
+158
repo/tests/proofs_test.ts
··· 1 + import { cidForCbor, streamToBuffer, TID } from "@atp/common"; 2 + import * as crypto from "@atp/crypto"; 3 + import { 4 + type RecordCidClaim, 5 + type RecordPath, 6 + Repo, 7 + type RepoContents, 8 + } from "../mod.ts"; 9 + import { MemoryBlockstore } from "../storage/index.ts"; 10 + import * as sync from "../sync/index.ts"; 11 + import * as util from "./_util.ts"; 12 + import { assert, assertEquals, assertRejects } from "@std/assert"; 13 + 14 + let storage: MemoryBlockstore; 15 + let repo: Repo; 16 + let keypair: crypto.Keypair; 17 + let repoData: RepoContents; 18 + 19 + const repoDid = "did:example:test"; 20 + 21 + Deno.test.beforeAll(async () => { 22 + storage = new MemoryBlockstore(); 23 + keypair = crypto.Secp256k1Keypair.create(); 24 + repo = await Repo.create(storage, repoDid, keypair); 25 + const filled = await util.fillRepo(repo, keypair, 5); 26 + repo = filled.repo; 27 + repoData = filled.data; 28 + }); 29 + 30 + const getProofs = (claims: RecordPath[]) => { 31 + return streamToBuffer(sync.getRecords(storage, repo.cid, claims)); 32 + }; 33 + 34 + const doVerify = (proofs: Uint8Array, claims: RecordCidClaim[]) => { 35 + return sync.verifyProofs(proofs, claims, repoDid, keypair.did()); 36 + }; 37 + 38 + const contentsToClaims = async ( 39 + contents: RepoContents, 40 + ): Promise<RecordCidClaim[]> => { 41 + const claims: RecordCidClaim[] = []; 42 + for (const coll of Object.keys(contents)) { 43 + for (const rkey of Object.keys(contents[coll])) { 44 + claims.push({ 45 + collection: coll, 46 + rkey: rkey, 47 + cid: await cidForCbor(contents[coll][rkey]), 48 + }); 49 + } 50 + } 51 + return claims; 52 + }; 53 + 54 + Deno.test("verifies valid records", async () => { 55 + const claims = await contentsToClaims(repoData); 56 + const proofs = await getProofs(claims); 57 + const results = await doVerify(proofs, claims); 58 + assert(results.verified.length > 0); 59 + assertEquals(results.verified, claims); 60 + assertEquals(results.unverified.length, 0); 61 
+ }); 62 + 63 + Deno.test("verifies record nonexistence", async () => { 64 + const claims: RecordCidClaim[] = [ 65 + { 66 + collection: util.testCollections[0], 67 + rkey: TID.nextStr(), // does not exist 68 + cid: null, 69 + }, 70 + ]; 71 + const proofs = await getProofs(claims); 72 + const results = await doVerify(proofs, claims); 73 + assert(results.verified.length > 0); 74 + assertEquals(results.verified, claims); 75 + assertEquals(results.unverified.length, 0); 76 + }); 77 + 78 + Deno.test("does not verify a record that doesnt exist", async () => { 79 + const realClaims = await contentsToClaims(repoData); 80 + const claims: RecordCidClaim[] = [ 81 + { 82 + ...realClaims[0], 83 + rkey: TID.nextStr(), 84 + }, 85 + ]; 86 + const proofs = await getProofs(claims); 87 + const results = await doVerify(proofs, claims); 88 + assertEquals(results.verified.length, 0); 89 + assert(results.unverified.length > 0); 90 + assertEquals(results.unverified, claims); 91 + }); 92 + 93 + Deno.test("does not verify an invalid record at a real path", async () => { 94 + const realClaims = await contentsToClaims(repoData); 95 + const claims: RecordCidClaim[] = [ 96 + { 97 + ...realClaims[0], 98 + cid: await util.randomCid(), 99 + }, 100 + ]; 101 + const proofs = await getProofs(claims); 102 + const results = await doVerify(proofs, claims); 103 + assertEquals(results.verified.length, 0); 104 + assert(results.unverified.length > 0); 105 + assertEquals(results.unverified, claims); 106 + }); 107 + 108 + Deno.test("does not verify a delete where the record does exist", async () => { 109 + const realClaims = await contentsToClaims(repoData); 110 + const claims: RecordCidClaim[] = [ 111 + { 112 + collection: realClaims[0].collection, 113 + rkey: realClaims[0].rkey, 114 + cid: null, 115 + }, 116 + ]; 117 + const proofs = await getProofs(claims); 118 + const results = await doVerify(proofs, claims); 119 + assertEquals(results.verified.length, 0); 120 + assert(results.unverified.length > 0); 121 
+ assertEquals(results.unverified, claims); 122 + }); 123 + 124 + Deno.test("can determine record proofs from car file", async () => { 125 + const possible = await contentsToClaims(repoData); 126 + const claims = [ 127 + //random sampling of records 128 + possible[0], 129 + possible[4], 130 + possible[5], 131 + possible[8], 132 + ]; 133 + const proofs = await getProofs(claims); 134 + const records = await sync.verifyRecords(proofs, repoDid, keypair.did()); 135 + for (const record of records) { 136 + const foundClaim = claims.find( 137 + (claim) => 138 + claim.collection === record.collection && claim.rkey === record.rkey, 139 + ); 140 + if (!foundClaim) { 141 + throw new Error("Could not find record for claim"); 142 + } 143 + assertEquals( 144 + foundClaim.cid, 145 + await cidForCbor(repoData[record.collection][record.rkey]), 146 + ); 147 + } 148 + }); 149 + 150 + Deno.test("verifyProofs throws on a bad signature", async () => { 151 + const badRepo = await util.addBadCommit(repo, keypair); 152 + const claims = await contentsToClaims(repoData); 153 + const proofs = await streamToBuffer( 154 + sync.getRecords(storage, badRepo.cid, claims), 155 + ); 156 + const fn = sync.verifyProofs(proofs, claims, repoDid, keypair.did()); 157 + await assertRejects(() => fn, sync.RepoVerificationError); 158 + });
+105
repo/tests/repo_test.ts
··· 1 + import { TID } from "@atp/common"; 2 + import { assertEquals } from "@std/assert"; 3 + import type * as crypto from "@atp/crypto"; 4 + import { Secp256k1Keypair } from "@atp/crypto"; 5 + import { type RepoContents, verifyCommitSig, WriteOpAction } from "../mod.ts"; 6 + import { Repo } from "../repo.ts"; 7 + import { MemoryBlockstore } from "../storage/index.ts"; 8 + import * as util from "./_util.ts"; 9 + 10 + const collName = "com.example.posts"; 11 + 12 + let storage: MemoryBlockstore; 13 + let keypair: crypto.Keypair; 14 + let repo: Repo; 15 + let repoData: RepoContents; 16 + 17 + Deno.test("repo creates repo", async () => { 18 + storage = new MemoryBlockstore(); 19 + keypair = Secp256k1Keypair.create(); 20 + repo = await Repo.create(storage, keypair.did(), keypair); 21 + }); 22 + 23 + Deno.test("repo has proper metadata", () => { 24 + assertEquals(repo.did, keypair.did()); 25 + assertEquals(repo.version, 3); 26 + }); 27 + 28 + Deno.test("repo does basic operations", async () => { 29 + const rkey = TID.nextStr(); 30 + const record = util.generateObject(); 31 + repo = await repo.applyWrites( 32 + { 33 + action: WriteOpAction.Create, 34 + collection: collName, 35 + rkey, 36 + record, 37 + }, 38 + keypair, 39 + ); 40 + 41 + let got = await repo.getRecord(collName, rkey); 42 + assertEquals(got, record); 43 + 44 + const updatedRecord = util.generateObject(); 45 + repo = await repo.applyWrites( 46 + { 47 + action: WriteOpAction.Update, 48 + collection: collName, 49 + rkey, 50 + record: updatedRecord, 51 + }, 52 + keypair, 53 + ); 54 + got = await repo.getRecord(collName, rkey); 55 + assertEquals(got, updatedRecord); 56 + 57 + repo = await repo.applyWrites( 58 + { 59 + action: WriteOpAction.Delete, 60 + collection: collName, 61 + rkey: rkey, 62 + }, 63 + keypair, 64 + ); 65 + got = await repo.getRecord(collName, rkey); 66 + assertEquals(got, null); 67 + }); 68 + 69 + Deno.test("repo adds content collections", async () => { 70 + const filled = await 
util.fillRepo(repo, keypair, 100); 71 + repo = filled.repo; 72 + repoData = filled.data; 73 + const contents = await repo.getContents(); 74 + assertEquals(contents, repoData); 75 + }); 76 + 77 + Deno.test("repo edits and deletes content", async () => { 78 + const edit = await util.formatEdit(repo, repoData, keypair, { 79 + adds: 20, 80 + updates: 20, 81 + deletes: 20, 82 + }); 83 + repo = await repo.applyCommit(edit.commit); 84 + repoData = edit.data; 85 + const contents = await repo.getContents(); 86 + assertEquals(contents, repoData); 87 + }); 88 + 89 + Deno.test("repo has a valid signature to commit", () => { 90 + const verified = verifyCommitSig(repo.commit, keypair.did()); 91 + assertEquals(verified, true); 92 + }); 93 + 94 + Deno.test("repo sets correct DID", () => { 95 + assertEquals(repo.did, keypair.did()); 96 + }); 97 + 98 + Deno.test("repo loads from blockstore", async () => { 99 + const reloadedRepo = Repo.load(storage, repo.cid); 100 + 101 + const contents = await reloadedRepo.getContents(); 102 + assertEquals(contents, repoData); 103 + assertEquals(repo.did, keypair.did()); 104 + assertEquals(repo.version, 3); 105 + });
+99
repo/tests/sync_test.ts
··· 1 + import { streamToBuffer } from "@atp/common"; 2 + import { assertEquals, assertRejects } from "@std/assert"; 3 + import * as crypto from "@atp/crypto"; 4 + import { 5 + CidSet, 6 + getAndParseRecord, 7 + readCar, 8 + readCarWithRoot, 9 + Repo, 10 + type RepoContents, 11 + RepoVerificationError, 12 + } from "../mod.ts"; 13 + import { MemoryBlockstore } from "../storage/index.ts"; 14 + import * as sync from "../sync/index.ts"; 15 + import * as util from "./_util.ts"; 16 + 17 + let storage: MemoryBlockstore; 18 + let repo: Repo; 19 + let keypair: crypto.Keypair; 20 + let repoData: RepoContents; 21 + 22 + const repoDid = "did:example:test"; 23 + 24 + // Setup for sync tests 25 + Deno.test("sync setup", async () => { 26 + storage = new MemoryBlockstore(); 27 + keypair = crypto.Secp256k1Keypair.create(); 28 + repo = await Repo.create(storage, repoDid, keypair); 29 + const filled = await util.fillRepo(repo, keypair, 20); 30 + repo = filled.repo; 31 + repoData = filled.data; 32 + }); 33 + 34 + Deno.test("sync a full repo", async () => { 35 + const carBytes = await streamToBuffer(sync.getFullRepo(storage, repo.cid)); 36 + const car = await readCarWithRoot(carBytes); 37 + const verified = await sync.verifyRepo( 38 + car.blocks, 39 + car.root, 40 + repoDid, 41 + keypair.did(), 42 + ); 43 + const syncStorage = new MemoryBlockstore(); 44 + syncStorage.applyCommit(verified.commit); 45 + const loadedRepo = Repo.load(syncStorage, car.root); 46 + const contents = await loadedRepo.getContents(); 47 + assertEquals(contents, repoData); 48 + const contentsFromOps: RepoContents = {}; 49 + for (const write of verified.creates) { 50 + contentsFromOps[write.collection] ??= {}; 51 + const parsed = getAndParseRecord(car.blocks, write.cid); 52 + contentsFromOps[write.collection][write.rkey] = parsed.record; 53 + } 54 + assertEquals(contentsFromOps, repoData); 55 + }); 56 + 57 + Deno.test("sync does not sync duplicate blocks", async () => { 58 + const carBytes = await 
streamToBuffer(sync.getFullRepo(storage, repo.cid)); 59 + const car = await readCar(carBytes); 60 + const cids = new CidSet(); 61 + car.blocks.forEach((_, cid) => { 62 + if (cids.has(cid)) { 63 + throw new Error(`duplicate block: :${cid.toString()}`); 64 + } 65 + cids.add(cid); 66 + }); 67 + }); 68 + 69 + Deno.test("sync syncs a repo that is behind", async () => { 70 + // add more to providers's repo & have consumer catch up 71 + const edit = await util.formatEdit(repo, repoData, keypair, { 72 + adds: 10, 73 + updates: 10, 74 + deletes: 10, 75 + }); 76 + const verified = await sync.verifyDiff( 77 + repo, 78 + edit.commit.newBlocks, 79 + edit.commit.cid, 80 + repoDid, 81 + keypair.did(), 82 + ); 83 + storage.applyCommit(verified.commit); 84 + repo = Repo.load(storage, verified.commit.cid); 85 + const contents = await repo.getContents(); 86 + assertEquals(contents, edit.data); 87 + }); 88 + 89 + Deno.test("sync throws on a bad signature", async () => { 90 + const badRepo = await util.addBadCommit(repo, keypair); 91 + const carBytes = await streamToBuffer( 92 + sync.getFullRepo(storage, badRepo.cid), 93 + ); 94 + const car = await readCarWithRoot(carBytes); 95 + await assertRejects( 96 + () => sync.verifyRepo(car.blocks, car.root, repoDid, keypair.did()), 97 + RepoVerificationError, 98 + ); 99 + });
+186
repo/types.ts
··· 1 + import { CID } from "multiformats"; 2 + import { z } from "zod"; 3 + import { schema as common } from "@atp/common"; 4 + import { def as commonDef } from "@atp/common"; 5 + import { RepoRecord } from "@atp/lexicon"; 6 + import { BlockMap } from "./block-map.ts"; 7 + import { CidSet } from "./cid-set.ts"; 8 + 9 + // Repo nodes 10 + // --------------- 11 + 12 + const unsignedCommit = z.object({ 13 + did: z.string(), 14 + version: z.literal(3), 15 + data: common.cid, 16 + rev: z.string(), 17 + // `prev` added for backwards compatibility with v2, no requirement of keeping around history 18 + prev: common.cid.nullable(), 19 + }); 20 + export type UnsignedCommit = z.infer<typeof unsignedCommit> & { sig?: never }; 21 + 22 + const commit = z.object({ 23 + did: z.string(), 24 + version: z.literal(3), 25 + data: common.cid, 26 + rev: z.string(), 27 + prev: common.cid.nullable(), 28 + sig: common.bytes, 29 + }); 30 + export type Commit = z.infer<typeof commit>; 31 + 32 + const legacyV2Commit = z.object({ 33 + did: z.string(), 34 + version: z.literal(2), 35 + data: common.cid, 36 + rev: z.string().optional(), 37 + prev: common.cid.nullable(), 38 + sig: common.bytes, 39 + }); 40 + export type LegacyV2Commit = z.infer<typeof legacyV2Commit>; 41 + 42 + const versionedCommit = z.discriminatedUnion("version", [ 43 + commit, 44 + legacyV2Commit, 45 + ]); 46 + export type VersionedCommit = z.infer<typeof versionedCommit>; 47 + 48 + export const schema = { 49 + ...common, 50 + commit, 51 + legacyV2Commit, 52 + versionedCommit, 53 + }; 54 + 55 + export const def = { 56 + ...commonDef, 57 + commit: { 58 + name: "commit", 59 + schema: schema.commit, 60 + }, 61 + versionedCommit: { 62 + name: "versioned_commit", 63 + schema: schema.versionedCommit, 64 + }, 65 + }; 66 + 67 + // Repo Operations 68 + // --------------- 69 + 70 + export enum WriteOpAction { 71 + Create = "create", 72 + Update = "update", 73 + Delete = "delete", 74 + } 75 + 76 + export type RecordCreateOp = { 77 + 
action: WriteOpAction.Create; 78 + collection: string; 79 + rkey: string; 80 + record: RepoRecord; 81 + }; 82 + 83 + export type RecordUpdateOp = { 84 + action: WriteOpAction.Update; 85 + collection: string; 86 + rkey: string; 87 + record: RepoRecord; 88 + }; 89 + 90 + export type RecordDeleteOp = { 91 + action: WriteOpAction.Delete; 92 + collection: string; 93 + rkey: string; 94 + }; 95 + 96 + export type RecordWriteOp = RecordCreateOp | RecordUpdateOp | RecordDeleteOp; 97 + 98 + export type RecordCreateDescript = { 99 + action: WriteOpAction.Create; 100 + collection: string; 101 + rkey: string; 102 + cid: CID; 103 + }; 104 + 105 + export type RecordUpdateDescript = { 106 + action: WriteOpAction.Update; 107 + collection: string; 108 + rkey: string; 109 + prev: CID; 110 + cid: CID; 111 + }; 112 + 113 + export type RecordDeleteDescript = { 114 + action: WriteOpAction.Delete; 115 + collection: string; 116 + rkey: string; 117 + cid: CID; 118 + }; 119 + 120 + export type RecordWriteDescript = 121 + | RecordCreateDescript 122 + | RecordUpdateDescript 123 + | RecordDeleteDescript; 124 + 125 + export type WriteLog = RecordWriteDescript[][]; 126 + 127 + // Updates/Commits 128 + // --------------- 129 + 130 + export type CommitData = { 131 + cid: CID; 132 + rev: string; 133 + since: string | null; 134 + prev: CID | null; 135 + newBlocks: BlockMap; 136 + relevantBlocks: BlockMap; 137 + removedCids: CidSet; 138 + }; 139 + 140 + export type RepoUpdate = CommitData & { 141 + ops: RecordWriteOp[]; 142 + }; 143 + 144 + export type CollectionContents = Record<string, RepoRecord>; 145 + export type RepoContents = Record<string, CollectionContents>; 146 + 147 + export type RepoRecordWithCid = { cid: CID; value: RepoRecord }; 148 + export type CollectionContentsWithCids = Record<string, RepoRecordWithCid>; 149 + export type RepoContentsWithCids = Record<string, CollectionContentsWithCids>; 150 + 151 + export type DatastoreContents = Record<string, CID>; 152 + 153 + export type 
RecordPath = { 154 + collection: string; 155 + rkey: string; 156 + }; 157 + 158 + export type RecordCidClaim = { 159 + collection: string; 160 + rkey: string; 161 + cid: CID | null; 162 + }; 163 + 164 + export type RecordClaim = { 165 + collection: string; 166 + rkey: string; 167 + record: RepoRecord | null; 168 + }; 169 + 170 + // Sync 171 + // --------------- 172 + 173 + export type VerifiedDiff = { 174 + writes: RecordWriteDescript[]; 175 + commit: CommitData; 176 + }; 177 + 178 + export type VerifiedRepo = { 179 + creates: RecordCreateDescript[]; 180 + commit: CommitData; 181 + }; 182 + 183 + export type CarBlock = { 184 + cid: CID; 185 + bytes: Uint8Array; 186 + };
+129
repo/util.ts
··· 1 + import * as cbor from "@ipld/dag-cbor"; 2 + import { cborDecode, check, cidForCbor, schema, TID } from "@atp/common"; 3 + import * as crypto from "@atp/crypto"; 4 + import { Keypair } from "@atp/crypto"; 5 + import { ipldToLex, lexToIpld, LexValue, RepoRecord } from "@atp/lexicon"; 6 + import { DataDiff } from "./data-diff.ts"; 7 + import { 8 + Commit, 9 + LegacyV2Commit, 10 + RecordCreateDescript, 11 + RecordDeleteDescript, 12 + RecordPath, 13 + RecordUpdateDescript, 14 + RecordWriteDescript, 15 + UnsignedCommit, 16 + WriteOpAction, 17 + } from "./types.ts"; 18 + 19 + export const diffToWriteDescripts = ( 20 + diff: DataDiff, 21 + ): Promise<RecordWriteDescript[]> => { 22 + return Promise.all([ 23 + ...diff.addList().map((add) => { 24 + const { collection, rkey } = parseDataKey(add.key); 25 + return { 26 + action: WriteOpAction.Create, 27 + collection, 28 + rkey, 29 + cid: add.cid, 30 + } as RecordCreateDescript; 31 + }), 32 + ...diff.updateList().map((upd) => { 33 + const { collection, rkey } = parseDataKey(upd.key); 34 + return { 35 + action: WriteOpAction.Update, 36 + collection, 37 + rkey, 38 + cid: upd.cid, 39 + prev: upd.prev, 40 + } as RecordUpdateDescript; 41 + }), 42 + ...diff.deleteList().map((del) => { 43 + const { collection, rkey } = parseDataKey(del.key); 44 + return { 45 + action: WriteOpAction.Delete, 46 + collection, 47 + rkey, 48 + cid: del.cid, 49 + } as RecordDeleteDescript; 50 + }), 51 + ]); 52 + }; 53 + 54 + export const ensureCreates = ( 55 + descripts: RecordWriteDescript[], 56 + ): RecordCreateDescript[] => { 57 + const creates: RecordCreateDescript[] = []; 58 + for (const descript of descripts) { 59 + if (descript.action !== WriteOpAction.Create) { 60 + throw new Error(`Unexpected action: ${descript.action}`); 61 + } else { 62 + creates.push(descript); 63 + } 64 + } 65 + return creates; 66 + }; 67 + 68 + export const parseDataKey = (key: string): RecordPath => { 69 + const parts = key.split("/"); 70 + if (parts.length !== 2) throw 
new Error(`Invalid record key: ${key}`); 71 + return { collection: parts[0], rkey: parts[1] }; 72 + }; 73 + 74 + export const formatDataKey = (collection: string, rkey: string): string => { 75 + return collection + "/" + rkey; 76 + }; 77 + 78 + export const metaEqual = (a: Commit, b: Commit): boolean => { 79 + return a.did === b.did && a.version === b.version; 80 + }; 81 + 82 + export const signCommit = async ( 83 + unsigned: UnsignedCommit, 84 + keypair: Keypair, 85 + ): Promise<Commit> => { 86 + const encoded = cbor.encode(unsigned); 87 + const sig = await keypair.sign(encoded); 88 + return { 89 + ...unsigned, 90 + sig, 91 + }; 92 + }; 93 + 94 + export const verifyCommitSig = ( 95 + commit: Commit, 96 + didKey: string, 97 + ): boolean => { 98 + const { sig, ...rest } = commit; 99 + const encoded = cbor.encode(rest); 100 + return crypto.verifySignature(didKey, encoded, sig as Uint8Array); 101 + }; 102 + 103 + export const cborToLex = (val: Uint8Array): LexValue => { 104 + return ipldToLex(cborDecode(val)); 105 + }; 106 + 107 + export const cborToLexRecord = (val: Uint8Array): RepoRecord => { 108 + const parsed = cborToLex(val); 109 + if (!check.is(parsed, schema.map)) { 110 + throw new Error("lexicon records be a json object"); 111 + } 112 + return parsed as RepoRecord; 113 + }; 114 + 115 + export const cidForRecord = (val: LexValue) => { 116 + return cidForCbor(lexToIpld(val)); 117 + }; 118 + 119 + export const ensureV3Commit = (commit: LegacyV2Commit | Commit): Commit => { 120 + if (commit.version === 3) { 121 + return commit; 122 + } else { 123 + return { 124 + ...commit, 125 + version: 3, 126 + rev: commit.rev ?? TID.nextStr(), 127 + }; 128 + } 129 + };
-3
syntax/deno.json
··· 3 3 "version": "0.1.0-alpha.1", 4 4 "exports": "./mod.ts", 5 5 "license": "MIT", 6 - "imports": { 7 - "@std/assert": "jsr:@std/assert@^1.0.14" 8 - }, 9 6 "test": { 10 7 "permissions": { 11 8 "read": ["./tests/interop"]
-1
xrpc-server/deno.json
··· 4 4 "exports": "./mod.ts", 5 5 "license": "MIT", 6 6 "imports": { 7 - "@std/assert": "jsr:@std/assert@^1.0.14", 8 7 "@std/cbor": "jsr:@std/cbor@^0.1.8", 9 8 "@std/encoding": "jsr:@std/encoding@^1.0.10", 10 9 "get-port": "npm:get-port@^7.1.0",