Suite of AT Protocol TypeScript libraries built on web standards
20
fork

Configure Feed

Select the types of activity you want to include in your feed.

SINK🚰

+1883 -156
+1
deno.json
··· 9 9 "repo", 10 10 "xrpc", 11 11 "xrpc-server", 12 + "sync", 12 13 "lex-cli" 13 14 ], 14 15 "imports": {
+39 -136
deno.lock
··· 41 41 "jsr:@ts-morph/common@0.27": "0.27.0", 42 42 "jsr:@ts-morph/ts-morph@26": "26.0.0", 43 43 "jsr:@zod/zod@^4.1.11": "4.1.11", 44 - "npm:@atproto/crypto@*": "0.1.0", 44 + "npm:@atproto/crypto@*": "0.4.4", 45 45 "npm:@did-plc/lib@^0.0.4": "0.0.4", 46 46 "npm:@did-plc/server@^0.0.1": "0.0.1_express@4.21.2", 47 47 "npm:@ipld/dag-cbor@^9.2.5": "9.2.5", 48 48 "npm:@types/node@*": "24.2.0", 49 - "npm:cbor-x@*": "1.6.0", 50 49 "npm:get-port@^7.1.0": "7.1.0", 51 - "npm:http-errors@2": "2.0.0", 52 - "npm:key-encoder@^2.0.3": "2.0.3", 53 50 "npm:multiformats@^13.4.1": "13.4.1", 51 + "npm:p-queue@^8.1.1": "8.1.1", 54 52 "npm:prettier@^3.6.2": "3.6.2", 55 53 "npm:rate-limiter-flexible@^2.4.2": "2.4.2", 56 54 "npm:ws@^8.18.3": "8.18.3", ··· 241 239 "uint8arrays" 242 240 ] 243 241 }, 244 - "@cbor-extract/cbor-extract-darwin-arm64@2.2.0": { 245 - "integrity": "sha512-P7swiOAdF7aSi0H+tHtHtr6zrpF3aAq/W9FXx5HektRvLTM2O89xCyXF3pk7pLc7QpaY7AoaE8UowVf9QBdh3w==", 246 - "os": ["darwin"], 247 - "cpu": ["arm64"] 248 - }, 249 - "@cbor-extract/cbor-extract-darwin-x64@2.2.0": { 250 - "integrity": "sha512-1liF6fgowph0JxBbYnAS7ZlqNYLf000Qnj4KjqPNW4GViKrEql2MgZnAsExhY9LSy8dnvA4C0qHEBgPrll0z0w==", 251 - "os": ["darwin"], 252 - "cpu": ["x64"] 253 - }, 254 - "@cbor-extract/cbor-extract-linux-arm64@2.2.0": { 255 - "integrity": "sha512-rQvhNmDuhjTVXSPFLolmQ47/ydGOFXtbR7+wgkSY0bdOxCFept1hvg59uiLPT2fVDuJFuEy16EImo5tE2x3RsQ==", 256 - "os": ["linux"], 257 - "cpu": ["arm64"] 258 - }, 259 - "@cbor-extract/cbor-extract-linux-arm@2.2.0": { 260 - "integrity": "sha512-QeBcBXk964zOytiedMPQNZr7sg0TNavZeuUCD6ON4vEOU/25+pLhNN6EDIKJ9VLTKaZ7K7EaAriyYQ1NQ05s/Q==", 261 - "os": ["linux"], 262 - "cpu": ["arm"] 263 - }, 264 - "@cbor-extract/cbor-extract-linux-x64@2.2.0": { 265 - "integrity": "sha512-cWLAWtT3kNLHSvP4RKDzSTX9o0wvQEEAj4SKvhWuOVZxiDAeQazr9A+PSiRILK1VYMLeDml89ohxCnUNQNQNCw==", 266 - "os": ["linux"], 267 - "cpu": ["x64"] 268 - }, 269 - "@cbor-extract/cbor-extract-win32-x64@2.2.0": { 270 - "integrity": 
"sha512-l2M+Z8DO2vbvADOBNLbbh9y5ST1RY5sqkWOg/58GkUPBYou/cuNZ68SGQ644f1CvZ8kcOxyZtw06+dxWHIoN/w==", 271 - "os": ["win32"], 272 - "cpu": ["x64"] 242 + "@atproto/crypto@0.4.4": { 243 + "integrity": "sha512-Yq9+crJ7WQl7sxStVpHgie5Z51R05etaK9DLWYG/7bR5T4bhdcIgF6IfklLShtZwLYdVVj+K15s0BqW9a8PSDA==", 244 + "dependencies": [ 245 + "@noble/curves", 246 + "@noble/hashes", 247 + "uint8arrays" 248 + ] 273 249 }, 274 250 "@did-plc/lib@0.0.4": { 275 251 "integrity": "sha512-Omeawq3b8G/c/5CtkTtzovSOnWuvIuCI4GTJNrt1AmCskwEQV7zbX5d6km1mjJNbE0gHuQPTVqZxLVqetNbfwA==", 276 252 "dependencies": [ 277 253 "@atproto/common@0.1.1", 278 - "@atproto/crypto", 254 + "@atproto/crypto@0.1.0", 279 255 "@ipld/dag-cbor@7.0.3", 280 256 "axios", 281 257 "multiformats@9.9.0", ··· 287 263 "integrity": "sha512-GtxxHcOrOQ6fNI1ufq3Zqjc2PtWqPZOdsuzlwtxiH9XibUGwDkb0GmaBHyU5GiOxOKZEW1GspZ8mreBA6XOlTQ==", 288 264 "dependencies": [ 289 265 "@atproto/common@0.1.0", 290 - "@atproto/crypto", 266 + "@atproto/crypto@0.1.0", 291 267 "@did-plc/lib", 292 268 "axios", 293 269 "cors", ··· 315 291 "multiformats@13.4.1" 316 292 ] 317 293 }, 318 - "@noble/secp256k1@1.7.2": { 319 - "integrity": "sha512-/qzwYl5eFLH8OWIecQWM31qld2g1NfjgylK+TNhqtaUKP37Nm+Y+z30Fjhw0Ct8p9yCQEm2N3W/AckdIb3SMcQ==" 320 - }, 321 - "@types/bn.js@5.2.0": { 322 - "integrity": "sha512-DLbJ1BPqxvQhIGbeu8VbUC1DiAiahHtAYvA0ZEAa4P31F7IaArc8z3C3BRQdWX4mtLQuABG4yzp76ZrS02Ui1Q==", 294 + "@noble/curves@1.9.7": { 295 + "integrity": "sha512-gbKGcRUYIjA3/zCCNaWDciTMFI0dCkvou3TL8Zmy5Nc7sJ47a0jtOeZoTaMxkuqRo9cRhjOdZJXegxYE5FN/xw==", 323 296 "dependencies": [ 324 - "@types/node" 297 + "@noble/hashes" 325 298 ] 326 299 }, 327 - "@types/elliptic@6.4.18": { 328 - "integrity": "sha512-UseG6H5vjRiNpQvrhy4VF/JXdA3V/Fp5amvveaL+fs28BZ6xIKJBPnUPRlEaZpysD9MbpfaLi8lbl7PGUAkpWw==", 329 - "dependencies": [ 330 - "@types/bn.js" 331 - ] 300 + "@noble/hashes@1.8.0": { 301 + "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==" 
302 + }, 303 + "@noble/secp256k1@1.7.2": { 304 + "integrity": "sha512-/qzwYl5eFLH8OWIecQWM31qld2g1NfjgylK+TNhqtaUKP37Nm+Y+z30Fjhw0Ct8p9yCQEm2N3W/AckdIb3SMcQ==" 332 305 }, 333 306 "@types/node@24.2.0": { 334 307 "integrity": "sha512-3xyG3pMCq3oYCNg7/ZP+E1ooTaGB4cG8JWRsqqOYQdbWNY4zbaV0Ennrd7stjiJEFZCaybcIgpTjJWHRfBSIDw==", ··· 352 325 "array-flatten@1.1.1": { 353 326 "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" 354 327 }, 355 - "asn1.js@5.4.1": { 356 - "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", 357 - "dependencies": [ 358 - "bn.js", 359 - "inherits", 360 - "minimalistic-assert", 361 - "safer-buffer" 362 - ] 363 - }, 364 328 "asynckit@0.4.0": { 365 329 "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" 366 330 }, ··· 380 344 }, 381 345 "big-integer@1.6.52": { 382 346 "integrity": "sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==" 383 - }, 384 - "bn.js@4.12.2": { 385 - "integrity": "sha512-n4DSx829VRTRByMRGdjQ9iqsN0Bh4OolPsFnaZBLcbi8iXcB+kJ9s7EnRt4wILZNV3kPLHkRVfOc/HvhC3ovDw==" 386 347 }, 387 348 "body-parser@1.20.3": { 388 349 "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", ··· 401 362 "unpipe" 402 363 ] 403 364 }, 404 - "brorand@1.1.0": { 405 - "integrity": "sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==" 406 - }, 407 365 "buffer@6.0.3": { 408 366 "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", 409 367 "dependencies": [ ··· 428 386 "get-intrinsic" 429 387 ] 430 388 }, 431 - "cbor-extract@2.2.0": { 432 - "integrity": "sha512-Ig1zM66BjLfTXpNgKpvBePq271BPOvu8MR0Jl080yG7Jsl+wAZunfrwiwA+9ruzm/WEdIV5QF/bjDZTqyAIVHA==", 433 - "dependencies": [ 434 - 
"node-gyp-build-optional-packages" 435 - ], 436 - "optionalDependencies": [ 437 - "@cbor-extract/cbor-extract-darwin-arm64", 438 - "@cbor-extract/cbor-extract-darwin-x64", 439 - "@cbor-extract/cbor-extract-linux-arm", 440 - "@cbor-extract/cbor-extract-linux-arm64", 441 - "@cbor-extract/cbor-extract-linux-x64", 442 - "@cbor-extract/cbor-extract-win32-x64" 443 - ], 444 - "scripts": true, 445 - "bin": true 446 - }, 447 - "cbor-x@1.6.0": { 448 - "integrity": "sha512-0kareyRwHSkL6ws5VXHEf8uY1liitysCVJjlmhaLG+IXLqhSaOO+t63coaso7yjwEzWZzLy8fJo06gZDVQM9Qg==", 449 - "optionalDependencies": [ 450 - "cbor-extract" 451 - ] 452 - }, 453 389 "cborg@1.10.2": { 454 390 "integrity": "sha512-b3tFPA9pUr2zCUiCfRd2+wok2/LBSNUMKOuRRok+WlvvAgEt/PlbgPTsZUcwCOs53IJvLgTp0eotwtosE6njug==", 455 391 "bin": true ··· 504 440 "destroy@1.2.0": { 505 441 "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" 506 442 }, 507 - "detect-libc@2.0.4": { 508 - "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==" 509 - }, 510 443 "dunder-proto@1.0.1": { 511 444 "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", 512 445 "dependencies": [ ··· 518 451 "ee-first@1.1.1": { 519 452 "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" 520 453 }, 521 - "elliptic@6.6.1": { 522 - "integrity": "sha512-RaddvvMatK2LJHqFJ+YA4WysVN5Ita9E35botqIYspQ4TkRAlCicdzKOjlyv/1Za5RyTNn7di//eEV0uTAfe3g==", 523 - "dependencies": [ 524 - "bn.js", 525 - "brorand", 526 - "hash.js", 527 - "hmac-drbg", 528 - "inherits", 529 - "minimalistic-assert", 530 - "minimalistic-crypto-utils" 531 - ] 532 - }, 533 454 "encodeurl@1.0.2": { 534 455 "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" 535 456 }, ··· 565 486 }, 566 487 "event-target-shim@5.0.1": { 567 
488 "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" 489 + }, 490 + "eventemitter3@5.0.1": { 491 + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==" 568 492 }, 569 493 "events@3.3.0": { 570 494 "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" ··· 691 615 "has-symbols" 692 616 ] 693 617 }, 694 - "hash.js@1.1.7": { 695 - "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", 696 - "dependencies": [ 697 - "inherits", 698 - "minimalistic-assert" 699 - ] 700 - }, 701 618 "hasown@2.0.2": { 702 619 "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", 703 620 "dependencies": [ 704 621 "function-bind" 705 622 ] 706 623 }, 707 - "hmac-drbg@1.0.1": { 708 - "integrity": "sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==", 709 - "dependencies": [ 710 - "hash.js", 711 - "minimalistic-assert", 712 - "minimalistic-crypto-utils" 713 - ] 714 - }, 715 624 "http-errors@2.0.0": { 716 625 "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", 717 626 "dependencies": [ ··· 746 655 "ipaddr.js@1.9.1": { 747 656 "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" 748 657 }, 749 - "key-encoder@2.0.3": { 750 - "integrity": "sha512-fgBtpAGIr/Fy5/+ZLQZIPPhsZEcbSlYu/Wu96tNDFNSjSACw5lEIOFeaVdQ/iwrb8oxjlWi6wmWdH76hV6GZjg==", 751 - "dependencies": [ 752 - "@types/elliptic", 753 - "asn1.js", 754 - "bn.js", 755 - "elliptic" 756 - ] 757 - }, 758 658 "kysely@0.23.5": { 759 659 "integrity": "sha512-TH+b56pVXQq0tsyooYLeNfV11j6ih7D50dyN8tkM0e7ndiUH28Nziojiog3qRFlmEj9XePYdZUrNJ2079Qjdow==" 760 660 }, ··· 783 683 "integrity": 
"sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", 784 684 "bin": true 785 685 }, 786 - "minimalistic-assert@1.0.1": { 787 - "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" 788 - }, 789 - "minimalistic-crypto-utils@1.0.1": { 790 - "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==" 791 - }, 792 686 "ms@2.0.0": { 793 687 "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" 794 688 }, ··· 804 698 "negotiator@0.6.3": { 805 699 "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==" 806 700 }, 807 - "node-gyp-build-optional-packages@5.1.1": { 808 - "integrity": "sha512-+P72GAjVAbTxjjwUmwjVrqrdZROD4nf8KgpBoDxqXXTiYZZt/ud60dE5yvCSr9lRO8e8yv6kgJIC0K0PfZFVQw==", 809 - "dependencies": [ 810 - "detect-libc" 811 - ], 812 - "bin": true 813 - }, 814 701 "object-assign@4.1.1": { 815 702 "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" 816 703 }, ··· 832 719 "p-finally@1.0.0": { 833 720 "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==" 834 721 }, 722 + "p-queue@8.1.1": { 723 + "integrity": "sha512-aNZ+VfjobsWryoiPnEApGGmf5WmNsCo9xu8dfaYamG5qaLP7ClhLN6NgsFe6SwJ2UbLEBK5dv9x8Mn5+RVhMWQ==", 724 + "dependencies": [ 725 + "eventemitter3", 726 + "p-timeout@6.1.4" 727 + ] 728 + }, 835 729 "p-timeout@3.2.0": { 836 730 "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==", 837 731 "dependencies": [ 838 732 "p-finally" 839 733 ] 840 734 }, 735 + "p-timeout@6.1.4": { 736 + "integrity": "sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==" 737 + }, 841 738 "p-wait-for@3.2.0": { 842 739 
"integrity": "sha512-wpgERjNkLrBiFmkMEjuZJEWKKDrNfHCKA1OhyN1wg1FrLkULbviEy6py1AyJUgZ72YWFbZ38FIpnqvVqAlDUwA==", 843 740 "dependencies": [ 844 - "p-timeout" 741 + "p-timeout@3.2.0" 845 742 ] 846 743 }, 847 744 "parseurl@1.3.3": { ··· 1231 1128 "npm:@ipld/dag-cbor@^9.2.5", 1232 1129 "npm:multiformats@^13.4.1", 1233 1130 "npm:zod@^4.1.11" 1131 + ] 1132 + }, 1133 + "sync": { 1134 + "dependencies": [ 1135 + "npm:multiformats@^13.4.1", 1136 + "npm:p-queue@^8.1.1" 1234 1137 ] 1235 1138 }, 1236 1139 "xrpc": {
+6 -6
identity/did/util.ts
··· 2 2 ms: number, 3 3 fn: F, 4 4 ): Promise<Awaited<ReturnType<F>>> { 5 - const abortController = new AbortController() 6 - const timer = setTimeout(() => abortController.abort(), ms) 7 - const signal = abortController.signal 5 + const abortController = new AbortController(); 6 + const timer = setTimeout(() => abortController.abort(), ms); 7 + const signal = abortController.signal; 8 8 9 9 try { 10 - return (await fn(signal)) as Awaited<ReturnType<F>> 10 + return (await fn(signal)) as Awaited<ReturnType<F>>; 11 11 } finally { 12 - clearTimeout(timer) 13 - abortController.abort() 12 + clearTimeout(timer); 13 + abortController.abort(); 14 14 } 15 15 }
+5 -5
identity/errors.ts
··· 1 1 export class DidNotFoundError extends Error { 2 2 constructor(public did: string) { 3 - super(`Could not resolve DID: ${did}`) 3 + super(`Could not resolve DID: ${did}`); 4 4 } 5 5 } 6 6 7 7 export class PoorlyFormattedDidError extends Error { 8 8 constructor(public did: string) { 9 - super(`Poorly formatted DID: ${did}`) 9 + super(`Poorly formatted DID: ${did}`); 10 10 } 11 11 } 12 12 13 13 export class UnsupportedDidMethodError extends Error { 14 14 constructor(public did: string) { 15 - super(`Unsupported DID method: ${did}`) 15 + super(`Unsupported DID method: ${did}`); 16 16 } 17 17 } 18 18 ··· 21 21 public did: string, 22 22 public doc: unknown, 23 23 ) { 24 - super(`Poorly formatted DID Document: ${doc}`) 24 + super(`Poorly formatted DID Document: ${doc}`); 25 25 } 26 26 } 27 27 28 28 export class UnsupportedDidWebPathError extends Error { 29 29 constructor(public did: string) { 30 - super(`Unsupported did:web paths: ${did}`) 30 + super(`Unsupported did:web paths: ${did}`); 31 31 } 32 32 }
+26 -9
identity/tests/README.md
··· 1 1 # Identity Tests 2 2 3 - This directory contains tests for the ATP identity system, including DID resolution and caching functionality. 3 + This directory contains tests for the ATP identity system, including DID 4 + resolution and caching functionality. 4 5 5 6 ## Current Status 6 7 7 8 ### Working Tests 9 + 8 10 - `did-document.test.ts` - DID document parsing and validation ✅ 9 - - `did-cache.test.ts` - DID caching functionality ✅ (fixed with mock implementation) 11 + - `did-cache.test.ts` - DID caching functionality ✅ (fixed with mock 12 + implementation) 10 13 11 14 ### Partially Working Tests 12 - - `handle-resolver.test.ts` - Handle resolution (2/4 tests passing, DNS resolution issues) 15 + 16 + - `handle-resolver.test.ts` - Handle resolution (2/4 tests passing, DNS 17 + resolution issues) 13 18 14 19 ### Known Issues 15 20 16 21 #### PLC Tests (did-resolver.test.ts) 17 - The `did-resolver.test.ts` tests are failing due to a version compatibility issue between the PLC library packages: 22 + 23 + The `did-resolver.test.ts` tests are failing due to a version compatibility 24 + issue between the PLC library packages: 18 25 19 26 - `@did-plc/lib` v0.0.4 (client library) 20 27 - `@did-plc/server` v0.0.1 (server library, published 2 years ago) 21 28 22 - **Problem**: The server rejects signatures generated by the client with "Invalid signature on op" errors, even though the signature generation appears to be working correctly. 29 + **Problem**: The server rejects signatures generated by the client with "Invalid 30 + signature on op" errors, even though the signature generation appears to be 31 + working correctly. 23 32 24 - **Root Cause**: The server package is significantly older and uses different validation logic than the client library expects. 33 + **Root Cause**: The server package is significantly older and uses different 34 + validation logic than the client library expects. 
25 35 26 - **Fixed for did-cache.test.ts**: Implemented a mock HTTP server approach that bypasses the PLC compatibility issue while still testing the caching functionality. 36 + **Fixed for did-cache.test.ts**: Implemented a mock HTTP server approach that 37 + bypasses the PLC compatibility issue while still testing the caching 38 + functionality. 27 39 28 40 #### Handle Resolver Tests 29 - Some DNS resolution tests are failing, possibly due to network/environment issues or missing test DNS records. 41 + 42 + Some DNS resolution tests are failing, possibly due to network/environment 43 + issues or missing test DNS records. 30 44 31 45 **Potential Solutions**: 46 + 32 47 1. Wait for updated compatible versions of the PLC packages 33 48 2. Switch to the `@atproto/plc` ecosystem (if available and compatible) 34 49 3. Mock the PLC server interactions for testing purposes ··· 36 51 37 52 ## Server Changes 38 53 39 - The `web/server.ts` file has been successfully converted from Express to use `Deno.serve` while maintaining the same API and functionality. The web server tests should work once the PLC dependency issues are resolved. 54 + The `web/server.ts` file has been successfully converted from Express to use 55 + `Deno.serve` while maintaining the same API and functionality. The web server 56 + tests should work once the PLC dependency issues are resolved. 40 57 41 58 ## Running Tests 42 59
+15
sync/deno.json
··· 1 + { 2 + "name": "@atp/sync", 3 + "version": "0.1.0-alpha.1", 4 + "exports": "./mod.ts", 5 + "license": "MIT", 6 + "imports": { 7 + "multiformats": "npm:multiformats@^13.4.1", 8 + "p-queue": "npm:p-queue@^8.1.1" 9 + }, 10 + "test": { 11 + "permissions": { 12 + "env": true 13 + } 14 + } 15 + }
+71
sync/events.ts
··· 1 + import type { CID } from "multiformats/cid"; 2 + import type { DidDocument } from "@atp/identity"; 3 + import type { RepoRecord } from "@atp/lexicon"; 4 + import type { BlockMap } from "@atp/repo"; 5 + import type { AtUri } from "@atp/syntax"; 6 + 7 + export type Event = CommitEvt | SyncEvt | IdentityEvt | AccountEvt; 8 + 9 + export type CommitMeta = { 10 + seq: number; 11 + time: string; 12 + commit: CID; 13 + blocks: BlockMap; 14 + rev: string; 15 + uri: AtUri; 16 + did: string; 17 + collection: string; 18 + rkey: string; 19 + }; 20 + 21 + export type CommitEvt = Create | Update | Delete; 22 + 23 + export type Create = CommitMeta & { 24 + event: "create"; 25 + record: RepoRecord; 26 + cid: CID; 27 + }; 28 + 29 + export type Update = CommitMeta & { 30 + event: "update"; 31 + record: RepoRecord; 32 + cid: CID; 33 + }; 34 + 35 + export type Delete = CommitMeta & { 36 + event: "delete"; 37 + }; 38 + 39 + export type SyncEvt = { 40 + seq: number; 41 + time: string; 42 + event: "sync"; 43 + did: string; 44 + cid: CID; 45 + rev: string; 46 + blocks: BlockMap; 47 + }; 48 + 49 + export type IdentityEvt = { 50 + seq: number; 51 + time: string; 52 + event: "identity"; 53 + did: string; 54 + handle?: string; 55 + didDocument?: DidDocument; 56 + }; 57 + 58 + export type AccountEvt = { 59 + seq: number; 60 + time: string; 61 + event: "account"; 62 + did: string; 63 + active: boolean; 64 + status?: AccountStatus; 65 + }; 66 + 67 + export type AccountStatus = 68 + | "takendown" 69 + | "suspended" 70 + | "deleted" 71 + | "deactivated";
+393
sync/firehose/index.ts
··· 1 + import type { CID } from "multiformats/cid"; 2 + import type { WebSocketOptions } from "@atp/xrpc-server"; 3 + import { createDeferrable, type Deferrable, wait } from "@atp/common"; 4 + import { 5 + type DidDocument, 6 + type IdResolver, 7 + parseToAtprotoDocument, 8 + } from "@atp/identity"; 9 + import { 10 + cborToLexRecord, 11 + formatDataKey, 12 + parseDataKey, 13 + readCar, 14 + readCarWithRoot, 15 + RepoVerificationError, 16 + verifyProofs, 17 + } from "@atp/repo"; 18 + import { AtUri } from "@atp/syntax"; 19 + import { Subscription } from "@atp/xrpc-server"; 20 + import type { 21 + AccountEvt, 22 + AccountStatus, 23 + CommitEvt, 24 + CommitMeta, 25 + Event, 26 + IdentityEvt, 27 + SyncEvt, 28 + } from "../events.ts"; 29 + import type { EventRunner } from "../runner/index.ts"; 30 + import { didAndSeqForEvt } from "../util.ts"; 31 + import { 32 + type Account, 33 + type Commit, 34 + type Identity, 35 + isAccount, 36 + isCommit, 37 + isIdentity, 38 + isSync, 39 + isValidRepoEvent, 40 + type RepoEvent, 41 + type RepoOp, 42 + type Sync, 43 + } from "./lexicons.ts"; 44 + 45 + export type FirehoseOptions = WebSocketOptions & { 46 + idResolver: IdResolver; 47 + 48 + handleEvent: (evt: Event) => Awaited<void>; 49 + onError: (err: Error) => void; 50 + getCursor?: () => Awaited<number | undefined>; 51 + 52 + runner?: EventRunner; // should only set getCursor *or* runner 53 + 54 + service?: string; 55 + subscriptionReconnectDelay?: number; 56 + 57 + unauthenticatedCommits?: boolean; 58 + unauthenticatedHandles?: boolean; 59 + 60 + filterCollections?: string[]; 61 + excludeIdentity?: boolean; 62 + excludeAccount?: boolean; 63 + excludeCommit?: boolean; 64 + excludeSync?: boolean; 65 + }; 66 + 67 + export class Firehose { 68 + private sub: Subscription<RepoEvent>; 69 + private abortController: AbortController; 70 + private destoryDefer: Deferrable; 71 + 72 + constructor(public opts: FirehoseOptions) { 73 + this.destoryDefer = createDeferrable(); 74 + 
this.abortController = new AbortController(); 75 + if (this.opts.getCursor && this.opts.runner) { 76 + throw new Error("Must set only `getCursor` or `runner`"); 77 + } 78 + this.sub = new Subscription({ 79 + ...opts, 80 + service: opts.service ?? "wss://bsky.network", 81 + method: "com.atproto.sync.subscribeRepos", 82 + signal: this.abortController.signal, 83 + getParams: async () => { 84 + const getCursorFn = () => 85 + this.opts.runner?.getCursor() ?? this.opts.getCursor; 86 + if (!getCursorFn) { 87 + return undefined; 88 + } 89 + const cursor = await getCursorFn(); 90 + return { cursor }; 91 + }, 92 + validate: (value: unknown) => { 93 + try { 94 + return isValidRepoEvent(value); 95 + } catch (err) { 96 + this.opts.onError(new FirehoseValidationError(err, value)); 97 + } 98 + }, 99 + }); 100 + } 101 + 102 + async start(): Promise<void> { 103 + try { 104 + for await (const evt of this.sub) { 105 + if (this.opts.runner) { 106 + const parsed = didAndSeqForEvt(evt); 107 + if (!parsed) { 108 + continue; 109 + } 110 + this.opts.runner.trackEvent(parsed.did, parsed.seq, async () => { 111 + const parsed = await this.parseEvt(evt); 112 + for (const write of parsed) { 113 + try { 114 + await this.opts.handleEvent(write); 115 + } catch (err) { 116 + this.opts.onError(new FirehoseHandlerError(err, write)); 117 + } 118 + } 119 + }); 120 + } else { 121 + await this.processEvt(evt); 122 + } 123 + } 124 + } catch (err) { 125 + if ( 126 + err && 127 + (err as Record<string, unknown>)["name"] === "AbortError" 128 + ) { 129 + this.destoryDefer.resolve(); 130 + return; 131 + } 132 + this.opts.onError(new FirehoseSubscriptionError(err)); 133 + await wait(this.opts.subscriptionReconnectDelay ?? 3000); 134 + return this.start(); 135 + } 136 + } 137 + 138 + private async parseEvt(evt: RepoEvent): Promise<Event[]> { 139 + try { 140 + if (isCommit(evt) && !this.opts.excludeCommit) { 141 + return this.opts.unauthenticatedCommits 142 + ? 
await parseCommitUnauthenticated(evt, this.opts.filterCollections) 143 + : await parseCommitAuthenticated( 144 + this.opts.idResolver, 145 + evt, 146 + this.opts.filterCollections, 147 + ); 148 + } else if (isAccount(evt) && !this.opts.excludeAccount) { 149 + const parsed = parseAccount(evt); 150 + return parsed ? [parsed] : []; 151 + } else if (isIdentity(evt) && !this.opts.excludeIdentity) { 152 + const parsed = await parseIdentity( 153 + this.opts.idResolver, 154 + evt, 155 + this.opts.unauthenticatedHandles, 156 + ); 157 + return parsed ? [parsed] : []; 158 + } else if (isSync(evt) && !this.opts.excludeSync) { 159 + const parsed = await parseSync(evt); 160 + return parsed ? [parsed] : []; 161 + } else { 162 + return []; 163 + } 164 + } catch (err) { 165 + this.opts.onError(new FirehoseParseError(err, evt)); 166 + return []; 167 + } 168 + } 169 + 170 + private async processEvt(evt: RepoEvent) { 171 + const parsed = await this.parseEvt(evt); 172 + for (const write of parsed) { 173 + try { 174 + await this.opts.handleEvent(write); 175 + } catch (err) { 176 + this.opts.onError(new FirehoseHandlerError(err, write)); 177 + } 178 + } 179 + } 180 + 181 + async destroy(): Promise<void> { 182 + this.abortController.abort(); 183 + await this.destoryDefer.complete; 184 + } 185 + } 186 + 187 + export const parseCommitAuthenticated = async ( 188 + idResolver: IdResolver, 189 + evt: Commit, 190 + filterCollections?: string[], 191 + forceKeyRefresh = false, 192 + ): Promise<CommitEvt[]> => { 193 + const did = evt.repo; 194 + const ops = maybeFilterOps(evt.ops, filterCollections); 195 + if (ops.length === 0) { 196 + return []; 197 + } 198 + const claims = ops.map((op) => { 199 + const { collection, rkey } = parseDataKey(op.path); 200 + return { 201 + collection, 202 + rkey, 203 + cid: op.action === "delete" ? 
null : op.cid, 204 + }; 205 + }); 206 + const key = await idResolver.did.resolveAtprotoKey(did, forceKeyRefresh); 207 + const verifiedCids: Record<string, CID | null> = {}; 208 + try { 209 + const results = await verifyProofs(evt.blocks, claims, did, key); 210 + results.verified.forEach((op) => { 211 + const path = formatDataKey(op.collection, op.rkey); 212 + verifiedCids[path] = op.cid; 213 + }); 214 + } catch (err) { 215 + if (err instanceof RepoVerificationError && !forceKeyRefresh) { 216 + return parseCommitAuthenticated(idResolver, evt, filterCollections, true); 217 + } 218 + throw err; 219 + } 220 + const verifiedOps: RepoOp[] = ops.filter((op) => { 221 + if (op.action === "delete") { 222 + return verifiedCids[op.path] === null; 223 + } else { 224 + return op.cid !== null && op.cid.equals(verifiedCids[op.path]); 225 + } 226 + }); 227 + return formatCommitOps(evt, verifiedOps, { 228 + skipCidVerification: true, // already checked via verifyProofs() 229 + }); 230 + }; 231 + 232 + export const parseCommitUnauthenticated = ( 233 + evt: Commit, 234 + filterCollections?: string[], 235 + ): Promise<CommitEvt[]> => { 236 + const ops = maybeFilterOps(evt.ops, filterCollections); 237 + return formatCommitOps(evt, ops); 238 + }; 239 + 240 + const maybeFilterOps = ( 241 + ops: RepoOp[], 242 + filterCollections?: string[], 243 + ): RepoOp[] => { 244 + if (!filterCollections) return ops; 245 + return ops.filter((op) => { 246 + const { collection } = parseDataKey(op.path); 247 + return filterCollections.includes(collection); 248 + }); 249 + }; 250 + 251 + const formatCommitOps = async ( 252 + evt: Commit, 253 + ops: RepoOp[], 254 + options?: { skipCidVerification: boolean }, 255 + ) => { 256 + const car = await readCar(evt.blocks, options); 257 + 258 + const evts: CommitEvt[] = []; 259 + 260 + for (const op of ops) { 261 + const uri = AtUri.make(evt.repo, op.path); 262 + 263 + const meta: CommitMeta = { 264 + seq: evt.seq, 265 + time: evt.time, 266 + commit: evt.commit, 267 
+ blocks: car.blocks, 268 + rev: evt.rev, 269 + uri, 270 + did: uri.host, 271 + collection: uri.collection, 272 + rkey: uri.rkey, 273 + }; 274 + 275 + if (op.action === "create" || op.action === "update") { 276 + if (!op.cid) continue; 277 + const recordBytes = car.blocks.get(op.cid); 278 + if (!recordBytes) continue; 279 + const record = cborToLexRecord(recordBytes); 280 + evts.push({ 281 + ...meta, 282 + event: op.action as "create" | "update", 283 + cid: op.cid, 284 + record, 285 + }); 286 + } 287 + 288 + if (op.action === "delete") { 289 + evts.push({ 290 + ...meta, 291 + event: "delete", 292 + }); 293 + } 294 + } 295 + 296 + return evts; 297 + }; 298 + 299 + export const parseSync = async (evt: Sync): Promise<SyncEvt | null> => { 300 + const car = await readCarWithRoot(evt.blocks); 301 + 302 + return { 303 + event: "sync", 304 + seq: evt.seq, 305 + time: evt.time, 306 + did: evt.did, 307 + cid: car.root, 308 + rev: evt.rev, 309 + blocks: car.blocks, 310 + }; 311 + }; 312 + 313 + export const parseIdentity = async ( 314 + idResolver: IdResolver, 315 + evt: Identity, 316 + unauthenticated = false, 317 + ): Promise<IdentityEvt | null> => { 318 + const res = await idResolver.did.resolve(evt.did); 319 + const handle = res && !unauthenticated 320 + ? await verifyHandle(idResolver, evt.did, res) 321 + : undefined; 322 + 323 + return { 324 + event: "identity", 325 + seq: evt.seq, 326 + time: evt.time, 327 + did: evt.did, 328 + handle, 329 + didDocument: res ?? undefined, 330 + }; 331 + }; 332 + 333 + const verifyHandle = async ( 334 + idResolver: IdResolver, 335 + did: string, 336 + didDoc: DidDocument, 337 + ): Promise<string | undefined> => { 338 + const { handle } = parseToAtprotoDocument(didDoc); 339 + if (!handle) { 340 + return undefined; 341 + } 342 + const res = await idResolver.handle.resolve(handle); 343 + return res === did ? 
handle : undefined; 344 + }; 345 + 346 + export const parseAccount = (evt: Account): AccountEvt | undefined => { 347 + if (evt.status && !isValidStatus(evt.status)) return; 348 + return { 349 + event: "account", 350 + seq: evt.seq, 351 + time: evt.time, 352 + did: evt.did, 353 + active: evt.active, 354 + status: evt.status as AccountStatus | undefined, 355 + }; 356 + }; 357 + 358 + const isValidStatus = (str: string): str is AccountStatus => { 359 + return ["takendown", "suspended", "deleted", "deactivated"].includes(str); 360 + }; 361 + 362 + export class FirehoseValidationError extends Error { 363 + constructor( 364 + err: unknown, 365 + public value: unknown, 366 + ) { 367 + super("error in firehose event lexicon validation", { cause: err }); 368 + } 369 + } 370 + 371 + export class FirehoseParseError extends Error { 372 + constructor( 373 + err: unknown, 374 + public event: RepoEvent, 375 + ) { 376 + super("error in parsing and authenticating firehose event", { cause: err }); 377 + } 378 + } 379 + 380 + export class FirehoseSubscriptionError extends Error { 381 + constructor(err: unknown) { 382 + super("error on firehose subscription", { cause: err }); 383 + } 384 + } 385 + 386 + export class FirehoseHandlerError extends Error { 387 + constructor( 388 + err: unknown, 389 + public event: Event, 390 + ) { 391 + super("error in firehose event handler", { cause: err }); 392 + } 393 + }
+446
sync/firehose/lexicons.ts
import type { IncomingMessage } from "node:http";
import type { CID } from "multiformats/cid";
import { type LexiconDoc, Lexicons } from "@atp/lexicon";
import type { Auth, ErrorFrame } from "@atp/xrpc-server";

// @NOTE: this file is an ugly copy job of codegen output. I'd like to clean this whole thing up

/** Narrow an unknown value to a non-null object record. */
export function isObj(v: unknown): v is Record<string, unknown> {
  return typeof v === "object" && v !== null;
}

/** Narrow an object to one known to carry the property `prop`. */
export function hasProp<K extends PropertyKey>(
  data: object,
  prop: K,
): data is Record<K, unknown> {
  return prop in data;
}

export interface QueryParams {
  /** The last known event seq number to backfill from. */
  cursor?: number;
}

/** Union of every message type emitted on the subscribeRepos stream. */
export type RepoEvent =
  | Commit
  | Identity
  | Account
  | Sync
  | Info
  | { $type: string; [k: string]: unknown };
export type HandlerError = ErrorFrame<"FutureCursor" | "ConsumerTooSlow">;
export type HandlerOutput = HandlerError | RepoEvent;
export type HandlerReqCtx<HA extends Auth = never> = {
  auth: HA;
  params: QueryParams;
  req: IncomingMessage;
  signal: AbortSignal;
};
export type Handler<HA extends Auth = never> = (
  ctx: HandlerReqCtx<HA>,
) => AsyncIterable<HandlerOutput>;

/** Represents an update of repository state. Note that empty commits are allowed, which include no repo data changes, but an update to rev and signature. */
export interface Commit {
  /** The stream sequence number of this message. */
  seq: number;
  /** DEPRECATED -- unused */
  rebase: boolean;
  /** DEPRECATED -- replaced by #sync event and data limits. Indicates that this commit contained too many ops, or data size was too large. Consumers will need to make a separate request to get missing data. */
  tooBig: boolean;
  /** The repo this event comes from. */
  repo: string;
  /** Repo commit object CID. */
  commit: CID;
  /** DEPRECATED -- unused. WARNING -- nullable and optional; stick with optional to ensure golang interoperability. */
  prev?: CID | null;
  /** The rev of the emitted commit. Note that this information is also in the commit object included in blocks, unless this is a tooBig event. */
  rev: string;
  /** The rev of the last emitted commit from this repo (if any). */
  since: string | null;
  /** CAR file containing relevant blocks, as a diff since the previous repo state. */
  blocks: Uint8Array;
  ops: RepoOp[];
  blobs: CID[];
  /** Timestamp of when this message was originally broadcast. */
  time: string;
  [k: string]: unknown;
}

export function isCommit(v: unknown): v is Commit {
  return (
    isObj(v) &&
    hasProp(v, "$type") &&
    v.$type === "com.atproto.sync.subscribeRepos#commit"
  );
}

/** Updates the repo to a new state, without necessarily including that state on the firehose. Used to recover from broken commit streams, data loss incidents, or in situations where upstream host does not know recent state of the repository. */
// NOTE(review): unlike the other event interfaces, Sync has no `[k: string]: unknown`
// index signature -- confirm whether that asymmetry is intentional.
export interface Sync {
  $type?: "com.atproto.sync.subscribeRepos#sync";
  /** The stream sequence number of this message. */
  seq: number;
  /** The account this repo event corresponds to. Must match that in the commit object. */
  did: string;
  /** CAR file containing the commit, as a block. The CAR header must include the commit block CID as the first 'root'. */
  blocks: Uint8Array;
  /** The rev of the commit. This value must match that in the commit object. */
  rev: string;
  /** Timestamp of when this message was originally broadcast. */
  time: string;
}

export function isSync(v: unknown): v is Sync {
  return (
    isObj(v) &&
    hasProp(v, "$type") &&
    v.$type === "com.atproto.sync.subscribeRepos#sync"
  );
}

/** Represents a change to an account's identity. Could be an updated handle, signing key, or pds hosting endpoint. Serves as a prod to all downstream services to refresh their identity cache. */
export interface Identity {
  seq: number;
  did: string;
  time: string;
  /** The current handle for the account, or 'handle.invalid' if validation fails. This field is optional, might have been validated or passed-through from an upstream source. Semantics and behaviors for PDS vs Relay may evolve in the future; see atproto specs for more details. */
  handle?: string;
  [k: string]: unknown;
}

export function isIdentity(v: unknown): v is Identity {
  return (
    isObj(v) &&
    hasProp(v, "$type") &&
    v.$type === "com.atproto.sync.subscribeRepos#identity"
  );
}

/** Represents a change to an account's status on a host (eg, PDS or Relay). The semantics of this event are that the status is at the host which emitted the event, not necessarily that at the currently active PDS. Eg, a Relay takedown would emit a takedown with active=false, even if the PDS is still active. */
export interface Account {
  seq: number;
  did: string;
  time: string;
  /** Indicates that the account has a repository which can be fetched from the host that emitted this event. */
  active: boolean;
  /** If active=false, this optional field indicates a reason for why the account is not active. */
  // NOTE(review): the lexicon below also lists 'desynchronized' and 'throttled' as
  // knownValues; the trailing `| string` covers them, but consider naming them here too.
  status?: "takendown" | "suspended" | "deleted" | "deactivated" | string;
  [k: string]: unknown;
}

export function isAccount(v: unknown): v is Account {
  return (
    isObj(v) &&
    hasProp(v, "$type") &&
    v.$type === "com.atproto.sync.subscribeRepos#account"
  );
}

export interface Info {
  name: "OutdatedCursor" | string;
  message?: string;
  [k: string]: unknown;
}

export function isInfo(v: unknown): v is Info {
  return (
    isObj(v) &&
    hasProp(v, "$type") &&
    v.$type === "com.atproto.sync.subscribeRepos#info"
  );
}

/** A repo operation, ie a mutation of a single record. */
export interface RepoOp {
  action: "create" | "update" | "delete" | string;
  path: string;
  /** For creates and updates, the new record CID. For deletions, null. */
  cid: CID | null;
  [k: string]: unknown;
}

// NOTE(review): repoOp is referenced from #commit via `ref` (it is not a member of the
// message union), so ops on the wire may not carry a `$type` field -- in that case this
// guard would always return false. Verify against real frames before relying on it.
export function isRepoOp(v: unknown): v is RepoOp {
  return (
    isObj(v) &&
    hasProp(v, "$type") &&
    v.$type === "com.atproto.sync.subscribeRepos#repoOp"
  );
}

/** Hand-copied lexicon document for com.atproto.sync.subscribeRepos (see @NOTE above). */
export const ComAtprotoSyncSubscribeRepos: LexiconDoc = {
  lexicon: 1,
  id: "com.atproto.sync.subscribeRepos",
  defs: {
    main: {
      type: "subscription",
      description:
        "Repository event stream, aka Firehose endpoint. Outputs repo commits with diff data, and identity update events, for all repositories on the current server. See the atproto specifications for details around stream sequencing, repo versioning, CAR diff format, and more. Public and does not require auth; implemented by PDS and Relay.",
      parameters: {
        type: "params",
        properties: {
          cursor: {
            type: "integer",
            description: "The last known event seq number to backfill from.",
          },
        },
      },
      message: {
        schema: {
          type: "union",
          refs: [
            "lex:com.atproto.sync.subscribeRepos#commit",
            "lex:com.atproto.sync.subscribeRepos#sync",
            "lex:com.atproto.sync.subscribeRepos#identity",
            "lex:com.atproto.sync.subscribeRepos#account",
            "lex:com.atproto.sync.subscribeRepos#info",
          ],
        },
      },
      errors: [
        {
          name: "FutureCursor",
        },
        {
          name: "ConsumerTooSlow",
          description:
            "If the consumer of the stream can not keep up with events, and a backlog gets too large, the server will drop the connection.",
        },
      ],
    },
    commit: {
      type: "object",
      description:
        "Represents an update of repository state. Note that empty commits are allowed, which include no repo data changes, but an update to rev and signature.",
      required: [
        "seq",
        "rebase",
        "tooBig",
        "repo",
        "commit",
        "rev",
        "since",
        "blocks",
        "ops",
        "blobs",
        "time",
      ],
      nullable: ["since"],
      properties: {
        seq: {
          type: "integer",
          description: "The stream sequence number of this message.",
        },
        rebase: {
          type: "boolean",
          description: "DEPRECATED -- unused",
        },
        tooBig: {
          type: "boolean",
          description:
            "DEPRECATED -- replaced by #sync event and data limits. Indicates that this commit contained too many ops, or data size was too large. Consumers will need to make a separate request to get missing data.",
        },
        repo: {
          type: "string",
          format: "did",
          description:
            "The repo this event comes from. Note that all other message types name this field 'did'.",
        },
        commit: {
          type: "cid-link",
          description: "Repo commit object CID.",
        },
        rev: {
          type: "string",
          format: "tid",
          description:
            "The rev of the emitted commit. Note that this information is also in the commit object included in blocks, unless this is a tooBig event.",
        },
        since: {
          type: "string",
          format: "tid",
          description:
            "The rev of the last emitted commit from this repo (if any).",
        },
        blocks: {
          type: "bytes",
          description:
            "CAR file containing relevant blocks, as a diff since the previous repo state. The commit must be included as a block, and the commit block CID must be the first entry in the CAR header 'roots' list.",
          maxLength: 2000000,
        },
        ops: {
          type: "array",
          items: {
            type: "ref",
            ref: "lex:com.atproto.sync.subscribeRepos#repoOp",
            description:
              "List of repo mutation operations in this commit (eg, records created, updated, or deleted).",
          },
          maxLength: 200,
        },
        blobs: {
          type: "array",
          items: {
            type: "cid-link",
            description:
              "DEPRECATED -- will soon always be empty. List of new blobs (by CID) referenced by records in this commit.",
          },
        },
        prevData: {
          type: "cid-link",
          description:
            "The root CID of the MST tree for the previous commit from this repo (indicated by the 'since' revision field in this message). Corresponds to the 'data' field in the repo commit object. NOTE: this field is effectively required for the 'inductive' version of firehose.",
        },
        time: {
          type: "string",
          format: "datetime",
          description:
            "Timestamp of when this message was originally broadcast.",
        },
      },
    },
    sync: {
      type: "object",
      description:
        "Updates the repo to a new state, without necessarily including that state on the firehose. Used to recover from broken commit streams, data loss incidents, or in situations where upstream host does not know recent state of the repository.",
      required: ["seq", "did", "blocks", "rev", "time"],
      properties: {
        seq: {
          type: "integer",
          description: "The stream sequence number of this message.",
        },
        did: {
          type: "string",
          format: "did",
          description:
            "The account this repo event corresponds to. Must match that in the commit object.",
        },
        blocks: {
          type: "bytes",
          description:
            "CAR file containing the commit, as a block. The CAR header must include the commit block CID as the first 'root'.",
          maxLength: 10000,
        },
        rev: {
          type: "string",
          description:
            "The rev of the commit. This value must match that in the commit object.",
        },
        time: {
          type: "string",
          format: "datetime",
          description:
            "Timestamp of when this message was originally broadcast.",
        },
      },
    },
    identity: {
      type: "object",
      description:
        "Represents a change to an account's identity. Could be an updated handle, signing key, or pds hosting endpoint. Serves as a prod to all downstream services to refresh their identity cache.",
      required: ["seq", "did", "time"],
      properties: {
        seq: {
          type: "integer",
        },
        did: {
          type: "string",
          format: "did",
        },
        time: {
          type: "string",
          format: "datetime",
        },
        handle: {
          type: "string",
          format: "handle",
          description:
            "The current handle for the account, or 'handle.invalid' if validation fails. This field is optional, might have been validated or passed-through from an upstream source. Semantics and behaviors for PDS vs Relay may evolve in the future; see atproto specs for more details.",
        },
      },
    },
    account: {
      type: "object",
      description:
        "Represents a change to an account's status on a host (eg, PDS or Relay). The semantics of this event are that the status is at the host which emitted the event, not necessarily that at the currently active PDS. Eg, a Relay takedown would emit a takedown with active=false, even if the PDS is still active.",
      required: ["seq", "did", "time", "active"],
      properties: {
        seq: {
          type: "integer",
        },
        did: {
          type: "string",
          format: "did",
        },
        time: {
          type: "string",
          format: "datetime",
        },
        active: {
          type: "boolean",
          description:
            "Indicates that the account has a repository which can be fetched from the host that emitted this event.",
        },
        status: {
          type: "string",
          description:
            "If active=false, this optional field indicates a reason for why the account is not active.",
          knownValues: [
            "takendown",
            "suspended",
            "deleted",
            "deactivated",
            "desynchronized",
            "throttled",
          ],
        },
      },
    },
    info: {
      type: "object",
      required: ["name"],
      properties: {
        name: {
          type: "string",
          knownValues: ["OutdatedCursor"],
        },
        message: {
          type: "string",
        },
      },
    },
    repoOp: {
      type: "object",
      description: "A repo operation, ie a mutation of a single record.",
      required: ["action", "path", "cid"],
      nullable: ["cid"],
      properties: {
        action: {
          type: "string",
          knownValues: ["create", "update", "delete"],
        },
        path: {
          type: "string",
        },
        cid: {
          type: "cid-link",
          description:
            "For creates and updates, the new record CID. For deletions, null.",
        },
        prev: {
          type: "cid-link",
          description:
            "For updates and deletes, the previous record CID (required for inductive firehose). For creations, field should not be defined.",
        },
      },
    },
  },
};

const lexicons = new Lexicons([ComAtprotoSyncSubscribeRepos]);

/** Assert that `evt` is a valid subscribeRepos stream message and return it typed as RepoEvent. */
export const isValidRepoEvent = (evt: unknown) => {
  return lexicons.assertValidXrpcMessage<RepoEvent>(
    "com.atproto.sync.subscribeRepos",
    evt,
  );
};
+3
sync/mod.ts
··· 1 + export * from "./runner/index.ts"; 2 + export * from "./firehose/index.ts"; 3 + export * from "./events.ts";
+43
sync/runner/consecutive-list.ts
··· 1 + /** 2 + * Add items to a list, and mark those items as 3 + * completed. Upon item completion, get list of consecutive 4 + * items completed at the head of the list. Example: 5 + * 6 + * const consecutive = new ConsecutiveList<number>() 7 + * const item1 = consecutive.push(1) 8 + * const item2 = consecutive.push(2) 9 + * const item3 = consecutive.push(3) 10 + * item2.complete() // [] 11 + * item1.complete() // [1, 2] 12 + * item3.complete() // [3] 13 + */ 14 + export class ConsecutiveList<T> { 15 + list: ConsecutiveItem<T>[] = []; 16 + 17 + push(value: T): ConsecutiveItem<T> { 18 + const item = new ConsecutiveItem<T>(this, value); 19 + this.list.push(item); 20 + return item; 21 + } 22 + 23 + complete(): T[] { 24 + let i = 0; 25 + while (this.list[i]?.isComplete) { 26 + i += 1; 27 + } 28 + return this.list.splice(0, i).map((item) => item.value); 29 + } 30 + } 31 + 32 + export class ConsecutiveItem<T> { 33 + isComplete = false; 34 + constructor( 35 + private consecutive: ConsecutiveList<T>, 36 + public value: T, 37 + ) {} 38 + 39 + complete(): T[] { 40 + this.isComplete = true; 41 + return this.consecutive.complete(); 42 + } 43 + }
+3
sync/runner/index.ts
··· 1 + export * from "./consecutive-list.ts"; 2 + export * from "./memory-runner.ts"; 3 + export * from "./types.ts";
+70
sync/runner/memory-runner.ts
··· 1 + import PQueue from "p-queue"; 2 + import { ConsecutiveList } from "./consecutive-list.ts"; 3 + import type { EventRunner } from "./types.ts"; 4 + 5 + export type MemoryRunnerOptions = { 6 + setCursor?: (cursor: number) => Promise<void>; 7 + concurrency?: number; 8 + startCursor?: number; 9 + }; 10 + 11 + // A queue with arbitrarily many partitions, each processing work sequentially. 12 + // Partitions are created lazily and taken out of memory when they go idle. 13 + export class MemoryRunner implements EventRunner { 14 + consecutive: ConsecutiveList<number> = new ConsecutiveList<number>(); 15 + mainQueue: PQueue; 16 + partitions: Map<string, PQueue> = new Map<string, PQueue>(); 17 + cursor: number | undefined; 18 + 19 + constructor(public opts: MemoryRunnerOptions = {}) { 20 + this.mainQueue = new PQueue({ concurrency: opts.concurrency ?? Infinity }); 21 + this.cursor = opts.startCursor; 22 + } 23 + 24 + getCursor(): number | undefined { 25 + return this.cursor; 26 + } 27 + 28 + addTask(partitionId: string, task: () => Promise<void>): Promise<void> { 29 + if (this.mainQueue.isPaused) return Promise.resolve(); 30 + return this.mainQueue.add(() => { 31 + return this.getPartition(partitionId).add(task); 32 + }); 33 + } 34 + 35 + private getPartition(partitionId: string) { 36 + let partition = this.partitions.get(partitionId); 37 + if (!partition) { 38 + partition = new PQueue({ concurrency: 1 }); 39 + partition.once("idle", () => this.partitions.delete(partitionId)); 40 + this.partitions.set(partitionId, partition); 41 + } 42 + return partition; 43 + } 44 + 45 + async trackEvent(did: string, seq: number, handler: () => Promise<void>) { 46 + if (this.mainQueue.isPaused) return; 47 + const item = this.consecutive.push(seq); 48 + await this.addTask(did, async () => { 49 + await handler(); 50 + const latest = item.complete().at(-1); 51 + if (latest !== undefined) { 52 + this.cursor = latest; 53 + if (this.opts.setCursor) { 54 + await 
this.opts.setCursor(this.cursor); 55 + } 56 + } 57 + }); 58 + } 59 + 60 + async processAll() { 61 + await this.mainQueue.onIdle(); 62 + } 63 + 64 + async destroy() { 65 + this.mainQueue.pause(); 66 + this.mainQueue.clear(); 67 + this.partitions.forEach((p) => p.clear()); 68 + await this.mainQueue.onIdle(); 69 + } 70 + }
+8
sync/runner/types.ts
/**
 * Contract for schedulers that order and dispatch firehose events
 * (implemented in-memory by MemoryRunner).
 */
export interface EventRunner {
  /**
   * Latest fully-processed event sequence number, or undefined when none is
   * known. `Awaited<number | undefined>` unwraps to plain `number | undefined`;
   * the wrapper only signals that the value is returned synchronously.
   */
  getCursor(): Awaited<number | undefined>;
  /**
   * Schedule `handler` for the event with sequence `seq` belonging to repo
   * `did`; the returned promise settles once the runner has dealt with it.
   */
  trackEvent(
    did: string,
    seq: number,
    handler: () => Promise<void>,
  ): Promise<void>;
}
+232
sync/tests/firehose-mock_test.ts
// Tests for parseAccount/parseIdentity against a mocked IdResolver.
// NOTE(review): this file overlaps heavily with firehose-parsing_test.ts
// (same mock setup, several near-identical cases) -- consider consolidating.
import { assertEquals, assertObjectMatch } from "@std/assert";
import { IdResolver } from "@atp/identity";
import { MemoryCache } from "@atp/identity";
import { parseAccount, parseIdentity } from "../firehose/index.ts";
import type { Account, Identity } from "../firehose/lexicons.ts";

// Mock IdResolver: stubs DID-document and handle resolution so no network
// or PLC server is needed.
const createMockIdResolver = (): IdResolver => {
  const didCache = new MemoryCache();
  const resolver = new IdResolver({
    plcUrl: "http://localhost:3000",
    didCache,
  });

  // Mock the resolve methods
  resolver.did.resolve = (did: string) =>
    Promise.resolve({
      id: did,
      verificationMethod: [],
      service: [{
        id: "#atproto_pds",
        type: "AtprotoPersonalDataServer",
        serviceEndpoint: "https://test.pds",
      }],
      alsoKnownAs: [`at://${did.replace("did:plc:", "")}.test`],
    });

  resolver.handle.resolve = (handle: string) => {
    const didMap: Record<string, string> = {
      "alice123.test": "did:plc:alice123",
      "bob456.test": "did:plc:bob456",
    };
    return Promise.resolve(didMap[handle]);
  };

  return resolver;
};

Deno.test({
  name: "parseAccount - creates account events with no status",
  fn() {
    const account: Account = {
      $type: "com.atproto.sync.subscribeRepos#account",
      seq: 200,
      time: "2024-01-01T13:00:00.000Z",
      did: "did:plc:alice123",
      active: true,
      status: undefined,
    };

    const event = parseAccount(account);

    assertObjectMatch(event!, {
      event: "account",
      seq: 200,
      time: "2024-01-01T13:00:00.000Z",
      did: "did:plc:alice123",
      active: true,
      status: undefined,
    });
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name: "parseAccount - handles valid status values",
  fn() {
    const validStatuses = ["takendown", "suspended", "deleted", "deactivated"];

    for (const status of validStatuses) {
      const account: Account = {
        $type: "com.atproto.sync.subscribeRepos#account",
        seq: 201,
        time: "2024-01-01T13:01:00.000Z",
        did: "did:plc:alice123",
        active: false,
        status,
      };

      const event = parseAccount(account);
      assertEquals(event!.status, status);
    }
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name: "parseAccount - returns undefined for invalid status",
  fn() {
    const account: Account = {
      $type: "com.atproto.sync.subscribeRepos#account",
      seq: 202,
      time: "2024-01-01T13:02:00.000Z",
      did: "did:plc:alice123",
      active: true,
      status: "active", // "active" is not a valid status
    };

    const event = parseAccount(account);
    assertEquals(event, undefined);
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name: "parseIdentity - creates identity events with resolved DID",
  async fn() {
    const idResolver = createMockIdResolver();

    const identity: Identity = {
      $type: "com.atproto.sync.subscribeRepos#identity",
      seq: 300,
      time: "2024-01-01T14:00:00.000Z",
      did: "did:plc:alice123",
    };

    const event = await parseIdentity(idResolver, identity);

    assertObjectMatch(event!, {
      event: "identity",
      seq: 300,
      time: "2024-01-01T14:00:00.000Z",
      did: "did:plc:alice123",
      handle: "alice123.test",
      didDocument: {
        id: "did:plc:alice123",
        service: [{
          id: "#atproto_pds",
          type: "AtprotoPersonalDataServer",
          serviceEndpoint: "https://test.pds",
        }],
      },
    });
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name: "parseIdentity - handles unauthenticated mode",
  async fn() {
    const idResolver = createMockIdResolver();

    const identity: Identity = {
      $type: "com.atproto.sync.subscribeRepos#identity",
      seq: 301,
      time: "2024-01-01T14:01:00.000Z",
      did: "did:plc:alice123",
    };

    const event = await parseIdentity(idResolver, identity, true);

    assertObjectMatch(event!, {
      event: "identity",
      seq: 301,
      did: "did:plc:alice123",
      handle: undefined, // Should be undefined in unauthenticated mode
      didDocument: {
        id: "did:plc:alice123",
      },
    });
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name: "parseIdentity - handles DID resolution failure",
  async fn() {
    const didCache = new MemoryCache();
    const resolver = new IdResolver({
      plcUrl: "http://localhost:3000",
      didCache,
    });

    // Mock resolver to return null (failed resolution)
    resolver.did.resolve = () => Promise.resolve(null);

    const identity: Identity = {
      $type: "com.atproto.sync.subscribeRepos#identity",
      seq: 302,
      time: "2024-01-01T14:02:00.000Z",
      did: "did:plc:unknown",
    };

    const event = await parseIdentity(resolver, identity);

    assertObjectMatch(event!, {
      event: "identity",
      seq: 302,
      did: "did:plc:unknown",
      handle: undefined,
      didDocument: undefined,
    });
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name: "parseIdentity - handles handle resolution failure",
  async fn() {
    const idResolver = createMockIdResolver();

    // Override handle resolver to return undefined
    idResolver.handle.resolve = () => Promise.resolve(undefined);

    const identity: Identity = {
      $type: "com.atproto.sync.subscribeRepos#identity",
      seq: 303,
      time: "2024-01-01T14:03:00.000Z",
      did: "did:plc:alice123",
    };

    const event = await parseIdentity(idResolver, identity);

    assertObjectMatch(event!, {
      event: "identity",
      seq: 303,
      did: "did:plc:alice123",
      handle: undefined, // Handle resolution failed
      didDocument: {
        id: "did:plc:alice123",
      },
    });
  },
  sanitizeResources: false,
  sanitizeOps: false,
});
+170
sync/tests/firehose-parsing_test.ts
// Tests for parseAccount/parseIdentity focused on status validation and
// unauthenticated parsing.
// NOTE(review): overlaps heavily with firehose-mock_test.ts -- consider
// consolidating the shared mock and duplicate cases.
import { assertEquals, assertObjectMatch } from "@std/assert";
import { IdResolver } from "@atp/identity";
import { MemoryCache } from "@atp/identity";
import { parseAccount, parseIdentity } from "../firehose/index.ts";
import type { Account, Identity } from "../firehose/lexicons.ts";

// Mock IdResolver
const createMockIdResolver = (): IdResolver => {
  const didCache = new MemoryCache();
  const resolver = new IdResolver({
    plcUrl: "http://localhost:3000",
    didCache,
  });

  // Mock DID document with no verification methods to avoid crypto parsing
  resolver.did.resolve = (did: string) =>
    Promise.resolve({
      id: did,
      verificationMethod: [],
      service: [{
        id: "#atproto_pds",
        type: "AtprotoPersonalDataServer",
        serviceEndpoint: "https://test.pds",
      }],
      alsoKnownAs: ["at://alice.test"],
    });

  // Mock key resolution
  resolver.did.resolveAtprotoKey = (_did: string) =>
    Promise.resolve("test-key");

  // Mock handle resolution
  resolver.handle.resolve = (handle: string) => {
    const didMap: Record<string, string> = {
      "alice.test": "did:plc:alice123",
      "bob.test": "did:plc:bob456",
    };
    return Promise.resolve(didMap[handle]);
  };

  return resolver;
};

Deno.test({
  name: "parseAccount - returns undefined for invalid status",
  fn() {
    const account: Account = {
      $type: "com.atproto.sync.subscribeRepos#account",
      seq: 200,
      time: "2024-01-01T13:00:00.000Z",
      did: "did:plc:alice123",
      active: true,
      status: "active", // "active" is not a valid status
    };

    const event = parseAccount(account);

    assertEquals(event, undefined); // Should return undefined for invalid status
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name: "parseAccount - creates account events with valid status",
  fn() {
    const account: Account = {
      $type: "com.atproto.sync.subscribeRepos#account",
      seq: 200,
      time: "2024-01-01T13:00:00.000Z",
      did: "did:plc:alice123",
      active: true,
      status: "suspended",
    };

    const event = parseAccount(account);

    assertObjectMatch(event!, {
      event: "account",
      seq: 200,
      time: "2024-01-01T13:00:00.000Z",
      did: "did:plc:alice123",
      active: true,
      status: "suspended",
    });
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name: "parseAccount - handles valid status values",
  fn() {
    const validStatuses = ["takendown", "suspended", "deleted", "deactivated"];

    for (const status of validStatuses) {
      const account: Account = {
        $type: "com.atproto.sync.subscribeRepos#account",
        seq: 201,
        time: "2024-01-01T13:01:00.000Z",
        did: "did:plc:alice123",
        active: false,
        status,
      };

      const event = parseAccount(account);
      assertEquals(event!.status, status);
    }
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name:
    "parseIdentity - creates identity events with resolved DID (unauthenticated)",
  async fn() {
    const idResolver = createMockIdResolver();

    const identity: Identity = {
      $type: "com.atproto.sync.subscribeRepos#identity",
      seq: 300,
      time: "2024-01-01T14:00:00.000Z",
      did: "did:plc:alice123",
    };

    const event = await parseIdentity(idResolver, identity, true);

    assertObjectMatch(event!, {
      event: "identity",
      seq: 300,
      time: "2024-01-01T14:00:00.000Z",
      did: "did:plc:alice123",
      handle: undefined, // Should be undefined in unauthenticated mode
      didDocument: {
        id: "did:plc:alice123",
      },
    });
  },
  sanitizeResources: false,
  sanitizeOps: false,
});

Deno.test({
  name: "parseIdentity - handles unauthenticated mode",
  async fn() {
    const idResolver = createMockIdResolver();

    const identity: Identity = {
      $type: "com.atproto.sync.subscribeRepos#identity",
      seq: 301,
      time: "2024-01-01T14:01:00.000Z",
      did: "did:plc:alice123",
    };

    const event = await parseIdentity(idResolver, identity, true);

    assertObjectMatch(event!, {
      event: "identity",
      seq: 301,
      did: "did:plc:alice123",
      handle: undefined, // Should be undefined in unauthenticated mode
      didDocument: {
        id: "did:plc:alice123",
      },
    });
  },
  sanitizeResources: false,
  sanitizeOps: false,
});
+218
sync/tests/mock-firehose-server.ts
··· 1 + import type { CID } from "multiformats/cid"; 2 + import type { RepoEvent } from "../firehose/lexicons.ts"; 3 + 4 + export interface MockFirehoseServerOptions { 5 + port?: number; 6 + events?: RepoEvent[]; 7 + eventDelay?: number; 8 + } 9 + 10 + export class MockFirehoseServer { 11 + private port: number; 12 + private events: RepoEvent[]; 13 + private eventDelay: number; 14 + private server: Deno.HttpServer | null = null; 15 + private connections = new Set<WebSocket>(); 16 + 17 + constructor(options: MockFirehoseServerOptions = {}) { 18 + this.port = options.port ?? 8080; 19 + this.events = options.events ?? []; 20 + this.eventDelay = options.eventDelay ?? 100; 21 + } 22 + 23 + start(): void { 24 + this.server = Deno.serve({ 25 + port: this.port, 26 + handler: (req) => this.handleRequest(req), 27 + }); 28 + console.log(`Mock firehose server started on port ${this.port}`); 29 + } 30 + 31 + async stop(): Promise<void> { 32 + if (this.server) { 33 + await this.server.shutdown(); 34 + this.server = null; 35 + } 36 + // Close all active connections 37 + for (const ws of this.connections) { 38 + try { 39 + ws.close(); 40 + } catch { 41 + // Ignore errors when closing 42 + } 43 + } 44 + this.connections.clear(); 45 + } 46 + 47 + private handleRequest(req: Request): Response { 48 + const url = new URL(req.url); 49 + 50 + // Handle WebSocket upgrade for the firehose endpoint 51 + if (url.pathname === "/xrpc/com.atproto.sync.subscribeRepos") { 52 + if (req.headers.get("upgrade") !== "websocket") { 53 + return new Response("Expected websocket", { status: 400 }); 54 + } 55 + 56 + const { socket, response } = Deno.upgradeWebSocket(req); 57 + this.connections.add(socket); 58 + 59 + socket.onopen = () => { 60 + console.log("WebSocket connection opened"); 61 + this.startEventStream(socket); 62 + }; 63 + 64 + socket.onclose = () => { 65 + console.log("WebSocket connection closed"); 66 + this.connections.delete(socket); 67 + }; 68 + 69 + socket.onerror = (error) => { 70 + 
console.error("WebSocket error:", error); 71 + this.connections.delete(socket); 72 + }; 73 + 74 + return response; 75 + } 76 + 77 + return new Response("Not found", { status: 404 }); 78 + } 79 + 80 + private async startEventStream(socket: WebSocket): Promise<void> { 81 + try { 82 + for (const event of this.events) { 83 + if (socket.readyState !== WebSocket.OPEN) { 84 + break; 85 + } 86 + 87 + // Encode the event as a frame (simplified version) 88 + const frame = this.encodeFrame(event); 89 + socket.send(frame); 90 + 91 + // Wait before sending next event 92 + if (this.eventDelay > 0) { 93 + await new Promise((resolve) => setTimeout(resolve, this.eventDelay)); 94 + } 95 + } 96 + } catch (error) { 97 + console.error("Error streaming events:", error); 98 + if (socket.readyState === WebSocket.OPEN) { 99 + socket.close(); 100 + } 101 + } 102 + } 103 + 104 + private encodeFrame(event: RepoEvent): ArrayBuffer { 105 + // This is a simplified frame encoding for testing 106 + // In reality, this would use the proper XRPC streaming format 107 + 108 + const header = { 109 + op: 1, // Message frame 110 + t: event.$type, 111 + }; 112 + 113 + const headerBytes = new TextEncoder().encode(JSON.stringify(header)); 114 + const bodyBytes = new TextEncoder().encode(JSON.stringify(event)); 115 + 116 + // Create a simple frame: [header_length][header][body] 117 + const frame = new ArrayBuffer(4 + headerBytes.length + bodyBytes.length); 118 + const view = new DataView(frame); 119 + const uint8View = new Uint8Array(frame); 120 + 121 + // Write header length 122 + view.setUint32(0, headerBytes.length, false); 123 + 124 + // Write header 125 + uint8View.set(headerBytes, 4); 126 + 127 + // Write body 128 + uint8View.set(bodyBytes, 4 + headerBytes.length); 129 + 130 + return frame; 131 + } 132 + 133 + addEvent(event: RepoEvent): void { 134 + this.events.push(event); 135 + } 136 + 137 + setEvents(events: RepoEvent[]): void { 138 + this.events = events; 139 + } 140 + 141 + clearEvents(): void { 
142 + this.events = []; 143 + } 144 + 145 + get url(): string { 146 + return `ws://localhost:${this.port}`; 147 + } 148 + } 149 + 150 + // Helper functions to create mock events 151 + export const createMockCommitEvent = ( 152 + repo: string, 153 + seq: number, 154 + collection: string, 155 + rkey: string, 156 + record: Record<string, unknown>, 157 + action: "create" | "update" | "delete" = "create", 158 + ): RepoEvent => { 159 + const mockCID = { 160 + toString: () => `mock-cid-${seq}`, 161 + equals: (other: { toString(): string }) => 162 + other.toString() === `mock-cid-${seq}`, 163 + }; 164 + 165 + return { 166 + $type: "com.atproto.sync.subscribeRepos#commit", 167 + seq, 168 + time: new Date().toISOString(), 169 + repo, 170 + commit: mockCID as unknown as CID, 171 + rev: `rev-${seq}`, 172 + ops: [{ 173 + action, 174 + path: `${collection}/${rkey}`, 175 + cid: action === "delete" ? null : mockCID as unknown as CID, 176 + }], 177 + blocks: new Uint8Array( 178 + JSON.stringify(record).split("").map((c) => c.charCodeAt(0)), 179 + ), 180 + }; 181 + }; 182 + 183 + export const createMockIdentityEvent = ( 184 + did: string, 185 + seq: number, 186 + ): RepoEvent => ({ 187 + $type: "com.atproto.sync.subscribeRepos#identity", 188 + seq, 189 + time: new Date().toISOString(), 190 + did, 191 + }); 192 + 193 + export const createMockAccountEvent = ( 194 + did: string, 195 + seq: number, 196 + active = true, 197 + status?: string, 198 + ): RepoEvent => ({ 199 + $type: "com.atproto.sync.subscribeRepos#account", 200 + seq, 201 + time: new Date().toISOString(), 202 + did, 203 + active, 204 + status, 205 + }); 206 + 207 + export const createMockSyncEvent = ( 208 + did: string, 209 + seq: number, 210 + rev: string, 211 + ): RepoEvent => ({ 212 + $type: "com.atproto.sync.subscribeRepos#sync", 213 + seq, 214 + time: new Date().toISOString(), 215 + did, 216 + rev, 217 + blocks: new Uint8Array([0x01, 0x02, 0x03]), // Mock blocks 218 + });
+117
sync/tests/runner_test.ts
··· 1 + import { wait } from "@atp/common"; 2 + import { ConsecutiveList, MemoryRunner } from "../runner/index.ts"; 3 + import { assert, assertEquals, assertFalse } from "@std/assert"; 4 + 5 + Deno.test("ConsecutiveList tracks consecutive complete items.", () => { 6 + const consecutive = new ConsecutiveList<number>(); 7 + // add items 8 + const item1 = consecutive.push(1); 9 + const item2 = consecutive.push(2); 10 + const item3 = consecutive.push(3); 11 + assertFalse(item1.isComplete); 12 + assertFalse(item2.isComplete); 13 + assertFalse(item3.isComplete); 14 + // complete items out of order 15 + assertEquals(consecutive.list.length, 3); 16 + assertEquals(item2.complete(), []); 17 + assert(item2.isComplete); 18 + assertEquals(consecutive.list.length, 3); 19 + assertEquals(item1.complete(), [1, 2]); 20 + assert(item1.isComplete); 21 + assertEquals(consecutive.list.length, 1); 22 + assertEquals(item3.complete(), [3]); 23 + assertEquals(consecutive.list.length, 0); 24 + assert(item3.isComplete); 25 + }); 26 + 27 + Deno.test("MemoryRunner performs work in parallel across partitions, serial within a partition.", async () => { 28 + const runner = new MemoryRunner({ concurrency: Infinity }); 29 + const complete: number[] = []; 30 + // partition 1 items start slow but get faster: slow should still complete first. 31 + runner.addTask("1", async () => { 32 + await wait(30); 33 + complete.push(11); 34 + }); 35 + runner.addTask("1", async () => { 36 + await wait(20); 37 + complete.push(12); 38 + }); 39 + runner.addTask("1", async () => { 40 + await wait(1); 41 + complete.push(13); 42 + }); 43 + assertEquals(runner.partitions.size, 1); 44 + // partition 2 items complete quickly except the last, which is slowest of all events. 
45 + runner.addTask("2", async () => { 46 + await wait(1); 47 + complete.push(21); 48 + }); 49 + runner.addTask("2", async () => { 50 + await wait(1); 51 + complete.push(22); 52 + }); 53 + runner.addTask("2", async () => { 54 + await wait(1); 55 + complete.push(23); 56 + }); 57 + runner.addTask("2", async () => { 58 + await wait(60); 59 + complete.push(24); 60 + }); 61 + assertEquals(runner.partitions.size, 2); 62 + await runner.mainQueue.onIdle(); 63 + assertEquals(complete, [21, 22, 23, 11, 12, 13, 24]); 64 + assertEquals(runner.partitions.size, 0); 65 + }); 66 + 67 + Deno.test("MemoryRunner limits overall concurrency.", async () => { 68 + const runner = new MemoryRunner({ concurrency: 1 }); 69 + const complete: number[] = []; 70 + // if concurrency were not constrained, partition 1 would complete all items 71 + // before any items from partition 2. since it is constrained, the work is complete in the order added. 72 + runner.addTask("1", async () => { 73 + await wait(1); 74 + complete.push(11); 75 + }); 76 + runner.addTask("2", async () => { 77 + await wait(10); 78 + complete.push(21); 79 + }); 80 + runner.addTask("1", async () => { 81 + await wait(1); 82 + complete.push(12); 83 + }); 84 + runner.addTask("2", async () => { 85 + await wait(10); 86 + complete.push(22); 87 + }); 88 + // only partition 1 exists so far due to the concurrency 89 + assertEquals(runner.partitions.size, 1); 90 + await runner.mainQueue.onIdle(); 91 + assertEquals(complete, [11, 21, 12, 22]); 92 + assertEquals(runner.partitions.size, 0); 93 + }); 94 + 95 + Deno.test("MemoryRunner settles with many items.", async () => { 96 + const runner = new MemoryRunner({ concurrency: 100 }); 97 + const complete: { partition: string; id: number }[] = []; 98 + const partitions = new Set<string>(); 99 + for (let i = 0; i < 500; ++i) { 100 + const partition = Math.floor(Math.random() * 16).toString(10); 101 + partitions.add(partition); 102 + runner.addTask(partition, async () => { 103 + await wait((i % 2) 
* 2); 104 + complete.push({ partition, id: i }); 105 + }); 106 + } 107 + assertEquals(runner.partitions.size, partitions.size); 108 + await runner.mainQueue.onIdle(); 109 + assertEquals(complete.length, 500); 110 + for (const partition of partitions) { 111 + const ids = complete 112 + .filter((item) => item.partition === partition) 113 + .map((item) => item.id); 114 + assertEquals(ids, [...ids].sort((a, b) => a - b)); 115 + } 116 + assertEquals(runner.partitions.size, 0); 117 + });
+17
sync/util.ts
··· 1 + import { 2 + isAccount, 3 + isCommit, 4 + isIdentity, 5 + isSync, 6 + type RepoEvent, 7 + } from "./firehose/lexicons.ts"; 8 + 9 + export const didAndSeqForEvt = ( 10 + evt: RepoEvent, 11 + ): { did: string; seq: number } | undefined => { 12 + if (isCommit(evt)) return { seq: evt.seq, did: evt.repo }; 13 + else if (isAccount(evt) || isIdentity(evt) || isSync(evt)) { 14 + return { seq: evt.seq, did: evt.did }; 15 + } 16 + return undefined; 17 + };