very fast AT Protocol indexer with flexible filtering, XRPC queries, a cursor-backed event stream, and more, built on fjall
rust fjall at-protocol atproto indexer
60
fork

Configure Feed

Select the types of activity you want to include in your feed.

at main 261 lines 10 kB view raw
#!/usr/bin/env nu
# tests the blue.microcosm.links backlinks API endpoints.
#
# backfills a single repo and verifies:
#   1. getBacklinks returns a result for a subject URI extracted from an indexed like record
#   2. getBacklinksCount is >= number of getBacklinks results for the same subject
#   3. cursor pagination returns each result exactly once with no duplicates
#   4. source filter restricts results to the specified collection
#   5. reverse=true returns the same set as forward, with inverted order verified when
#      a subject with 2+ backlinks is found
#
# usage: nu tests/backlinks_test.nu
use common.nu *

# paginate through all backlinks for subject+source using limit=2, return all entries.
# follows the response cursor until the server stops returning one.
def get-all-backlinks [url: string, subject: string, source: string] {
    mut all = []
    mut cursor = null
    loop {
        let params = if $cursor != null {
            $"subject=($subject | url encode)&source=($source)&limit=2&cursor=($cursor)"
        } else {
            $"subject=($subject | url encode)&source=($source)&limit=2"
        }
        let resp = (http get $"($url)/xrpc/blue.microcosm.links.getBacklinks?($params)")
        $all = ($all | append $resp.backlinks)
        let next = ($resp | get --optional cursor)
        if ($next | is-empty) { break }
        $cursor = $next
    }
    $all
}

# scan up to `limit` reply posts looking for a thread root that has 2+ backlinks
# from app.bsky.feed.post. returns a record {subject, count} or null.
def find-multi-backlink-subject [url: string, did: string, limit: int] {
    let posts_resp = (try {
        http get $"($url)/xrpc/com.atproto.repo.listRecords?repo=($did)&collection=app.bsky.feed.post&limit=($limit)"
    } catch {
        return null
    })

    for post in $posts_resp.records {
        # only reply posts carry a thread root reference
        let reply = ($post.value | get --optional reply)
        if ($reply | is-empty) { continue }
        let root_uri = $reply.root.uri
        let count_resp = (try {
            http get $"($url)/xrpc/blue.microcosm.links.getBacklinksCount?subject=($root_uri | url encode)"
        } catch {
            continue
        })
        if $count_resp.count >= 2 {
            return {subject: $root_uri, count: $count_resp.count}
        }
    }

    null
}

# verify getBacklinks and getBacklinksCount return sensible results for sampled likes.
# returns an error string on failure, or empty string on success.
def check-basic [url: string, likes: list] {
    print "checking basic backlinks and count..."

    for like in $likes {
        let subject = $like.value.subject.uri
        print $"  subject: ($subject)"

        let bl_resp = (try {
            http get $"($url)/xrpc/blue.microcosm.links.getBacklinks?subject=($subject | url encode)&source=app.bsky.feed.like"
        } catch {
            return $"getBacklinks request failed for ($subject)"
        })

        if ($bl_resp.backlinks | is-empty) {
            return $"expected at least 1 backlink for ($subject)"
        }
        let bl_count = ($bl_resp.backlinks | length)
        print $"  getBacklinks: ($bl_count) results"

        let ct_resp = (try {
            http get $"($url)/xrpc/blue.microcosm.links.getBacklinksCount?subject=($subject | url encode)&source=app.bsky.feed.like"
        } catch {
            return $"getBacklinksCount request failed for ($subject)"
        })

        # count may exceed backlinks results because stale index entries (records deleted
        # after indexing) are counted but skipped during result collection
        if $ct_resp.count < ($bl_resp.backlinks | length) {
            return $"count ($ct_resp.count) < backlinks result count ($bl_resp.backlinks | length) for ($subject)"
        }
        print $"  getBacklinksCount: ($ct_resp.count)"
    }

    ""
}

# verify that cursor pagination returns every result exactly once with no duplicates.
# returns an error string on failure, or empty string on success.
def check-pagination [url: string, likes: list] {
    print "checking cursor pagination..."
    let subject = ($likes | first).value.subject.uri
    print $"  subject: ($subject)"

    # fetch all in a single shot with a large limit
    let full = (http get $"($url)/xrpc/blue.microcosm.links.getBacklinks?subject=($subject | url encode)&source=app.bsky.feed.like&limit=100")
    let full_uris = ($full.backlinks | each { |b| $b.uri } | sort)

    # paginate through the same subject with limit=2
    let paged = (get-all-backlinks $url $subject "app.bsky.feed.like")
    let paged_uris = ($paged | each { |b| $b.uri } | sort)

    print $"  full fetch: ($full_uris | length), paginated: ($paged_uris | length)"

    if $full_uris != $paged_uris {
        return "paginated results differ from single-page results"
    }

    let unique_count = ($paged_uris | uniq | length)
    if $unique_count != ($paged_uris | length) {
        return "duplicate URIs found in paginated results"
    }

    print "  pagination OK"
    ""
}

# verify source filter and that forward/reverse return the same set.
# returns an error string on failure, or empty string on success.
def check-source-filter [url: string, likes: list] {
    print "checking source filter..."
    let subject = ($likes | first).value.subject.uri
    print $"  subject: ($subject)"

    let fwd = (http get $"($url)/xrpc/blue.microcosm.links.getBacklinks?subject=($subject | url encode)&source=app.bsky.feed.like&limit=50")
    let rev = (http get $"($url)/xrpc/blue.microcosm.links.getBacklinks?subject=($subject | url encode)&source=app.bsky.feed.like&limit=50&reverse=true")

    # source filter: all returned URIs must belong to app.bsky.feed.like
    let bad = ($fwd.backlinks | where { |b| not ($b.uri | str contains "/app.bsky.feed.like/") })
    if not ($bad | is-empty) {
        return $"source filter returned non-like records: ($bad)"
    }
    let fwd_count = ($fwd.backlinks | length)
    print $"  source filter OK: ($fwd_count) likes"

    # reverse must return the same set
    let fwd_sorted = ($fwd.backlinks | each { |b| $b.uri } | sort)
    let rev_sorted = ($rev.backlinks | each { |b| $b.uri } | sort)
    if $fwd_sorted != $rev_sorted {
        return "forward and reverse scans returned different sets"
    }

    print "  reverse set equality OK"
    ""
}

# verify that reverse=true actually inverts the order using a subject with 2+ backlinks.
# returns an error string on failure, or empty string on success.
def check-reverse-ordering [url: string, subject: string, expected_count: int] {
    print $"checking reverse ordering... subject has ($expected_count) backlinks..."
    print $"  subject: ($subject)"

    let fwd = (http get $"($url)/xrpc/blue.microcosm.links.getBacklinks?subject=($subject | url encode)&limit=50")
    let rev = (http get $"($url)/xrpc/blue.microcosm.links.getBacklinks?subject=($subject | url encode)&limit=50&reverse=true")

    let fwd_uris = ($fwd.backlinks | each { |b| $b.uri })
    let rev_uris = ($rev.backlinks | each { |b| $b.uri })

    if ($fwd_uris | length) < 2 {
        return $"expected >= 2 forward results for ordering test, got ($fwd_uris | length)"
    }

    if $fwd_uris != ($rev_uris | reverse) {
        return "reverse order is not the inverse of forward order"
    }

    print $"  order inversion verified with ($fwd_uris | length) results"
    ""
}

# kill the hydrant instance (best-effort) and remove the temp database directory.
# shared teardown for every exit path in main.
def cleanup [pid: int, db_path: string] {
    try { kill -9 $pid }
    rm -rf $db_path
}

def main [] {
    let did = "did:plc:dfl62fgb7wtjj3fcbb72naae"
    let port = resolve-test-port 3020
    let url = $"http://localhost:($port)"
    let db_path = (mktemp -d -t hydrant_backlinks_test.XXXXXX)

    print $"database path: ($db_path)"

    let binary = (build-hydrant-features "backlinks")
    let instance = (start-hydrant $binary $db_path $port)

    if not (wait-for-api $url) {
        print "ERROR: hydrant failed to start"
        cleanup $instance.pid $db_path
        exit 1
    }

    print $"adding ($did) to tracking..."
    http put -t application/json $"($url)/repos" [{ did: $did }]

    if not (wait-for-backfill $url) {
        print "ERROR: backfill timed out or failed"
        cleanup $instance.pid $db_path
        exit 1
    }

    # fetch a small set of like records to use as test subjects
    print "fetching likes from indexed repo..."
    let likes_resp = (try {
        http get $"($url)/xrpc/com.atproto.repo.listRecords?repo=($did)&collection=app.bsky.feed.like&limit=5"
    } catch {
        print "ERROR: could not fetch like records"
        cleanup $instance.pid $db_path
        exit 1
    })

    let likes = $likes_resp.records
    if ($likes | is-empty) {
        print "SKIP: no like records found for test DID. cannot verify backlinks"
        cleanup $instance.pid $db_path
        exit 0
    }
    print $"found ($likes | length) likes for testing"

    # find a thread root with 2+ backlinks for ordering verification
    print "scanning posts for a thread root with 2+ backlinks..."
    let multi = (find-multi-backlink-subject $url $did 100)
    if ($multi | is-empty) {
        print "  note: no thread root with 2+ backlinks found, ordering verification will be skipped"
    } else {
        print $"  found subject with ($multi.count) backlinks: ($multi.subject)"
    }

    let basic_err = (check-basic $url $likes)
    let paging_err = (check-pagination $url $likes)
    let srcfilt_err = (check-source-filter $url $likes)
    let ordering_err = if not ($multi | is-empty) {
        check-reverse-ordering $url $multi.subject $multi.count
    } else {
        print "skipping reverse ordering check (no multi-backlink subject found)"
        ""
    }

    let failed = ([$basic_err, $paging_err, $srcfilt_err, $ordering_err] | where { |e| not ($e | is-empty) })

    print "stopping hydrant..."
    cleanup $instance.pid $db_path

    if ($failed | is-empty) {
        print "all backlinks tests PASSED"
        exit 0
    } else {
        for err in $failed { print $"FAILED: ($err)" }
        exit 1
    }
}