···1212#### ObjectBox Setup
13131414- [x] Add `objectbox`, `objectbox_flutter_libs` to `pubspec.yaml`; add `objectbox_generator` to dev deps
1515-- [ ] `EmbeddedPost` entity - `postUri` (unique), `accountDid`, `source` (saved/liked), `indexedText`, `embedding` (384D float vector, HNSW cosine index), `embeddedAt`
1616-- [ ] Run `build_runner` to generate `objectbox.g.dart` and `objectbox-model.json`
1717-- [ ] `ObjectBoxStore` singleton - `openStore()` at app startup (after Drift init), expose via `RepositoryProvider`
1818-- [ ] `EmbeddingRepository` - CRUD operations on `EmbeddedPost`: `upsert`, `deleteByUri`, `queryByAccount`, `countByAccount`
1515+- [x] `EmbeddedPost` entity - `postUri` (unique), `accountDid`, `source` (saved/liked), `indexedText`, `embedding` (384D float vector, HNSW cosine index), `embeddedAt`
1616+- [x] Run `build_runner` to generate `objectbox.g.dart` and `objectbox-model.json`
1717+- [x] `ObjectBoxStore` singleton - `openStore()` at app startup (after Drift init), expose via `RepositoryProvider`
1818+- [x] `EmbeddingRepository` - CRUD operations on `EmbeddedPost`: `upsert`, `deleteByUri`, `queryByAccount`, `countByAccount`
19192020#### TFLite Embedding Service
21212222- [x] Add `tflite_flutter` to `pubspec.yaml`
2323- [x] Bundle `minilm_l6_v2_int8.tflite` and `vocab.txt` as Flutter assets
2424-- [ ] `WordPieceTokenizer` - load vocab, tokenize text, pad/truncate to 256 tokens, return `List<int>`
2525-- [ ] `EmbeddingService` - long-lived background `Isolate` with `ReceivePort`/`SendPort` message passing
2626-- [ ] `EmbeddingService.initialize()` - spawn isolate, load TFLite model + tokenizer in isolate
2727-- [ ] `EmbeddingService.embed(String text)` - send text to isolate, receive `Float32List[384]`, L2-normalize
2828-- [ ] `EmbeddingService.isAvailable` - flag gating UI entry points, false if model fails to load
2929-- [ ] `EmbeddingService.dispose()` - close isolate and interpreter
3030-- [ ] `PostTextExtractor` - concatenate post text + image alt texts + link card title/description into a single searchable string
2424+- [x] `WordPieceTokenizer` - load vocab, tokenize text, pad/truncate to 256 tokens, return `List<int>`
2525+- [x] `EmbeddingService` - long-lived background `Isolate` with `ReceivePort`/`SendPort` message passing
2626+- [x] `EmbeddingService.initialize()` - spawn isolate, load TFLite model + tokenizer in isolate
2727+- [x] `EmbeddingService.embed(String text)` - send text to isolate, receive `Float32List[384]`, L2-normalize
2828+- [x] `EmbeddingService.isAvailable` - flag gating UI entry points, false if model fails to load
2929+- [x] `EmbeddingService.dispose()` - close isolate and interpreter
3030+- [x] `PostTextExtractor` - concatenate post text + image alt texts + link card title/description into a single searchable string
31313232#### Liked Posts Sync
3333···3737- [ ] `LikedPostsRepository.getLikedPosts(accountDid, {limit, offset})` - paginated query
3838- [ ] `LikedPostsRepository.removeLike(accountDid, postUri)` - delete entry
3939- [ ] Eviction: drop oldest entries when count exceeds 1000 per account
4040+- [ ] Documentation update: move development information from README.md to a top-level DEVELOPMENT.md.
4141+  The document should be updated to reflect the new architecture and patterns.
40424143#### Indexing Pipeline
4244
+186
lib/core/embedding/embedding_service.dart
···11+import 'dart:async';
22+import 'dart:isolate';
33+import 'dart:math' show sqrt;
44+55+import 'package:flutter/foundation.dart';
66+import 'package:flutter/services.dart';
77+import 'package:lazurite/core/embedding/word_piece_tokenizer.dart';
88+import 'package:tflite_flutter/tflite_flutter.dart';
/// Bootstrap payload handed to the embedding isolate when it is spawned.
final class _SetupData {
  const _SetupData({required this.sendPort, required this.rootIsolateToken});

  /// Port the isolate uses to report back its own command [SendPort] and the
  /// model-load success flag during the startup handshake.
  final SendPort sendPort;

  /// Token required by [BackgroundIsolateBinaryMessenger.ensureInitialized]
  /// so the background isolate can load assets via platform channels.
  final RootIsolateToken rootIsolateToken;
}

/// A request sent to the isolate: (text, replyPort) or null to dispose.
typedef _EmbedRequest = (String text, SendPort replyPort);
/// Returns a copy of [vector] scaled to unit (L2) length.
///
/// When the norm is effectively zero (below 1e-10) the input vector is
/// returned unchanged to avoid division by zero.
@visibleForTesting
Float32List l2Normalize(Float32List vector) {
  var sumOfSquares = 0.0;
  for (final component in vector) {
    sumOfSquares += component * component;
  }
  final norm = sqrt(sumOfSquares);
  if (norm < 1e-10) return vector;

  final normalized = Float32List(vector.length);
  for (var i = 0; i < vector.length; i++) {
    normalized[i] = vector[i] / norm;
  }
  return normalized;
}
/// Entry point for the embedding isolate.
///
/// Protocol, as consumed by [EmbeddingService.initialize]:
///   1. Sends this isolate's command [SendPort] back over `setup.sendPort`.
///   2. Loads the TFLite model and vocab; sends `true` on success or `false`
///      on failure (after which the isolate exits).
///   3. Serves `(text, replyPort)` requests until a `null` message arrives,
///      then closes the interpreter and its receive port.
Future<void> _isolateEntry(_SetupData setup) async {
  // Required before using platform channels (rootBundle) on a background isolate.
  BackgroundIsolateBinaryMessenger.ensureInitialized(setup.rootIsolateToken);

  final receivePort = ReceivePort();

  // Step 1: hand the command port back to the main isolate.
  setup.sendPort.send(receivePort.sendPort);

  Interpreter? interpreter;
  WordPieceTokenizer? tokenizer;

  try {
    // NOTE(review): the setup checklist names the bundled model
    // `minilm_l6_v2_int8.tflite`, but this loads a different asset name —
    // confirm against the assets declared in pubspec.yaml.
    interpreter = await Interpreter.fromAsset('all-MiniLM-L6-v2-quant.tflite');
    final vocabText = await rootBundle.loadString('assets/vocab.txt');
    tokenizer = WordPieceTokenizer.fromString(vocabText);
    // Step 2: signal that model + tokenizer are ready.
    setup.sendPort.send(true);
  } catch (_) {
    // Model or vocab failed to load: report failure and shut this isolate down.
    setup.sendPort.send(false);
    receivePort.close();
    return;
  }

  // Step 3: serve embedding requests until a null (dispose) message.
  await for (final message in receivePort) {
    if (message == null) break;
    final (text, replyPort) = message as _EmbedRequest;
    try {
      final result = _runInference(interpreter, tokenizer, text);
      replyPort.send(result);
    } catch (_) {
      // Inference failed for this text; a null reply tells the caller to throw.
      replyPort.send(null);
    }
  }

  interpreter.close();
  receivePort.close();
}
/// Runs a single forward pass of the embedding model for [text].
///
/// Tokenizes with [tokenizer], feeds `input_ids` / `attention_mask` /
/// `token_type_ids` (each shaped `[1, maxTokens]`), and returns the
/// L2-normalised 384-dimensional sentence embedding.
///
/// NOTE(review): assumes the exported model's input tensors are ordered
/// (ids, mask, type_ids) and output 0 is the pooled embedding — confirm
/// against the model's signature.
Float32List _runInference(Interpreter interpreter, WordPieceTokenizer tokenizer, String text) {
  final tokenIds = tokenizer.tokenize(text);
  const seqLen = WordPieceTokenizer.maxTokens;

  final inputIds = [tokenIds];
  // Attend to every non-[PAD] position. Use the tokenizer's named constant
  // instead of a magic 0 so this stays in sync with its padding convention.
  final attentionMask = [
    tokenIds.map((id) => id != WordPieceTokenizer.padId ? 1 : 0).toList(),
  ];
  final tokenTypeIds = [List<int>.filled(seqLen, 0)];

  // Single output tensor: [1, 384] sentence embedding.
  final outputBuffer = [List<double>.filled(384, 0.0)];
  interpreter.runForMultipleInputs([inputIds, attentionMask, tokenTypeIds], {0: outputBuffer});

  return l2Normalize(Float32List.fromList(outputBuffer[0]));
}
/// On-device text embedding service backed by a long-lived background [Isolate].
///
/// Start with [initialize], shut down with [dispose]. Check [isAvailable]
/// before calling [embed]; the flag is false when the model fails to load or
/// when the service has not yet been initialised.
class EmbeddingService {
  /// Creates a real embedding service backed by TFLite + Isolate.
  EmbeddingService() : _mockEmbedFn = null;

  /// Creates a test double that bypasses the Isolate and TFLite entirely.
  ///
  /// [embedFn] is called synchronously (from the caller's perspective) on every
  /// [embed] invocation. [initialize] immediately sets [isAvailable] to true.
  @visibleForTesting
  EmbeddingService.forTesting(Future<Float32List> Function(String text) embedFn) : _mockEmbedFn = embedFn;

  final Future<Float32List> Function(String text)? _mockEmbedFn;

  bool _isAvailable = false;

  /// In-flight initialization; concurrent [initialize] calls share this future
  /// so only one isolate is ever spawned.
  Future<void>? _initializing;

  Isolate? _isolate;
  SendPort? _isolateSendPort;
  ReceivePort? _setupPort;

  /// Whether the service is ready to produce embeddings.
  ///
  /// False until [initialize] completes successfully, and false again after
  /// [dispose] is called or if the model failed to load.
  bool get isAvailable => _isAvailable;

  /// Initialise the service.
  ///
  /// For the real implementation this spawns a background [Isolate], loads the
  /// TFLite model, and builds the [WordPieceTokenizer]. For the test double it
  /// is a no-op that marks the service as available.
  ///
  /// Safe to call multiple times; repeated and concurrent calls share a single
  /// underlying initialization. (Bug fix: previously two overlapping calls
  /// could each spawn their own isolate.)
  Future<void> initialize() {
    if (_isAvailable) return Future.value();
    if (_mockEmbedFn != null) {
      _isAvailable = true;
      return Future.value();
    }
    return _initializing ??= _spawnAndLoad().whenComplete(() => _initializing = null);
  }

  /// Spawns the embedding isolate and performs the two-message setup
  /// handshake: first the isolate's command port, then the load-success flag.
  Future<void> _spawnAndLoad() async {
    final setupPort = ReceivePort();
    _setupPort = setupPort;
    final messages = StreamIterator<dynamic>(setupPort);

    try {
      _isolate = await Isolate.spawn(
        _isolateEntry,
        _SetupData(sendPort: setupPort.sendPort, rootIsolateToken: RootIsolateToken.instance!),
        debugName: 'EmbeddingIsolate',
      );

      // First message: the isolate's command port.
      await messages.moveNext();
      _isolateSendPort = messages.current as SendPort;

      // Second message: whether the model + tokenizer loaded successfully.
      await messages.moveNext();
      _isAvailable = messages.current as bool;
    } finally {
      await messages.cancel();
      setupPort.close();
      _setupPort = null;
    }
  }

  /// Embed [text] and return an L2-normalised [Float32List] of length 384.
  ///
  /// Throws [StateError] if the service is not available.
  Future<Float32List> embed(String text) async {
    if (!_isAvailable) {
      throw StateError('EmbeddingService is not available. Call initialize() first.');
    }

    if (_mockEmbedFn != null) {
      return _mockEmbedFn(text);
    }

    // Capture locally so a concurrent dispose() cannot null the field between
    // the availability check and the send (previously a `!` could throw here).
    final sendPort = _isolateSendPort;
    if (sendPort == null) {
      throw StateError('EmbeddingService is not available. Call initialize() first.');
    }

    final responsePort = ReceivePort();
    sendPort.send((text, responsePort.sendPort));
    final result = await responsePort.first;
    responsePort.close();

    if (result == null) throw StateError('Embedding inference failed for text: "$text"');
    return result as Float32List;
  }

  /// Shut down the background isolate and mark the service as unavailable.
  ///
  /// Safe to call before [initialize], after [dispose], or repeatedly.
  void dispose() {
    final sendPort = _isolateSendPort;
    if (sendPort != null) {
      // Graceful shutdown: the isolate exits its message loop, closes the
      // TFLite interpreter itself, and then terminates naturally. (Bug fix:
      // previously the isolate was killed immediately after the null was
      // sent, so interpreter.close() in _isolateEntry never ran.)
      sendPort.send(null);
    } else {
      // Setup handshake never completed; force-kill whatever was spawned.
      _isolate?.kill(priority: Isolate.immediate);
    }
    _setupPort?.close();
    _isAvailable = false;
    _isolate = null;
    _isolateSendPort = null;
    _setupPort = null;
  }
}
+147
lib/core/embedding/word_piece_tokenizer.dart
···11+import 'package:characters/characters.dart';
/// BERT-style WordPiece tokenizer compatible with all-MiniLM-L6-v2.
///
/// Converts text to token IDs using the standard BERT uncased vocabulary.
/// The returned list always has exactly [maxTokens] elements.
///
/// Token ID conventions (BERT-base-uncased):
/// [PAD] = 0
/// [UNK] = 100
/// [CLS] = 101
/// [SEP] = 102
class WordPieceTokenizer {
  WordPieceTokenizer._(this._vocab);

  /// Constructs a tokenizer from the raw contents of a vocab file.
  ///
  /// Each line is one token; its line number (0-indexed) is its ID.
  factory WordPieceTokenizer.fromString(String vocabText) {
    final entries = <String, int>{};
    final lines = vocabText.split('\n');
    for (var id = 0; id < lines.length; id++) {
      final token = lines[id].trimRight();
      // Blank lines keep their ID slot but contribute no vocabulary entry.
      if (token.isEmpty) continue;
      entries[token] = id;
    }
    return WordPieceTokenizer._(entries);
  }

  static const int padId = 0;
  static const int unkId = 100;
  static const int clsId = 101;
  static const int sepId = 102;
  static const int maxTokens = 256;

  final Map<String, int> _vocab;

  /// Tokenize [text] into a list of token IDs padded/truncated to [maxTokens].
  ///
  /// Layout: `[CLS] token_ids... [SEP] [PAD]...`
  List<int> tokenize(String text) {
    final words = _basicTokenize(_cleanText(text.toLowerCase()));

    final ids = <int>[clsId];
    for (final word in words) {
      final pieces = _wordPiece(word);
      // Budget for this word, reserving one slot for the trailing [SEP].
      final remaining = maxTokens - ids.length - 1;
      if (pieces.length > remaining) {
        ids.addAll(pieces.take(remaining));
        break;
      }
      ids.addAll(pieces);
    }
    ids.add(sepId);
    ids.addAll(List<int>.filled(maxTokens - ids.length, padId));
    return ids;
  }

  /// Remove control characters and normalize whitespace to single spaces.
  String _cleanText(String text) {
    final out = StringBuffer();
    for (final grapheme in text.characters) {
      final cp = grapheme.codeUnitAt(0);
      if (cp == 0 || cp == 0xFFFD || _isControlChar(cp)) continue;
      out.write(_isWhitespace(cp) ? ' ' : grapheme);
    }
    return out.toString();
  }

  /// Split on whitespace and punctuation to produce basic tokens.
  ///
  /// Punctuation characters become their own single-character tokens.
  List<String> _basicTokenize(String text) {
    final tokens = <String>[];
    final current = StringBuffer();

    void flush() {
      if (current.isNotEmpty) {
        tokens.add(current.toString());
        current.clear();
      }
    }

    for (final grapheme in text.characters) {
      final cp = grapheme.codeUnitAt(0);
      if (_isWhitespace(cp)) {
        flush();
      } else if (_isPunctuation(cp)) {
        flush();
        tokens.add(grapheme);
      } else {
        current.write(grapheme);
      }
    }
    flush();
    return tokens;
  }

  /// WordPiece sub-word tokenization for a single [word].
  ///
  /// Greedy longest-match-first; continuation pieces carry a `##` prefix.
  /// Returns `[unkId]` if no valid segmentation exists.
  List<int> _wordPiece(String word) {
    if (word.isEmpty) return const [];
    final whole = _vocab[word];
    if (whole != null) return [whole];

    final pieceIds = <int>[];
    var start = 0;

    while (start < word.length) {
      // Shrink the candidate substring from the right until it is in-vocab.
      int? matchId;
      var end = word.length;
      while (end > start) {
        final candidate = start == 0 ? word.substring(0, end) : '##${word.substring(start, end)}';
        matchId = _vocab[candidate];
        if (matchId != null) break;
        end--;
      }

      // No prefix of the remainder is in-vocab: the whole word is unknown.
      if (matchId == null) return const [unkId];

      pieceIds.add(matchId);
      start = end;
    }

    return pieceIds;
  }

  // ASCII whitespace only (space, tab, LF, CR), matching BERT's basic cleaner.
  bool _isWhitespace(int cp) => cp == 0x20 || cp == 0x09 || cp == 0x0A || cp == 0x0D;

  // C0 controls (excluding whitespace) plus DEL and C1 controls.
  bool _isControlChar(int cp) => (cp < 0x20 && !_isWhitespace(cp)) || (cp >= 0x7F && cp <= 0x9F);

  // ASCII punctuation ranges plus general/supplemental/CJK punctuation blocks.
  bool _isPunctuation(int cp) =>
      (cp >= 33 && cp <= 47) ||
      (cp >= 58 && cp <= 64) ||
      (cp >= 91 && cp <= 96) ||
      (cp >= 123 && cp <= 126) ||
      (cp >= 0x2000 && cp <= 0x206F) ||
      (cp >= 0x2E00 && cp <= 0x2E7F) ||
      (cp >= 0x3000 && cp <= 0x303F);
}
+62
lib/features/search/data/post_text_extractor.dart
···11+import 'package:bluesky/app_bsky_embed_recordwithmedia.dart';
22+import 'package:bluesky/app_bsky_feed_defs.dart';
33+import 'package:bluesky/app_bsky_feed_post.dart';
/// Extracts a single searchable string from a [PostView] for embedding.
///
/// Concatenates (in order, separated by spaces):
/// 1. Post body text
/// 2. Alt-text from every image in an images embed
/// 3. Title + description from an external link-card embed
///
/// Returns an empty string if no text can be extracted.
class PostTextExtractor {
  const PostTextExtractor();

  String extract(PostView post) {
    final fragments = <String>[];

    // Shared collector: trims and drops empty values (trim is idempotent, so
    // already-trimmed record text passes through unchanged).
    void addIfNotEmpty(String value) {
      final trimmed = value.trim();
      if (trimmed.isNotEmpty) fragments.add(trimmed);
    }

    addIfNotEmpty(_recordText(post.record));

    final embed = post.embed;
    if (embed != null) {
      if (embed.isEmbedImagesView) {
        for (final image in embed.embedImagesView!.images) {
          addIfNotEmpty(image.alt);
        }
      } else if (embed.isEmbedExternalView) {
        final card = embed.embedExternalView!.external;
        addIfNotEmpty(card.title);
        addIfNotEmpty(card.description);
      } else if (embed.isEmbedRecordWithMediaView) {
        // Quote-post with media: index only the media half of the embed.
        final media = embed.embedRecordWithMediaView!.media;
        if (media.isEmbedImagesView) {
          for (final image in media.embedImagesView!.images) {
            addIfNotEmpty(image.alt);
          }
        } else if (media.isEmbedExternalView) {
          final card = media.embedExternalView!.external;
          addIfNotEmpty(card.title);
          addIfNotEmpty(card.description);
        }
      }
    }

    return fragments.join(' ');
  }

  /// Decodes the post record and returns its trimmed body text, or '' when
  /// the record does not parse as a feed post.
  String _recordText(Map<String, dynamic> record) {
    try {
      return FeedPostRecord.fromJson(record).text.trim();
    } catch (_) {
      return '';
    }
  }
}
+156
test/core/embedding/embedding_service_test.dart
···11+import 'dart:typed_data';
22+33+import 'package:flutter_test/flutter_test.dart';
44+import 'package:lazurite/core/embedding/embedding_service.dart';
void main() {
  group('l2Normalize', () {
    test('unit vector is unchanged', () {
      final v = Float32List.fromList([1.0, 0.0, 0.0]);
      final result = l2Normalize(v);
      expect(result[0], closeTo(1.0, 1e-6));
      expect(result[1], closeTo(0.0, 1e-6));
      expect(result[2], closeTo(0.0, 1e-6));
    });

    test('scales vector to unit length', () {
      final v = Float32List.fromList([3.0, 4.0]);
      final result = l2Normalize(v);
      // norm = 5; normalised = [0.6, 0.8]
      expect(result[0], closeTo(0.6, 1e-6));
      expect(result[1], closeTo(0.8, 1e-6));
    });

    test('result has norm ≈ 1', () {
      final v = Float32List.fromList(List.generate(384, (i) => (i + 1).toDouble()));
      final result = l2Normalize(v);
      var norm = 0.0;
      for (final x in result) {
        norm += x * x;
      }
      expect(norm, closeTo(1.0, 1e-5));
    });

    test('near-zero vector is returned unchanged (no division by zero)', () {
      final v = Float32List.fromList([0.0, 0.0, 0.0]);
      final result = l2Normalize(v);
      expect(result, equals(v));
    });

    test('returns a new list, does not mutate input', () {
      final v = Float32List.fromList([3.0, 4.0]);
      l2Normalize(v);
      expect(v[0], equals(3.0));
      expect(v[1], equals(4.0));
    });
  });

  group('EmbeddingService', () {
    group('initial state', () {
      test('isAvailable is false before initialize', () {
        final service = EmbeddingService.forTesting((_) async => Float32List(384));
        expect(service.isAvailable, isFalse);
      });
    });

    group('initialize / dispose', () {
      test('isAvailable is true after initialize with mock backend', () async {
        final service = EmbeddingService.forTesting((_) async => Float32List(384));
        await service.initialize();
        expect(service.isAvailable, isTrue);
      });

      test('initialize is idempotent', () async {
        var calls = 0;
        final service = EmbeddingService.forTesting((_) async {
          calls++;
          return Float32List(384);
        });
        await service.initialize();
        await service.initialize(); // second call should be a no-op
        expect(service.isAvailable, isTrue);
        // The counter tracks embed-backend invocations: one embed call proves
        // the mock was wired exactly once and the service still works.
        await service.embed('test');
        expect(calls, equals(1));
      });

      test('dispose resets isAvailable to false', () async {
        final service = EmbeddingService.forTesting((_) async => Float32List(384));
        await service.initialize();
        service.dispose();
        expect(service.isAvailable, isFalse);
      });

      test('dispose before initialize does not throw', () {
        final service = EmbeddingService.forTesting((_) async => Float32List(384));
        expect(() => service.dispose(), returnsNormally);
      });

      test('dispose can be called multiple times safely', () async {
        final service = EmbeddingService.forTesting((_) async => Float32List(384));
        await service.initialize();
        service.dispose();
        expect(() => service.dispose(), returnsNormally);
      });
    });

    group('embed', () {
      test('throws StateError when not initialized', () {
        final service = EmbeddingService.forTesting((_) async => Float32List(384));
        expect(() => service.embed('hello'), throwsStateError);
      });

      test('throws StateError after dispose', () async {
        final service = EmbeddingService.forTesting((_) async => Float32List(384));
        await service.initialize();
        service.dispose();
        expect(() => service.embed('hello'), throwsStateError);
      });

      test('returns the value produced by the mock backend', () async {
        final expected = Float32List.fromList(List.generate(384, (i) => i.toDouble()));
        final service = EmbeddingService.forTesting((_) async => expected);
        await service.initialize();

        final result = await service.embed('some text');
        expect(result, equals(expected));
      });

      test('result has length 384', () async {
        final service = EmbeddingService.forTesting((_) async => Float32List(384));
        await service.initialize();

        final result = await service.embed('hello world');
        expect(result.length, equals(384));
      });

      test('forwards the exact text to the backend', () async {
        String? received;
        final service = EmbeddingService.forTesting((text) async {
          received = text;
          return Float32List(384);
        });
        await service.initialize();

        await service.embed('the quick brown fox');
        expect(received, equals('the quick brown fox'));
      });

      test('multiple concurrent embeds each receive correct results', () async {
        var callCount = 0;
        final service = EmbeddingService.forTesting((text) async {
          callCount++;
          final v = Float32List(384);
          v[0] = callCount.toDouble();
          return v;
        });
        await service.initialize();

        final results = await Future.wait([service.embed('a'), service.embed('b'), service.embed('c')]);

        expect(results.length, equals(3));
        expect(results.every((r) => r.length == 384), isTrue);
        // Bug fix: the test name promises "correct results" but only lengths
        // were checked. Each call must have received its own distinct vector.
        expect(results.map((r) => r[0]).toSet(), equals({1.0, 2.0, 3.0}));
      });
    });
  });
}