//! A better Rust ATProto crate
1use core::{
2 fmt::{Debug, Formatter},
3 marker::PhantomData,
4 ops::{Deref, DerefMut, Index, IndexMut},
5 slice::SliceIndex,
6};
7use std::collections::VecDeque;
8
9use maitake_sync::blocking::{RwLock, RwLockReadGuard, RwLockWriteGuard};
10use serde::{Deserialize, Serialize};
11
12use crate::{
13 Handle,
14 io::{FromEmbed, Read, Write},
15 new_handle,
16 storage::Storage,
17};
18
// With the `alloc` feature disabled, fall back to a fixed-capacity heapless
// deque (capacity 2, matching the default `buf_size` in `LVec::new`).
// NOTE(review): the `use std::collections::VecDeque` above is not gated on
// `feature = "alloc"`, so in no-alloc builds this alias appears to collide
// with (or be shadowed by) that import — confirm the import should carry
// `#[cfg(feature = "alloc")]`.
#[cfg(not(feature = "alloc"))]
pub type VecDeque<T> = heapless::Deque<T, 2>;
21
/// A "lazy" vector: elements are serialized into `storage` under keys derived
/// from `handle`, with a small in-memory cache of recently touched elements.
pub struct LVec<T, S> {
    // 4-byte identifier prefixed to every storage key (see `get_key`).
    handle: [u8; 4],
    // Cache of `(index, element)` pairs; bounded by `buf_size` on insert.
    loaded: RwLock<VecDeque<(usize, T)>>,
    // Guard lock whose read/write guards are embedded in `Ref`/`RefMut`
    // to pin cached elements while references to them are handed out.
    lock: RwLock<()>,
    // Number of elements logically stored.
    len: usize,
    // Maximum number of cached elements kept in `loaded`.
    buf_size: usize,
    // Backing storage implementation.
    storage: S,
}
30
31impl<T, S> LVec<T, S> {
32 pub fn new_with(handle: Handle, buf_size: usize, storage: S) -> Self {
33 LVec {
34 handle: handle.0.to_be_bytes(),
35 loaded: RwLock::new(VecDeque::new()),
36 lock: RwLock::new(()),
37 len: 0,
38 buf_size,
39 storage,
40 }
41 }
42
43 pub fn new(storage: S) -> Self {
44 LVec::new_with(new_handle(), 2, storage)
45 }
46}
47
48impl<T, S> LVec<T, S>
49where
50 S: Storage + Read + Write,
51 T: Serialize,
52{
53 pub fn len(&self) -> usize {
54 self.len
55 }
56
57 fn get_key(&self, index: usize) -> Vec<u8> {
58 let mut key = self.handle.to_vec();
59 key.extend_from_slice(&index.to_be_bytes());
60 key
61 }
62
63 /// NOTE: Do not hold these references for an extended period of time.
64 /// This does NOT spin waiting for a write lock if it misses the cache, but
65 /// then it will not cache what it found. I consider this an acceptable failure
66 /// mode for the intended use case.
67 pub fn get_elem<'de, 's: 'de>(&'s self, index: usize) -> Option<Ref<'s, T>>
68 where
69 T: Deserialize<'de>,
70 S: 'de,
71 {
72 let loaded = self.loaded.read();
73 let mut found = None;
74 for (i, (idx, _)) in loaded.iter().enumerate() {
75 if *idx == index {
76 found = Some(i);
77 }
78 }
79 if let Some(i) = found {
80 let guard = self.lock.read();
81 return loaded.get(i).map(|(_, e)| unsafe { Ref::new(guard, e) });
82 }
83 drop(loaded);
84
85 let key = self.get_key(index);
86 let reader = FromEmbed::new(self.storage.get::<S>(&key).ok()??);
87 let buffer = self.storage.buffer(&key);
88 let (elem, _buf) = postcard::from_eio::<T, _>((reader, buffer)).ok()?;
89 if let Some(mut loaded) = self.loaded.try_write() {
90 loaded.push_back((index, elem));
91 if loaded.len() > self.buf_size {
92 loaded.pop_front();
93 }
94 }
95 let read_guard = self.loaded.read();
96 let back = read_guard.back();
97 let guard = self.lock.read();
98 back.map(|(_, e)| unsafe { Ref::new(guard, e) })
99 }
100
101 pub fn get_elem_mut<'de, 's: 'de>(&'s mut self, index: usize) -> Option<RefMut<'s, T>>
102 where
103 T: Deserialize<'de>,
104 S: 'de,
105 {
106 let loaded = self.loaded.get_mut();
107 let mut found = None;
108 for (i, (idx, _)) in loaded.iter().enumerate() {
109 if *idx == index {
110 found = Some(i);
111 }
112 }
113 if let Some(i) = found {
114 let guard = self.lock.write();
115 loaded
116 .get_mut(i)
117 .map(|(_, e)| unsafe { RefMut::new(guard, e) })
118 } else {
119 self.get_stored_mut(index)
120 }
121 }
122
123 fn get_stored_mut<'de, 's: 'de>(&'s mut self, index: usize) -> Option<RefMut<'s, T>>
124 where
125 T: Deserialize<'de>,
126 S: 'de,
127 {
128 let key = self.get_key(index);
129 let reader = FromEmbed::new(self.storage.get::<S>(&key).ok()??);
130 let buffer = self.storage.buffer(&key);
131 let (elem, _buf) = postcard::from_eio::<T, _>((reader, buffer)).ok()?;
132 let loaded = self.loaded.get_mut();
133 loaded.push_back((index, elem));
134 if loaded.len() > self.buf_size {
135 loaded.pop_front();
136 }
137 let guard = self.lock.write();
138 let back = loaded.back_mut();
139 back.map(|(_, e)| unsafe { RefMut::new(guard, e) })
140 }
141
142 // pub fn get<I>(&self, index: I) -> Option<&<I as SliceIndex<[T]>>::Output>
143 // where
144 // I: SliceIndex<[T]>,
145 // {
146 // unimplemented!()
147 // }
148
149 // pub fn get_mut<I>(&mut self, index: I) -> Option<&mut <I as SliceIndex<[T]>>::Output>
150 // where
151 // I: SliceIndex<[T]>,
152 // {
153 // unimplemented!()
154 // }
155
156 pub fn push(&mut self, elem: T) {
157 let index = self.len;
158 let key = self.get_key(index);
159 let mut writer = FromEmbed::new(self.storage.writer(&key).expect("failed to get writer"));
160 postcard::to_eio(&elem, &mut writer).expect("Failed to serialize element");
161 self.storage.put::<_>(&key, writer.into_inner()).ok();
162 self.len += 1;
163 let loaded = self.loaded.get_mut();
164 loaded.push_back((index, elem));
165 }
166
167 pub fn insert(&mut self, index: usize, elem: T) {
168 let key = self.get_key(index);
169 let mut writer = FromEmbed::new(self.storage.writer(&key).expect("failed to get writer"));
170 postcard::to_eio(&elem, &mut writer).expect("Failed to serialize element");
171 self.len += 1;
172 let loaded = self.loaded.get_mut();
173 loaded.push_back((index, elem));
174 self.storage.put::<_>(&key, writer.into_inner()).ok();
175 }
176
177 pub fn remove<'de, 's: 'de>(&'s mut self, index: usize) -> Option<T>
178 where
179 T: Deserialize<'de>,
180 S: 'de,
181 {
182 if index >= self.len {
183 return None;
184 }
185 let mut found = None;
186 let mut loaded = self.loaded.write();
187 for (i, (idx, _)) in loaded.iter().enumerate() {
188 if *idx == index {
189 found = Some(i);
190 }
191 }
192 let key = self.get_key(index);
193 if let Some(i) = found {
194 self.storage.del(&key).ok();
195 self.len = self.len.saturating_sub(1);
196 loaded.remove(i).map(|(_, e)| e)
197 } else {
198 let reader = FromEmbed::new(self.storage.get::<S>(&key).ok()??);
199 let buffer = self.storage.buffer(&key);
200 let (elem, _buf) = postcard::from_eio::<T, _>((reader, buffer)).ok()?;
201 self.storage.del(&key).ok();
202 self.len = self.len.saturating_sub(1);
203 Some(elem)
204 }
205 }
206
207 pub fn pop<'de, 's: 'de>(&'s mut self) -> Option<T>
208 where
209 T: Deserialize<'de>,
210 S: 'de,
211 {
212 if self.len == 0 {
213 return None;
214 }
215 let index = self.len - 1;
216 self.remove(index)
217 }
218
219 pub fn index_ref<'de, 's: 'de, 'o: 'de>(&'s self, index: usize) -> &'o T
220 where
221 T: Deserialize<'de>,
222 S: 'de,
223 {
224 // SAFETY
225 //
226 // Yes, I have read https://doc.rust-lang.org/nomicon/transmutes.html
227 //
228 // I promise this is necessary specifically to launder two lifetimes
229 // which I know to be compatible but which the compiler gets confused by.
230 // This explicitly specifies the output lifetime, it exceeds the deserialize lifetime
231 // but is shorter than the 's self lifetime.
232 unsafe {
233 let get_ref = self.get_elem(index).expect("index out of bounds");
234 let get_ref = get_ref.value();
235
236 core::mem::transmute::<&T, &'o T>(get_ref)
237 }
238 }
239
240 pub fn index_get_mut<'de, 'o: 'de, 's: 'o>(&'s mut self, index: usize) -> &'o mut T
241 where
242 T: Deserialize<'de>,
243 S: 'de,
244 {
245 // SAFETY
246 //
247 // Yes, I have read https://doc.rust-lang.org/nomicon/transmutes.html
248 //
249 // I promise this is necessary specifically to launder two lifetimes
250 // which I know to be compatible but which the compiler gets confused by.
251 // This explicitly specifies the output lifetime, it exceeds the deserialize lifetime
252 // but is shorter than the 's self lifetime.
253 unsafe {
254 let mut mut_ref = self.get_elem_mut(index).expect("index out of bounds");
255 let mut_ref = mut_ref.value_mut();
256
257 core::mem::transmute::<&mut T, &'o mut T>(mut_ref)
258 }
259 }
260}
261
impl<T, S> IndexMut<usize> for LVec<T, S>
where
    T: for<'de> Deserialize<'de> + Serialize,
    S: Storage + Read + Write,
{
    // Delegates to `index_get_mut`, which panics ("index out of bounds")
    // when the element cannot be loaded.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.index_get_mut(index)
    }
}
271
impl<T, S> Index<usize> for LVec<T, S>
where
    T: for<'de> Deserialize<'de> + Serialize,
    S: Storage + Read + Write,
{
    type Output = T;

    // Delegates to `index_ref`, which panics ("index out of bounds") when
    // the element cannot be loaded.
    fn index(&self, index: usize) -> &Self::Output {
        self.index_ref(index)
    }
}
283
/// Owning, draining iterator for `LVec`: each yielded element is deleted from
/// storage, and `Drop` deletes whatever was not consumed.
pub struct IntoIter<T, S: Storage> {
    // Backing storage elements are read from and deleted out of.
    storage: S,
    // Key prefix copied from the source `LVec`.
    handle: [u8; 4],
    // Next index for forward iteration.
    cursor: usize,
    // Index for back-to-front iteration; `None` once index 0 has been
    // consumed from the back.
    rev_cursor: Option<usize>,
    // Element count at construction time.
    len: usize,
    // Ties the element type to the iterator without storing a `T`.
    _marker: PhantomData<T>,
}
292
293impl<T, S: Storage> IntoIter<T, S> {
294 fn get_key(&self, index: usize) -> Vec<u8> {
295 let mut key = self.handle.to_vec();
296 key.extend_from_slice(&index.to_be_bytes());
297 key
298 }
299}
300
301fn get_stored<'de, T, S>(key: &[u8], storage: &'de S, buffer: &'de mut [u8]) -> Option<T>
302where
303 T: Deserialize<'de>,
304 S: Storage + Read,
305{
306 let reader = FromEmbed::new(storage.get::<S>(&key).ok()??);
307 let (elem, _buf) = postcard::from_eio::<T, _>((reader, buffer)).ok()?;
308 Some(elem)
309}
310
311impl<T, S> IntoIterator for LVec<T, S>
312where
313 T: for<'de> Deserialize<'de>,
314 S: Storage + Read,
315{
316 type Item = T;
317
318 type IntoIter = IntoIter<T, S>;
319
320 fn into_iter(self) -> Self::IntoIter {
321 let storage = self.storage;
322 let cursor = 0;
323 let len = self.len;
324 IntoIter {
325 handle: self.handle,
326 storage,
327 cursor,
328 rev_cursor: Some(len - 1),
329 len,
330 _marker: PhantomData,
331 }
332 }
333}
334
335impl<T, S> Iterator for IntoIter<T, S>
336where
337 T: for<'de> Deserialize<'de>,
338 S: Storage + Read,
339{
340 type Item = T;
341
342 fn next(&mut self) -> Option<Self::Item> {
343 let key = self.get_key(self.cursor);
344 self.cursor += 1;
345 let elem = get_stored(&key, &self.storage, self.storage.buffer(&key));
346 self.storage.del(&key).ok();
347 elem
348 }
349
350 fn size_hint(&self) -> (usize, Option<usize>) {
351 (self.len, Some(self.len))
352 }
353}
354
355impl<T, S> DoubleEndedIterator for IntoIter<T, S>
356where
357 T: for<'de> Deserialize<'de>,
358 S: Storage + Read,
359{
360 fn next_back(&mut self) -> Option<Self::Item> {
361 if self.rev_cursor.is_none() {
362 None
363 } else {
364 let key = self.get_key(self.rev_cursor.unwrap());
365 if self.rev_cursor.is_some_and(|c| c == 0) {
366 self.rev_cursor = None;
367 } else {
368 self.rev_cursor = self.rev_cursor.map(|i| i - 1);
369 }
370 let elem = get_stored(&key, &self.storage, self.storage.buffer(&key));
371 self.storage.del(&key).ok();
372 elem
373 }
374 }
375}
376
377impl<T, S> Drop for IntoIter<T, S>
378where
379 S: Storage,
380{
381 fn drop(&mut self) {
382 let rev_cursor = self.rev_cursor.unwrap_or_default();
383 for idx in self.cursor..=rev_cursor {
384 let key = self.get_key(idx);
385 self.storage.del(&key).ok();
386 }
387 }
388}
389
/// Borrowing iterator for `LVec`: deserializes elements from storage without
/// deleting them.
pub struct Iter<'a, T, S> {
    // Borrowed backing storage elements are read from.
    storage: &'a S,
    // Key prefix copied from the source `LVec`.
    handle: [u8; 4],
    // Next index for forward iteration.
    cursor: usize,
    // Index for back-to-front iteration; `None` once index 0 has been
    // consumed from the back.
    rev_cursor: Option<usize>,
    // Element count at construction time.
    len: usize,
    // Ties the element type (and borrow) to the iterator.
    _marker: PhantomData<&'a T>,
}
398
399impl<'a, T, S> Iter<'a, T, S> {
400 pub fn new(storage: &'a S, len: usize, handle: [u8; 4]) -> Self {
401 let (cursor, rev_cursor) = (0, Some(len - 1));
402 Self {
403 handle,
404 storage,
405 len,
406 cursor,
407 rev_cursor,
408 _marker: PhantomData,
409 }
410 }
411}
412
413impl<T, S: Storage> Iter<'_, T, S> {
414 fn get_key(&self, index: usize) -> Vec<u8> {
415 let mut key = self.handle.to_vec();
416 key.extend_from_slice(&index.to_be_bytes());
417 key
418 }
419}
420
421impl<'a, T, S> Iterator for Iter<'a, T, S>
422where
423 T: Deserialize<'a>,
424 S: Storage + Read,
425{
426 type Item = T;
427
428 fn next(&mut self) -> Option<Self::Item> {
429 if self.cursor >= self.len {
430 None
431 } else {
432 let key = self.get_key(self.cursor);
433 self.cursor += 1;
434 let elem = get_stored(&key, self.storage, self.storage.buffer(&key));
435 elem
436 }
437 }
438
439 fn size_hint(&self) -> (usize, Option<usize>) {
440 (self.len, Some(self.len))
441 }
442}
443
444impl<'a, T, S> DoubleEndedIterator for Iter<'a, T, S>
445where
446 T: Deserialize<'a>,
447 S: Storage + Read,
448{
449 fn next_back(&mut self) -> Option<Self::Item> {
450 if self.rev_cursor.is_none() {
451 None
452 } else {
453 let key = self.get_key(self.rev_cursor.unwrap());
454 if self.rev_cursor.is_some_and(|c| c == 0) {
455 self.rev_cursor = None;
456 } else {
457 self.rev_cursor = self.rev_cursor.map(|i| i - 1);
458 }
459 let elem = get_stored(&key, self.storage, self.storage.buffer(&key));
460 elem
461 }
462 }
463}
464
/// Shared reference to a cached `LVec` element: a raw pointer into the cache
/// kept alive alongside a read guard on the owning `LVec`'s guard lock.
pub struct Ref<'a, V> {
    // Held for the lifetime of the pointer; dropped with the `Ref`.
    _guard: RwLockReadGuard<'a, ()>,
    // Points into the `LVec` cache; validity is promised by the caller of
    // `Ref::new`.
    v: *const V,
}
469
// SAFETY: `Ref` is semantically a `&V` plus a lock guard, and `&V` is
// `Send`/`Sync` exactly when `V: Sync`.
// NOTE(review): this also asserts the maitake-sync read guard itself may be
// moved to / shared with another thread — confirm against its documentation.
unsafe impl<'a, V: Sync> Send for Ref<'a, V> {}
unsafe impl<'a, V: Sync> Sync for Ref<'a, V> {}
472
impl<'a, V> Ref<'a, V> {
    /// Builds a `Ref` from a guard and a raw pointer.
    ///
    /// # Safety
    /// `v` must be valid for reads, and must not be mutated or invalidated,
    /// for as long as the returned `Ref` (and thus `guard`) is alive.
    pub(crate) unsafe fn new(guard: RwLockReadGuard<'a, ()>, v: *const V) -> Self {
        Self { _guard: guard, v }
    }

    /// Shared access to the referenced value.
    pub fn value(&self) -> &V {
        // SAFETY: upheld by the contract of `Ref::new`.
        unsafe { &*self.v }
    }
}
482
483impl<'a, V: Debug> Debug for Ref<'a, V> {
484 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
485 f.debug_struct("Ref").field("v", &self.v).finish()
486 }
487}
488
// Lets a `Ref` be used wherever a `&V` is expected.
impl<'a, V> Deref for Ref<'a, V> {
    type Target = V;

    fn deref(&self) -> &V {
        self.value()
    }
}
496
// Explicit borrow of the referenced value.
impl<'a, V> AsRef<V> for Ref<'a, V> {
    fn as_ref(&self) -> &V {
        self.value()
    }
}
502
/// Exclusive reference to a cached `LVec` element: a raw pointer into the
/// cache kept alive alongside a write guard on the owning `LVec`'s guard
/// lock.
pub struct RefMut<'a, V> {
    // Held for the lifetime of the pointer; dropped with the `RefMut`.
    #[allow(dead_code)]
    guard: RwLockWriteGuard<'a, ()>,
    // Points into the `LVec` cache; validity is promised by the caller of
    // `RefMut::new`.
    v: *mut V,
}
508
509unsafe impl<'a, V: Sync> Send for RefMut<'a, V> {}
510unsafe impl<'a, V: Sync> Sync for RefMut<'a, V> {}
511
impl<'a, V> RefMut<'a, V> {
    /// Builds a `RefMut` from a guard and a raw pointer.
    ///
    /// # Safety
    /// `v` must be valid for reads and writes, and must not be aliased or
    /// invalidated, for as long as the returned `RefMut` (and thus `guard`)
    /// is alive.
    pub(crate) unsafe fn new(guard: RwLockWriteGuard<'a, ()>, v: *mut V) -> Self {
        Self { guard, v }
    }

    /// Shared access to the referenced value.
    pub fn value(&self) -> &V {
        // SAFETY: upheld by the contract of `RefMut::new`.
        unsafe { &*self.v }
    }

    /// Exclusive access to the referenced value.
    pub fn value_mut(&mut self) -> &mut V {
        // SAFETY: upheld by the contract of `RefMut::new`; `&mut self`
        // guarantees this `RefMut` is the only path to the pointee here.
        unsafe { &mut *self.v }
    }
}
525
526impl<'a, V: Debug> Debug for RefMut<'a, V> {
527 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
528 f.debug_struct("RefMut").field("v", &self.v).finish()
529 }
530}
531
// Lets a `RefMut` be used wherever a `&V` is expected.
impl<'a, V> Deref for RefMut<'a, V> {
    type Target = V;

    fn deref(&self) -> &V {
        self.value()
    }
}
539
// Lets a `RefMut` be used wherever a `&mut V` is expected.
impl<'a, V> DerefMut for RefMut<'a, V> {
    fn deref_mut(&mut self) -> &mut V {
        self.value_mut()
    }
}
545
// Explicit shared borrow of the referenced value.
impl<'a, V> AsRef<V> for RefMut<'a, V> {
    fn as_ref(&self) -> &V {
        self.value()
    }
}