A file-based task manager
0
fork

Configure Feed

Select the types of activity you want to include in your feed.

Make git refs the source of truth for git-backed workspaces

When tsk init runs inside a git repository, all workspace state — tasks,
index, attrs, backlinks, remotes — is stored as git blobs addressed by refs
under refs/tsk/. No file cache is kept on disk; only a marker file in .tsk/
records that this is a git-backed workspace.

Outside of git repositories, the file-based backend remains in use and keeps
its state in .tsk/ as before.

Architecture: a Store trait (src/backend.rs) exposes a tiny logical blob
key/value API. FileStore writes files; GitStore writes git blobs via git2
and addresses them by ref. High-level operations (next_id, read/write_task,
attrs, backlinks, remotes, move_task) are free functions over &dyn Store so
both backends share a single implementation.

Storage changes:
- Per-task attrs and backlinks are now their own blobs (attrs/<id>,
backlinks/<id>) instead of filesystem xattrs — uniform across backends.
- Active vs archived tasks live in distinct ref/path namespaces (tasks/<id>
vs archive/<id>); drop and reopen now move the blob rather than maintaining
symlinks.
- Removed nix flock dependency along with the now-unused locking helper.

Tests: both backends exercised through a shared lifecycle suite, plus
backend-level round-trip tests for blob ops, listing, attrs, backlinks,
remotes, and active/archive helpers.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

+996 -1034
+87 -16
Cargo.lock
··· 59 59 checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" 60 60 61 61 [[package]] 62 - name = "cfg-if" 63 - version = "1.0.3" 62 + name = "cc" 63 + version = "1.2.61" 64 64 source = "registry+https://github.com/rust-lang/crates.io-index" 65 - checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" 65 + checksum = "d16d90359e986641506914ba71350897565610e87ce0ad9e6f28569db3dd5c6d" 66 + dependencies = [ 67 + "find-msvc-tools", 68 + "jobserver", 69 + "libc", 70 + "shlex", 71 + ] 66 72 67 73 [[package]] 68 - name = "cfg_aliases" 69 - version = "0.2.1" 74 + name = "cfg-if" 75 + version = "1.0.3" 70 76 source = "registry+https://github.com/rust-lang/crates.io-index" 71 - checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" 77 + checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" 72 78 73 79 [[package]] 74 80 name = "clap" ··· 188 194 checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" 189 195 190 196 [[package]] 197 + name = "find-msvc-tools" 198 + version = "0.1.9" 199 + source = "registry+https://github.com/rust-lang/crates.io-index" 200 + checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" 201 + 202 + [[package]] 191 203 name = "form_urlencoded" 192 204 version = "1.2.2" 193 205 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 209 221 ] 210 222 211 223 [[package]] 224 + name = "git2" 225 + version = "0.20.4" 226 + source = "registry+https://github.com/rust-lang/crates.io-index" 227 + checksum = "7b88256088d75a56f8ecfa070513a775dd9107f6530ef14919dac831af9cfe2b" 228 + dependencies = [ 229 + "bitflags", 230 + "libc", 231 + "libgit2-sys", 232 + "log", 233 + "url", 234 + ] 235 + 236 + [[package]] 212 237 name = "heck" 213 238 version = "0.5.0" 214 239 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 365 390 ] 366 391 367 392 [[package]] 393 + name = "jobserver" 394 + version = 
"0.1.34" 395 + source = "registry+https://github.com/rust-lang/crates.io-index" 396 + checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" 397 + dependencies = [ 398 + "getrandom", 399 + "libc", 400 + ] 401 + 402 + [[package]] 368 403 name = "libc" 369 404 version = "0.2.175" 370 405 source = "registry+https://github.com/rust-lang/crates.io-index" 371 406 checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" 372 407 373 408 [[package]] 409 + name = "libgit2-sys" 410 + version = "0.18.3+1.9.2" 411 + source = "registry+https://github.com/rust-lang/crates.io-index" 412 + checksum = "c9b3acc4b91781bb0b3386669d325163746af5f6e4f73e6d2d630e09a35f3487" 413 + dependencies = [ 414 + "cc", 415 + "libc", 416 + "libz-sys", 417 + "pkg-config", 418 + ] 419 + 420 + [[package]] 421 + name = "libz-sys" 422 + version = "1.1.28" 423 + source = "registry+https://github.com/rust-lang/crates.io-index" 424 + checksum = "fc3a226e576f50782b3305c5ccf458698f92798987f551c6a02efe8276721e22" 425 + dependencies = [ 426 + "cc", 427 + "libc", 428 + "pkg-config", 429 + "vcpkg", 430 + ] 431 + 432 + [[package]] 374 433 name = "linux-raw-sys" 375 434 version = "0.4.15" 376 435 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 389 448 checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" 390 449 391 450 [[package]] 392 - name = "nix" 393 - version = "0.30.1" 451 + name = "log" 452 + version = "0.4.29" 394 453 source = "registry+https://github.com/rust-lang/crates.io-index" 395 - checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" 396 - dependencies = [ 397 - "bitflags", 398 - "cfg-if", 399 - "cfg_aliases", 400 - "libc", 401 - ] 454 + checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" 402 455 403 456 [[package]] 404 457 name = "once_cell" ··· 434 487 version = "2.3.2" 435 488 source = "registry+https://github.com/rust-lang/crates.io-index" 436 489 checksum = 
"9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" 490 + 491 + [[package]] 492 + name = "pkg-config" 493 + version = "0.3.33" 494 + source = "registry+https://github.com/rust-lang/crates.io-index" 495 + checksum = "19f132c84eca552bf34cab8ec81f1c1dcc229b811638f9d283dceabe58c5569e" 437 496 438 497 [[package]] 439 498 name = "potential_utf" ··· 521 580 ] 522 581 523 582 [[package]] 583 + name = "shlex" 584 + version = "1.3.0" 585 + source = "registry+https://github.com/rust-lang/crates.io-index" 586 + checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" 587 + 588 + [[package]] 524 589 name = "smallvec" 525 590 version = "1.15.1" 526 591 source = "registry+https://github.com/rust-lang/crates.io-index" ··· 612 677 "clap_mangen", 613 678 "colored", 614 679 "edit", 680 + "git2", 615 681 "itertools", 616 - "nix", 617 682 "open", 618 683 "tempfile", 619 684 "thiserror", ··· 650 715 version = "0.2.2" 651 716 source = "registry+https://github.com/rust-lang/crates.io-index" 652 717 checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" 718 + 719 + [[package]] 720 + name = "vcpkg" 721 + version = "0.2.15" 722 + source = "registry+https://github.com/rust-lang/crates.io-index" 723 + checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" 653 724 654 725 [[package]] 655 726 name = "wasi"
+1 -1
Cargo.toml
··· 18 18 clap = { version = "4", features = ["derive", "env"] } 19 19 clap_complete = "4" 20 20 edit = "0" 21 - nix = { version = "0", features = ["fs"] } 22 21 thiserror = "2" 23 22 url = "2" 24 23 xattr = "1" 25 24 colored = "3" 26 25 open = "5" 27 26 itertools = "0" 27 + git2 = { version = "0.20", default-features = false } 28 28 29 29 [dev-dependencies] 30 30 tempfile = "3"
+510
src/backend.rs
··· 1 + //! Storage backends for tsk workspaces. 2 + //! 3 + //! A [`Store`] is a logical key/value blob store. Two impls are provided: 4 + //! 5 + //! - [`FileStore`] keeps blobs as files under `.tsk/`. Used when `tsk init` runs 6 + //! outside a git repository. 7 + //! - [`GitStore`] stores each blob as a git blob, addressed by a ref under 8 + //! `refs/tsk/`. Used when `tsk init` runs inside a git repository — the git 9 + //! refs are the only durable storage; nothing is cached on disk. 10 + //! 11 + //! Higher-level operations (tasks, attrs, backlinks, index, remotes) are 12 + //! implemented as free functions over `dyn Store` so both backends share a 13 + //! single implementation. 14 + 15 + use crate::errors::{Error, Result}; 16 + use crate::workspace::{Id, Remote}; 17 + use git2::{ObjectType, Oid, Repository}; 18 + use std::collections::{BTreeMap, HashSet}; 19 + use std::fs::{self, OpenOptions}; 20 + use std::io::Write; 21 + use std::path::{Path, PathBuf}; 22 + use std::str::FromStr; 23 + 24 + pub const GIT_BACKED_MARKER: &str = "git-backed"; 25 + const REF_PREFIX: &str = "refs/tsk"; 26 + 27 + /// A logical blob store. Keys are forward-slash separated strings. 28 + pub trait Store: Send + Sync { 29 + fn read(&self, key: &str) -> Result<Option<Vec<u8>>>; 30 + fn write(&self, key: &str, data: &[u8]) -> Result<()>; 31 + fn delete(&self, key: &str) -> Result<()>; 32 + fn exists(&self, key: &str) -> Result<bool>; 33 + /// List all keys with the given prefix (no trailing slash). Returns full keys. 
34 + fn list(&self, prefix: &str) -> Result<Vec<String>>; 35 + } 36 + 37 + // ─── FileStore ────────────────────────────────────────────────────────────── 38 + 39 + pub struct FileStore { 40 + pub root: PathBuf, 41 + } 42 + 43 + impl FileStore { 44 + pub fn new(root: PathBuf) -> Self { 45 + Self { root } 46 + } 47 + 48 + fn path(&self, key: &str) -> PathBuf { 49 + self.root.join(key) 50 + } 51 + } 52 + 53 + impl Store for FileStore { 54 + fn read(&self, key: &str) -> Result<Option<Vec<u8>>> { 55 + let p = self.path(key); 56 + match fs::read(&p) { 57 + Ok(data) => Ok(Some(data)), 58 + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(None), 59 + Err(e) => Err(e.into()), 60 + } 61 + } 62 + 63 + fn write(&self, key: &str, data: &[u8]) -> Result<()> { 64 + let p = self.path(key); 65 + if let Some(parent) = p.parent() { 66 + fs::create_dir_all(parent)?; 67 + } 68 + let tmp = p.with_extension("tmp"); 69 + let mut f = OpenOptions::new() 70 + .write(true) 71 + .create(true) 72 + .truncate(true) 73 + .open(&tmp)?; 74 + f.write_all(data)?; 75 + f.sync_all()?; 76 + fs::rename(&tmp, &p)?; 77 + Ok(()) 78 + } 79 + 80 + fn delete(&self, key: &str) -> Result<()> { 81 + match fs::remove_file(self.path(key)) { 82 + Ok(()) => Ok(()), 83 + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), 84 + Err(e) => Err(e.into()), 85 + } 86 + } 87 + 88 + fn exists(&self, key: &str) -> Result<bool> { 89 + Ok(self.path(key).exists()) 90 + } 91 + 92 + fn list(&self, prefix: &str) -> Result<Vec<String>> { 93 + let dir = self.path(prefix); 94 + if !dir.is_dir() { 95 + return Ok(Vec::new()); 96 + } 97 + let mut out = Vec::new(); 98 + for entry in fs::read_dir(&dir)? 
{ 99 + let entry = entry?; 100 + if entry.file_type()?.is_file() 101 + && let Some(name) = entry.file_name().to_str() 102 + { 103 + out.push(format!("{prefix}/{name}")); 104 + } 105 + } 106 + Ok(out) 107 + } 108 + 109 + } 110 + 111 + // ─── GitStore ─────────────────────────────────────────────────────────────── 112 + 113 + pub struct GitStore { 114 + git_dir: PathBuf, 115 + } 116 + 117 + impl GitStore { 118 + pub fn open(git_dir: PathBuf) -> Result<Self> { 119 + // Validate by opening once. 120 + Repository::open(&git_dir).map_err(|e| Error::Parse(format!("git open failed: {e}")))?; 121 + Ok(Self { git_dir }) 122 + } 123 + 124 + fn repo(&self) -> Result<Repository> { 125 + Repository::open(&self.git_dir).map_err(|e| Error::Parse(format!("git open: {e}"))) 126 + } 127 + 128 + fn refname(key: &str) -> String { 129 + format!("{REF_PREFIX}/{key}") 130 + } 131 + } 132 + 133 + fn read_blob(repo: &Repository, refname: &str) -> Result<Option<(Oid, Vec<u8>)>> { 134 + let r = match repo.find_reference(refname) { 135 + Ok(r) => r, 136 + Err(e) if e.code() == git2::ErrorCode::NotFound => return Ok(None), 137 + Err(e) => return Err(Error::Parse(format!("find_reference {refname}: {e}"))), 138 + }; 139 + let obj = r 140 + .peel(ObjectType::Blob) 141 + .map_err(|e| Error::Parse(format!("peel: {e}")))?; 142 + let blob = obj.as_blob().ok_or_else(|| Error::Parse("not a blob".into()))?; 143 + Ok(Some((obj.id(), blob.content().to_vec()))) 144 + } 145 + 146 + impl Store for GitStore { 147 + fn read(&self, key: &str) -> Result<Option<Vec<u8>>> { 148 + let repo = self.repo()?; 149 + Ok(read_blob(&repo, &Self::refname(key))?.map(|(_, data)| data)) 150 + } 151 + 152 + fn write(&self, key: &str, data: &[u8]) -> Result<()> { 153 + let repo = self.repo()?; 154 + let oid = repo 155 + .blob(data) 156 + .map_err(|e| Error::Parse(format!("blob write: {e}")))?; 157 + repo.reference(&Self::refname(key), oid, true, "tsk write") 158 + .map_err(|e| Error::Parse(format!("update ref: {e}")))?; 159 + 
Ok(()) 160 + } 161 + 162 + fn delete(&self, key: &str) -> Result<()> { 163 + let repo = self.repo()?; 164 + let refname = Self::refname(key); 165 + match repo.find_reference(&refname) { 166 + Ok(mut r) => { 167 + r.delete() 168 + .map_err(|e| Error::Parse(format!("delete ref: {e}")))?; 169 + Ok(()) 170 + } 171 + Err(e) if e.code() == git2::ErrorCode::NotFound => Ok(()), 172 + Err(e) => Err(Error::Parse(format!("find_reference: {e}"))), 173 + } 174 + } 175 + 176 + fn exists(&self, key: &str) -> Result<bool> { 177 + let repo = self.repo()?; 178 + match repo.find_reference(&Self::refname(key)) { 179 + Ok(_) => Ok(true), 180 + Err(e) if e.code() == git2::ErrorCode::NotFound => Ok(false), 181 + Err(e) => Err(Error::Parse(format!("find_reference: {e}"))), 182 + } 183 + } 184 + 185 + fn list(&self, prefix: &str) -> Result<Vec<String>> { 186 + let repo = self.repo()?; 187 + let glob = format!("{REF_PREFIX}/{prefix}/*"); 188 + let mut out = Vec::new(); 189 + let refs = repo 190 + .references_glob(&glob) 191 + .map_err(|e| Error::Parse(format!("references_glob: {e}")))?; 192 + for r in refs { 193 + let r = r.map_err(|e| Error::Parse(format!("ref iter: {e}")))?; 194 + if let Some(name) = r.name() 195 + && let Some(stripped) = name.strip_prefix(&format!("{REF_PREFIX}/")) 196 + { 197 + out.push(stripped.to_string()); 198 + } 199 + } 200 + Ok(out) 201 + } 202 + 203 + } 204 + 205 + // ─── High-level operations over any Store ─────────────────────────────────── 206 + 207 + pub fn next_id(store: &dyn Store) -> Result<Id> { 208 + let cur = store 209 + .read("next")? 
210 + .map(|b| String::from_utf8_lossy(&b).trim().to_string()) 211 + .unwrap_or_else(|| "1".to_string()); 212 + let id: u32 = cur.parse().unwrap_or(1); 213 + store.write("next", format!("{}\n", id + 1).as_bytes())?; 214 + Ok(Id(id)) 215 + } 216 + 217 + fn task_key(id: Id, archived: bool) -> String { 218 + let bucket = if archived { "archive" } else { "tasks" }; 219 + format!("{bucket}/{}", id.0) 220 + } 221 + 222 + #[derive(Copy, Clone, Eq, PartialEq, Debug)] 223 + pub enum Loc { 224 + Active, 225 + Archived, 226 + } 227 + 228 + pub fn task_location(store: &dyn Store, id: Id) -> Result<Option<Loc>> { 229 + if store.exists(&task_key(id, false))? { 230 + Ok(Some(Loc::Active)) 231 + } else if store.exists(&task_key(id, true))? { 232 + Ok(Some(Loc::Archived)) 233 + } else { 234 + Ok(None) 235 + } 236 + } 237 + 238 + pub fn read_task(store: &dyn Store, id: Id) -> Result<Option<(String, String, Loc)>> { 239 + for (loc, archived) in [(Loc::Active, false), (Loc::Archived, true)] { 240 + if let Some(data) = store.read(&task_key(id, archived))? 
{ 241 + let text = String::from_utf8_lossy(&data); 242 + let mut parts = text.splitn(2, '\n'); 243 + let title = parts.next().unwrap_or("").trim().to_string(); 244 + let body = parts.next().unwrap_or("").trim().to_string(); 245 + return Ok(Some((title, body, loc))); 246 + } 247 + } 248 + Ok(None) 249 + } 250 + 251 + pub fn write_task(store: &dyn Store, id: Id, title: &str, body: &str, loc: Loc) -> Result<()> { 252 + let payload = format!("{}\n\n{}", title.trim(), body.trim()); 253 + store.write(&task_key(id, loc == Loc::Archived), payload.as_bytes())?; 254 + Ok(()) 255 + } 256 + 257 + pub fn move_task(store: &dyn Store, id: Id, to: Loc) -> Result<()> { 258 + let from_archived = to == Loc::Active; 259 + let from_key = task_key(id, from_archived); 260 + let to_key = task_key(id, to == Loc::Archived); 261 + if from_key == to_key { 262 + return Ok(()); 263 + } 264 + let data = store.read(&from_key)?.ok_or_else(|| Error::Parse(format!("task {id} not present at {from_key}")))?; 265 + store.write(&to_key, &data)?; 266 + store.delete(&from_key)?; 267 + Ok(()) 268 + } 269 + 270 + pub fn list_active(store: &dyn Store) -> Result<Vec<Id>> { 271 + list_bucket(store, "tasks") 272 + } 273 + 274 + pub fn list_archive(store: &dyn Store) -> Result<Vec<Id>> { 275 + list_bucket(store, "archive") 276 + } 277 + 278 + fn list_bucket(store: &dyn Store, bucket: &str) -> Result<Vec<Id>> { 279 + let mut ids = Vec::new(); 280 + for key in store.list(bucket)? { 281 + if let Some(idstr) = key.strip_prefix(&format!("{bucket}/")) 282 + && let Ok(n) = idstr.trim_end_matches(".tsk").parse::<u32>() 283 + { 284 + ids.push(Id(n)); 285 + } 286 + } 287 + ids.sort_by_key(|i| i.0); 288 + Ok(ids) 289 + } 290 + 291 + pub fn read_attrs(store: &dyn Store, id: Id) -> Result<BTreeMap<String, String>> { 292 + let mut out = BTreeMap::new(); 293 + if let Some(data) = store.read(&format!("attrs/{}", id.0))? 
{ 294 + for line in String::from_utf8_lossy(&data).lines() { 295 + if let Some((k, v)) = line.split_once('\t') { 296 + out.insert(k.to_string(), v.to_string()); 297 + } 298 + } 299 + } 300 + Ok(out) 301 + } 302 + 303 + pub fn write_attrs(store: &dyn Store, id: Id, attrs: &BTreeMap<String, String>) -> Result<()> { 304 + if attrs.is_empty() { 305 + return store.delete(&format!("attrs/{}", id.0)); 306 + } 307 + let mut buf = String::new(); 308 + for (k, v) in attrs { 309 + buf.push_str(k); 310 + buf.push('\t'); 311 + buf.push_str(v); 312 + buf.push('\n'); 313 + } 314 + store.write(&format!("attrs/{}", id.0), buf.as_bytes()) 315 + } 316 + 317 + pub fn read_backlinks(store: &dyn Store, id: Id) -> Result<HashSet<Id>> { 318 + let mut out = HashSet::new(); 319 + if let Some(data) = store.read(&format!("backlinks/{}", id.0))? { 320 + for tok in String::from_utf8_lossy(&data).split(',') { 321 + if let Ok(i) = Id::from_str(tok.trim()) { 322 + out.insert(i); 323 + } 324 + } 325 + } 326 + Ok(out) 327 + } 328 + 329 + pub fn write_backlinks(store: &dyn Store, id: Id, links: &HashSet<Id>) -> Result<()> { 330 + if links.is_empty() { 331 + return store.delete(&format!("backlinks/{}", id.0)); 332 + } 333 + let joined = itertools::join(links, ","); 334 + store.write(&format!("backlinks/{}", id.0), joined.as_bytes()) 335 + } 336 + 337 + pub fn read_remotes(store: &dyn Store) -> Result<Vec<Remote>> { 338 + let mut out = Vec::new(); 339 + if let Some(data) = store.read("remotes")? 
{ 340 + for line in String::from_utf8_lossy(&data).lines() { 341 + let line = line.trim(); 342 + if line.is_empty() || line.starts_with('#') { 343 + continue; 344 + } 345 + if let Some((prefix, path)) = line.split_once('\t') { 346 + out.push(Remote { 347 + prefix: prefix.trim().to_string(), 348 + path: PathBuf::from(path.trim()), 349 + }); 350 + } 351 + } 352 + } 353 + Ok(out) 354 + } 355 + 356 + pub fn write_remotes(store: &dyn Store, remotes: &[Remote]) -> Result<()> { 357 + if remotes.is_empty() { 358 + return store.delete("remotes"); 359 + } 360 + let mut buf = String::new(); 361 + for r in remotes { 362 + buf.push_str(&format!("{}\t{}\n", r.prefix, r.path.display())); 363 + } 364 + store.write("remotes", buf.as_bytes()) 365 + } 366 + 367 + // ─── Detection / construction ────────────────────────────────────────────── 368 + 369 + pub fn detect_git_dir(start: &Path) -> Option<PathBuf> { 370 + crate::util::find_parent_with_dir(start.to_path_buf(), ".git") 371 + .ok() 372 + .flatten() 373 + } 374 + 375 + pub fn store_for(tsk_dir: &Path) -> Result<Box<dyn Store>> { 376 + let marker = tsk_dir.join(GIT_BACKED_MARKER); 377 + if marker.exists() { 378 + let git_dir = fs::read_to_string(&marker)?.trim().to_string(); 379 + Ok(Box::new(GitStore::open(PathBuf::from(git_dir))?)) 380 + } else { 381 + Ok(Box::new(FileStore::new(tsk_dir.to_path_buf()))) 382 + } 383 + } 384 + 385 + #[cfg(test)] 386 + mod test { 387 + use super::*; 388 + 389 + fn run_git_init(dir: &Path) { 390 + let s = std::process::Command::new("git") 391 + .args(["init", "-q"]) 392 + .current_dir(dir) 393 + .status() 394 + .unwrap(); 395 + assert!(s.success()); 396 + } 397 + 398 + fn store_pair() -> (tempfile::TempDir, Box<dyn Store>, Box<dyn Store>) { 399 + let dir = tempfile::tempdir().unwrap(); 400 + let file_root = dir.path().join("file"); 401 + let git_root = dir.path().join("git"); 402 + fs::create_dir_all(&file_root).unwrap(); 403 + fs::create_dir_all(&git_root).unwrap(); 404 + run_git_init(&git_root); 
405 + let f: Box<dyn Store> = Box::new(FileStore::new(file_root)); 406 + let g: Box<dyn Store> = Box::new(GitStore::open(git_root.join(".git")).unwrap()); 407 + (dir, f, g) 408 + } 409 + 410 + #[test] 411 + fn test_basic_blob_ops_both_backends() { 412 + let (_d, file, git) = store_pair(); 413 + for s in [file, git] { 414 + assert_eq!(s.read("missing").unwrap(), None); 415 + assert!(!s.exists("k").unwrap()); 416 + s.write("k", b"hello").unwrap(); 417 + assert!(s.exists("k").unwrap()); 418 + assert_eq!(s.read("k").unwrap().as_deref(), Some(&b"hello"[..])); 419 + s.write("k", b"world").unwrap(); 420 + assert_eq!(s.read("k").unwrap().as_deref(), Some(&b"world"[..])); 421 + s.delete("k").unwrap(); 422 + assert!(!s.exists("k").unwrap()); 423 + // delete nonexistent is fine 424 + s.delete("k").unwrap(); 425 + } 426 + } 427 + 428 + #[test] 429 + fn test_list_both_backends() { 430 + let (_d, file, git) = store_pair(); 431 + for s in [file, git] { 432 + s.write("tasks/1", b"a").unwrap(); 433 + s.write("tasks/2", b"b").unwrap(); 434 + s.write("archive/3", b"c").unwrap(); 435 + let mut tasks = s.list("tasks").unwrap(); 436 + tasks.sort(); 437 + assert_eq!(tasks, vec!["tasks/1", "tasks/2"]); 438 + let arch = s.list("archive").unwrap(); 439 + assert_eq!(arch, vec!["archive/3"]); 440 + assert!(s.list("nothing").unwrap().is_empty()); 441 + } 442 + } 443 + 444 + #[test] 445 + fn test_high_level_task_ops_both_backends() { 446 + let (_d, file, git) = store_pair(); 447 + for s in [file.as_ref(), git.as_ref()] { 448 + let id = next_id(s).unwrap(); 449 + assert_eq!(id, Id(1)); 450 + let id2 = next_id(s).unwrap(); 451 + assert_eq!(id2, Id(2)); 452 + 453 + write_task(s, id, "title", "body", Loc::Active).unwrap(); 454 + let (t, b, loc) = read_task(s, id).unwrap().unwrap(); 455 + assert_eq!(t, "title"); 456 + assert_eq!(b, "body"); 457 + assert_eq!(loc, Loc::Active); 458 + 459 + move_task(s, id, Loc::Archived).unwrap(); 460 + assert_eq!(task_location(s, id).unwrap(), Some(Loc::Archived)); 
461 + move_task(s, id, Loc::Active).unwrap(); 462 + assert_eq!(task_location(s, id).unwrap(), Some(Loc::Active)); 463 + 464 + let mut attrs = BTreeMap::new(); 465 + attrs.insert("foo".to_string(), "bar".to_string()); 466 + write_attrs(s, id, &attrs).unwrap(); 467 + assert_eq!(read_attrs(s, id).unwrap(), attrs); 468 + 469 + let mut bl = HashSet::new(); 470 + bl.insert(Id(7)); 471 + bl.insert(Id(9)); 472 + write_backlinks(s, id, &bl).unwrap(); 473 + assert_eq!(read_backlinks(s, id).unwrap(), bl); 474 + 475 + // Empty attrs/backlinks delete the blob. 476 + write_attrs(s, id, &BTreeMap::new()).unwrap(); 477 + assert!(read_attrs(s, id).unwrap().is_empty()); 478 + write_backlinks(s, id, &HashSet::new()).unwrap(); 479 + assert!(read_backlinks(s, id).unwrap().is_empty()); 480 + } 481 + } 482 + 483 + #[test] 484 + fn test_remotes_round_trip_both_backends() { 485 + let (_d, file, git) = store_pair(); 486 + for s in [file.as_ref(), git.as_ref()] { 487 + assert!(read_remotes(s).unwrap().is_empty()); 488 + let remotes = vec![ 489 + Remote { prefix: "a".into(), path: PathBuf::from("/x") }, 490 + Remote { prefix: "b".into(), path: PathBuf::from("/y") }, 491 + ]; 492 + write_remotes(s, &remotes).unwrap(); 493 + assert_eq!(read_remotes(s).unwrap(), remotes); 494 + write_remotes(s, &[]).unwrap(); 495 + assert!(read_remotes(s).unwrap().is_empty()); 496 + } 497 + } 498 + 499 + #[test] 500 + fn test_list_active_archive_helpers() { 501 + let (_d, file, git) = store_pair(); 502 + for s in [file.as_ref(), git.as_ref()] { 503 + write_task(s, Id(1), "t1", "", Loc::Active).unwrap(); 504 + write_task(s, Id(2), "t2", "", Loc::Archived).unwrap(); 505 + write_task(s, Id(3), "t3", "", Loc::Active).unwrap(); 506 + assert_eq!(list_active(s).unwrap(), vec![Id(1), Id(3)]); 507 + assert_eq!(list_archive(s).unwrap(), vec![Id(2)]); 508 + } 509 + } 510 + }
-2
src/errors.rs
··· 12 12 AlreadyInitialized, 13 13 #[error("Unable to read file: {0}")] 14 14 Io(#[from] std::io::Error), 15 - #[error("Unable to acquire locc: {0}")] 16 - Lock(nix::errno::Errno), 17 15 #[error("Unable to parse id: {0}")] 18 16 ParseId(#[from] std::num::ParseIntError), 19 17 #[error("General parsing error: {0}")]
-206
src/git_store.rs
··· 1 - //! Mirror tsk workspace state into git refs under `refs/tsk/`. 2 - //! 3 - //! When `tsk init` is run inside a git repository, a `.tsk/git-backed` marker is 4 - //! written containing the absolute path to the `.git` directory. After every 5 - //! mutating command, [`sync`] walks the workspace and writes each task / index 6 - //! file as a git blob, updating refs to point at them. The on-disk files remain 7 - //! the source of truth; git refs are an additive durable mirror. 8 - 9 - use crate::errors::{Error, Result}; 10 - use std::path::{Path, PathBuf}; 11 - use std::process::{Command, Stdio}; 12 - 13 - const MARKER: &str = "git-backed"; 14 - const REF_PREFIX: &str = "refs/tsk"; 15 - 16 - pub fn detect_git_dir(start: &Path) -> Option<PathBuf> { 17 - crate::util::find_parent_with_dir(start.to_path_buf(), ".git").ok().flatten() 18 - } 19 - 20 - pub fn write_marker(tsk_dir: &Path, git_dir: &Path) -> Result<()> { 21 - std::fs::write(tsk_dir.join(MARKER), git_dir.to_string_lossy().as_bytes())?; 22 - Ok(()) 23 - } 24 - 25 - pub fn read_marker(tsk_dir: &Path) -> Option<PathBuf> { 26 - let s = std::fs::read_to_string(tsk_dir.join(MARKER)).ok()?; 27 - let trimmed = s.trim(); 28 - if trimmed.is_empty() { 29 - return None; 30 - } 31 - Some(PathBuf::from(trimmed)) 32 - } 33 - 34 - fn git(git_dir: &Path) -> Command { 35 - let mut c = Command::new("git"); 36 - c.env("GIT_DIR", git_dir); 37 - c 38 - } 39 - 40 - fn hash_object(git_dir: &Path, path: &Path) -> Result<String> { 41 - let out = git(git_dir) 42 - .args(["hash-object", "-w", "--"]) 43 - .arg(path) 44 - .stderr(Stdio::piped()) 45 - .output()?; 46 - if !out.status.success() { 47 - return Err(Error::Parse(format!( 48 - "git hash-object failed: {}", 49 - String::from_utf8_lossy(&out.stderr) 50 - ))); 51 - } 52 - Ok(String::from_utf8_lossy(&out.stdout).trim().to_string()) 53 - } 54 - 55 - fn update_ref(git_dir: &Path, refname: &str, hash: &str) -> Result<()> { 56 - let status = git(git_dir) 57 - .args(["update-ref", 
refname, hash]) 58 - .stderr(Stdio::piped()) 59 - .status()?; 60 - if !status.success() { 61 - return Err(Error::Parse(format!("git update-ref {refname} failed"))); 62 - } 63 - Ok(()) 64 - } 65 - 66 - fn delete_ref(git_dir: &Path, refname: &str) -> Result<()> { 67 - let _ = git(git_dir) 68 - .args(["update-ref", "-d", refname]) 69 - .stderr(Stdio::null()) 70 - .status()?; 71 - Ok(()) 72 - } 73 - 74 - fn list_refs(git_dir: &Path, prefix: &str) -> Result<Vec<String>> { 75 - let out = git(git_dir) 76 - .args(["for-each-ref", "--format=%(refname)", prefix]) 77 - .output()?; 78 - if !out.status.success() { 79 - return Ok(Vec::new()); 80 - } 81 - Ok(String::from_utf8_lossy(&out.stdout) 82 - .lines() 83 - .map(|s| s.to_string()) 84 - .collect()) 85 - } 86 - 87 - /// Walk the workspace and mirror its contents to git refs. No-op if no marker. 88 - pub fn sync(tsk_dir: &Path) -> Result<()> { 89 - let Some(git_dir) = read_marker(tsk_dir) else { 90 - return Ok(()); 91 - }; 92 - if !git_dir.exists() { 93 - return Ok(()); 94 - } 95 - 96 - let mut wanted: std::collections::HashSet<String> = std::collections::HashSet::new(); 97 - 98 - // Mirror archive task contents. 99 - let archive_dir = tsk_dir.join("archive"); 100 - if archive_dir.exists() { 101 - for entry in std::fs::read_dir(&archive_dir)? { 102 - let entry = entry?; 103 - let path = entry.path(); 104 - if !path.is_file() { 105 - continue; 106 - } 107 - let Some(name) = path.file_name().and_then(|n| n.to_str()) else { 108 - continue; 109 - }; 110 - if !name.starts_with("tsk-") || !name.ends_with(".tsk") { 111 - continue; 112 - } 113 - let hash = hash_object(&git_dir, &path)?; 114 - // Determine whether the task is currently active (has a symlink in tasks/). 
115 - let active = tsk_dir.join("tasks").join(name).exists(); 116 - let bucket = if active { "tasks" } else { "archive" }; 117 - let refname = format!("{REF_PREFIX}/{bucket}/{}", name.trim_end_matches(".tsk")); 118 - update_ref(&git_dir, &refname, &hash)?; 119 - wanted.insert(refname); 120 - } 121 - } 122 - 123 - // Mirror top-level metadata files. 124 - for meta in ["index", "next", "cache", "remotes"] { 125 - let path = tsk_dir.join(meta); 126 - if path.is_file() { 127 - let hash = hash_object(&git_dir, &path)?; 128 - let refname = format!("{REF_PREFIX}/meta/{meta}"); 129 - update_ref(&git_dir, &refname, &hash)?; 130 - wanted.insert(refname); 131 - } 132 - } 133 - 134 - // Prune stale refs. 135 - for refname in list_refs(&git_dir, REF_PREFIX)? { 136 - if !wanted.contains(&refname) { 137 - delete_ref(&git_dir, &refname)?; 138 - } 139 - } 140 - 141 - Ok(()) 142 - } 143 - 144 - #[cfg(test)] 145 - mod test { 146 - use super::*; 147 - 148 - fn run(cmd: &mut Command) { 149 - let out = cmd.output().unwrap(); 150 - assert!(out.status.success(), "{:?}", out); 151 - } 152 - 153 - #[test] 154 - fn test_detect_and_sync() { 155 - let dir = tempfile::tempdir().unwrap(); 156 - let root = dir.path(); 157 - 158 - // Initialize a real git repo. 159 - run(Command::new("git").args(["init", "-q"]).current_dir(root)); 160 - 161 - // Create a .tsk workspace inside it. 162 - let tsk_dir = root.join(".tsk"); 163 - std::fs::create_dir(&tsk_dir).unwrap(); 164 - std::fs::create_dir(tsk_dir.join("tasks")).unwrap(); 165 - std::fs::create_dir(tsk_dir.join("archive")).unwrap(); 166 - 167 - let git_dir = detect_git_dir(&tsk_dir).expect("git dir found"); 168 - write_marker(&tsk_dir, &git_dir).unwrap(); 169 - assert_eq!(read_marker(&tsk_dir), Some(git_dir.clone())); 170 - 171 - // Create one active task and one archived task. 
172 - std::fs::write(tsk_dir.join("archive/tsk-1.tsk"), "active title\n\nbody").unwrap(); 173 - std::os::unix::fs::symlink( 174 - PathBuf::from("../archive/tsk-1.tsk"), 175 - tsk_dir.join("tasks/tsk-1.tsk"), 176 - ) 177 - .unwrap(); 178 - std::fs::write(tsk_dir.join("archive/tsk-2.tsk"), "archived title\n\n").unwrap(); 179 - std::fs::write(tsk_dir.join("index"), "tsk-1\tactive title\t0\n").unwrap(); 180 - std::fs::write(tsk_dir.join("next"), "3\n").unwrap(); 181 - 182 - sync(&tsk_dir).unwrap(); 183 - 184 - let refs = list_refs(&git_dir, REF_PREFIX).unwrap(); 185 - assert!(refs.contains(&"refs/tsk/tasks/tsk-1".to_string())); 186 - assert!(refs.contains(&"refs/tsk/archive/tsk-2".to_string())); 187 - assert!(refs.contains(&"refs/tsk/meta/index".to_string())); 188 - assert!(refs.contains(&"refs/tsk/meta/next".to_string())); 189 - 190 - // Drop tsk-1 (remove symlink) and re-sync; ref should move to archive. 191 - std::fs::remove_file(tsk_dir.join("tasks/tsk-1.tsk")).unwrap(); 192 - sync(&tsk_dir).unwrap(); 193 - let refs = list_refs(&git_dir, REF_PREFIX).unwrap(); 194 - assert!(!refs.contains(&"refs/tsk/tasks/tsk-1".to_string())); 195 - assert!(refs.contains(&"refs/tsk/archive/tsk-1".to_string())); 196 - } 197 - 198 - #[test] 199 - fn test_sync_noop_without_marker() { 200 - let dir = tempfile::tempdir().unwrap(); 201 - let tsk_dir = dir.path().join(".tsk"); 202 - std::fs::create_dir(&tsk_dir).unwrap(); 203 - // No marker, no git dir — should not error. 204 - sync(&tsk_dir).unwrap(); 205 - } 206 - }
+3 -10
src/main.rs
··· 1 1 mod attrs; 2 + mod backend; 2 3 mod errors; 3 4 mod fzf; 4 - mod git_store; 5 5 mod stack; 6 6 mod task; 7 7 mod util; ··· 325 325 Commands::GitSetup { gitignore } => command_git_setup(dir, gitignore), 326 326 Commands::Reopen { task_id } => command_reopen(dir, task_id), 327 327 }; 328 + let _ = sync_dir; 328 329 let result = var_name; 329 - // Best-effort mirror to git refs if the workspace is git-backed. Failures here 330 - // do not fail the user's command — the on-disk store remains authoritative. 331 - if result.is_ok() 332 - && let Ok(ws) = Workspace::from_path(sync_dir) 333 - && let Err(e) = ws.sync_git() 334 - { 335 - eprintln!("warning: git ref sync failed: {e}"); 336 - } 337 330 match result { 338 331 Ok(_) => exit(0), 339 332 Err(e) => { ··· 464 457 task.title = title.replace(['\n', '\r'], " "); 465 458 task.body = body.to_string(); 466 459 workspace.handle_metadata(&task, pre_links)?; 467 - task.save()?; 460 + workspace.save_task(&task)?; 468 461 } 469 462 Ok(()) 470 463 }
+62 -95
src/stack.rs
··· 1 1 #![allow(dead_code)] 2 2 //! The Task stack. Tasks created with `push` end up at the top here. It is invalid for a task that 3 3 //! has been completed/archived to be on the stack. 4 + //! 5 + //! The stack is persisted as a single blob keyed `index` in the workspace's 6 + //! [`Store`](crate::backend::Store). Each line is `tsk-N\ttitle\ttimestamp`. 4 7 8 + use crate::backend::Store; 5 9 use crate::errors::{Error, Result}; 6 - use crate::util; 7 10 use std::collections::VecDeque; 8 11 use std::collections::vec_deque::Iter; 9 12 use std::fmt::Display; 10 - use std::fs::File; 11 - use std::io::{self, BufRead, BufReader, Seek, Write}; 12 - use std::path::Path; 13 13 use std::str::FromStr; 14 14 use std::time::{Duration, SystemTime, UNIX_EPOCH}; 15 - 16 - use nix::fcntl::{Flock, FlockArg}; 17 15 18 16 use crate::workspace::{Id, Task}; 19 17 20 - const TASKSFOLDER: &str = "tasks"; 21 - const INDEXFILE: &str = "index"; 22 - 18 + #[derive(Clone)] 23 19 pub struct StackItem { 24 20 pub id: Id, 25 21 pub title: String, ··· 28 24 29 25 impl Display for StackItem { 30 26 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 31 - // .trim is used here on the title because there may be a newline in here if we read the 32 - // title from the task file. 33 - write!( 34 - f, 35 - // NOTE: we do NOT print the access time. 36 - "{}\t{}", 37 - self.id, 38 - self.title.trim(), 39 - ) 27 + write!(f, "{}\t{}", self.id, self.title.trim()) 40 28 } 41 29 } 42 30 43 - impl TryFrom<Task> for StackItem { 44 - type Error = Error; 45 - 46 - fn try_from(value: Task) -> std::result::Result<Self, Self::Error> { 47 - let modify_time = value.file.metadata()?.modified()?; 48 - Ok(Self { 31 + impl From<&Task> for StackItem { 32 + fn from(value: &Task) -> Self { 33 + Self { 49 34 id: value.id, 50 - // replace tabs with spaces, they're not valid in StackItem titles. 
51 35 title: value.title.replace("\t", " "), 52 - modify_time, 53 - }) 36 + modify_time: SystemTime::now(), 37 + } 54 38 } 55 39 } 56 40 57 - fn eof() -> Error { 58 - Error::Io(io::Error::new( 59 - io::ErrorKind::UnexpectedEof, 60 - "Unexpected end of file", 61 - )) 41 + impl From<Task> for StackItem { 42 + fn from(value: Task) -> Self { 43 + Self::from(&value) 44 + } 62 45 } 63 46 64 47 impl FromStr for StackItem { 65 48 type Err = Error; 66 49 67 50 fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { 68 - let mut parts = s.trim().split("\t"); 51 + let mut parts = s.trim().split('\t'); 69 52 let id: Id = parts 70 53 .next() 71 - .ok_or(Error::Parse( 72 - "Incomplete index line. Missing tsk ID".to_owned(), 73 - ))? 54 + .ok_or(Error::Parse("Incomplete index line. Missing tsk ID".into()))? 74 55 .parse()?; 75 - let title: String = parts 56 + let title = parts 76 57 .next() 77 - .ok_or(Error::Parse( 78 - "Incomplete index line. Missing title.".to_owned(), 79 - ))? 58 + .ok_or(Error::Parse("Incomplete index line. Missing title.".into()))? 80 59 .trim() 81 60 .to_string(); 82 - // parse the timestamp as an integer 83 - let index_epoch: u64 = parts.next().unwrap_or("0").parse()?; 84 - // get a usable system time from the UNIX epoch, defaulting to the UNIX_EPOCH if there's 85 - // any failures. This means that if there's errors, we will always read the title and 86 - // modify_time from the task file. 61 + let index_epoch: u64 = parts.next().unwrap_or("0").parse().unwrap_or(0); 87 62 let modify_time = UNIX_EPOCH 88 63 .checked_add(Duration::from_secs(index_epoch)) 89 64 .unwrap_or(UNIX_EPOCH); 90 - Ok(Self { 91 - id, 92 - title, 93 - modify_time, 94 - }) 95 - } 96 - } 97 - 98 - impl StackItem { 99 - /// Parses a [`StackItem`] from a string. 
The expected format is a tab-delimited line with the 100 - /// files: task id title 101 - pub fn from_line(workspace_path: &Path, line: String) -> Result<Self> { 102 - let mut stack_item: StackItem = line.parse()?; 103 - 104 - let task = util::flopen( 105 - workspace_path 106 - .join(TASKSFOLDER) 107 - .join(stack_item.id.filename()), 108 - FlockArg::LockExclusive, 109 - )?; 110 - let task_modify_time = task.metadata()?.modified()?; 111 - // if the task file has been modified since we last looked at it, re-read the title and 112 - // metadata 113 - if (task_modify_time - Duration::from_secs(1)) > stack_item.modify_time { 114 - stack_item.title.clear(); 115 - BufReader::new(&*task).read_line(&mut stack_item.title)?; 116 - stack_item.modify_time = task_modify_time; 117 - } 118 - Ok(stack_item) 65 + Ok(Self { id, title, modify_time }) 119 66 } 120 67 } 121 68 122 69 pub struct TaskStack { 123 - /// All items within the stack 124 - all: VecDeque<StackItem>, 125 - file: Flock<File>, 70 + pub all: VecDeque<StackItem>, 126 71 } 127 72 128 73 impl TaskStack { 129 - pub fn from_tskdir(workspace_path: &Path) -> Result<Self> { 130 - let file = util::flopen(workspace_path.join(INDEXFILE), FlockArg::LockExclusive)?; 131 - let index = BufReader::new(&*file).lines(); 74 + pub fn parse(text: &str) -> Result<Self> { 132 75 let mut all = VecDeque::new(); 133 - for line in index { 134 - let stack_item = StackItem::from_line(workspace_path, line?)?; 135 - all.push_back(stack_item); 76 + for line in text.lines() { 77 + if line.trim().is_empty() { 78 + continue; 79 + } 80 + all.push_back(line.parse()?); 136 81 } 137 - Ok(Self { all, file }) 82 + Ok(Self { all }) 138 83 } 139 84 140 - /// Saves the task stack to disk. 
141 - pub fn save(mut self) -> Result<()> { 142 - // Clear the file 143 - self.file.seek(std::io::SeekFrom::Start(0))?; 144 - self.file.set_len(0)?; 145 - for item in self.all.iter() { 146 - let time = item.modify_time.duration_since(UNIX_EPOCH)?.as_secs(); 147 - self.file 148 - .write_all(format!("{item}\t{}\n", time).as_bytes())?; 85 + pub fn load(store: &dyn Store) -> Result<Self> { 86 + let raw = store.read("index")?.unwrap_or_default(); 87 + Self::parse(&String::from_utf8_lossy(&raw)) 88 + } 89 + 90 + pub fn serialize(&self) -> String { 91 + let mut s = String::new(); 92 + for item in &self.all { 93 + let ts = item 94 + .modify_time 95 + .duration_since(UNIX_EPOCH) 96 + .map(|d| d.as_secs()) 97 + .unwrap_or(0); 98 + s.push_str(&format!("{item}\t{ts}\n")); 149 99 } 150 - Ok(()) 100 + s 101 + } 102 + 103 + pub fn save(&self, store: &dyn Store) -> Result<()> { 104 + store.write("index", self.serialize().as_bytes()) 151 105 } 152 106 153 107 pub fn push(&mut self, item: StackItem) { ··· 186 140 pub fn get(&self, index: usize) -> Option<&StackItem> { 187 141 self.all.get(index) 188 142 } 143 + 144 + pub fn position(&self, id: Id) -> Option<usize> { 145 + self.all.iter().position(|i| i.id == id) 146 + } 147 + 148 + /// Refresh stack item titles from authoritative task content. 149 + pub fn refresh_titles(&mut self, store: &dyn Store) -> Result<()> { 150 + for item in self.all.iter_mut() { 151 + if let Some((title, _, _)) = crate::backend::read_task(store, item.id)? { 152 + item.title = title.replace('\t', " "); 153 + } 154 + } 155 + Ok(()) 156 + } 189 157 } 190 158 191 159 impl IntoIterator for TaskStack { 192 160 type Item = StackItem; 193 - 194 161 type IntoIter = std::collections::vec_deque::IntoIter<Self::Item>; 195 162 196 163 fn into_iter(self) -> Self::IntoIter {
+2 -20
src/util.rs
··· 1 - use crate::errors::{Error, Result}; 1 + use crate::errors::Result; 2 2 use std::fs; 3 3 use std::os::unix::fs::MetadataExt; 4 - use std::{ 5 - fs::{File, OpenOptions}, 6 - path::{Path, PathBuf}, 7 - }; 8 - 9 - use nix::fcntl::{Flock, FlockArg}; 10 - 11 - pub fn flopen(path: PathBuf, mode: FlockArg) -> Result<Flock<File>> { 12 - let file = OpenOptions::new() 13 - .read(true) 14 - .write(true) 15 - .create(true) 16 - .truncate(false) 17 - .open(path)?; 18 - Flock::lock(file, mode).map_err(|(_, errno)| Error::Lock(errno)) 19 - } 4 + use std::path::{Path, PathBuf}; 20 5 21 6 /// Recursively searches upwards for a directory 22 7 pub fn find_parent_with_dir( 23 8 dir: PathBuf, 24 9 searching_for: impl AsRef<Path>, 25 10 ) -> Result<Option<PathBuf>> { 26 - // Create a new pathbuf to modify, we slap a segment onto the end but then pop it off right 27 - // away 28 11 let mut d = dir.join(&searching_for); 29 12 while d.pop() { 30 13 let check = d.join(&searching_for); 31 14 if check.exists() { 32 15 if fs::metadata(&check)?.dev() != fs::metadata(&dir)?.dev() { 33 - // we hit a filesystem boundary 34 16 return Ok(None); 35 17 } 36 18 return Ok(Some(check));
+331 -684
src/workspace.rs
··· 1 1 #![allow(dead_code)] 2 - use nix::fcntl::{Flock, FlockArg}; 3 - use xattr::FileExt; 2 + //! High-level workspace API. The workspace owns a [`Store`](crate::backend::Store) 3 + //! and exposes typed task / stack / remote operations on top of it. 4 4 5 5 use crate::attrs::Attrs; 6 + use crate::backend::{self, Loc, Store}; 6 7 use crate::errors::{Error, Result}; 7 8 use crate::stack::{StackItem, TaskStack}; 8 9 use crate::task::parse as parse_task; 9 - use crate::{fzf, git_store, util}; 10 + use crate::{fzf, util}; 10 11 use std::collections::{BTreeMap, HashSet, vec_deque}; 11 - use std::ffi::OsString; 12 12 use std::fmt::Display; 13 - use std::fs::{File, remove_file}; 14 - use std::io::{BufRead as _, BufReader, Read, Seek, SeekFrom}; 15 - use std::ops::Deref; 16 - use std::os::unix::fs::symlink; 17 13 use std::path::PathBuf; 18 - use std::process::{Command, Stdio}; 19 14 use std::str::FromStr; 20 - use std::{fs::OpenOptions, io::Write}; 21 15 22 - const INDEXFILE: &str = "index"; 23 - const TITLECACHEFILE: &str = "cache"; 24 - const REMOTESFILE: &str = "remotes"; 25 - const XATTRPREFIX: &str = "user.tsk."; 26 - const BACKREFXATTR: &str = "user.tsk.references"; 27 16 /// A unique identifier for a task. When referenced in text, it is prefixed with `tsk-`. 28 17 #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] 29 18 pub struct Id(pub u32); 30 19 31 20 impl FromStr for Id { 32 21 type Err = Error; 33 - 34 22 fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { 35 23 let upper = s.to_uppercase(); 36 24 let s = upper ··· 54 42 } 55 43 56 44 impl Id { 57 - /// Returns the filename for a task with this id. 58 45 pub fn filename(&self) -> String { 59 46 format!("tsk-{}.tsk", self.0) 60 47 } ··· 72 59 } 73 60 } 74 61 75 - pub struct Workspace { 76 - /// The path to the workspace root, excluding the .tsk directory. This should *contain* the 77 - /// .tsk directory. 
78 - pub path: PathBuf, 79 - } 80 - 81 62 #[derive(Clone, Debug, Eq, PartialEq)] 82 63 pub struct Remote { 83 64 pub prefix: String, ··· 90 71 } 91 72 } 92 73 74 + pub struct Workspace { 75 + /// The path to the .tsk marker directory. 76 + pub path: PathBuf, 77 + store: Box<dyn Store>, 78 + } 79 + 93 80 impl Workspace { 94 81 pub fn init(path: PathBuf) -> Result<()> { 95 - // TODO: detect if in a git repo and add .tsk/ to `.git/info/exclude` 96 82 let tsk_dir = path.join(".tsk"); 97 83 if tsk_dir.exists() { 98 84 return Err(Error::AlreadyInitialized); 99 85 } 100 86 std::fs::create_dir(&tsk_dir)?; 101 - // Create the tasks directory 102 - std::fs::create_dir(tsk_dir.join("tasks"))?; 103 - // Create the archive directory 104 - std::fs::create_dir(tsk_dir.join("archive"))?; 105 - let mut next = OpenOptions::new() 106 - .read(true) 107 - .write(true) 108 - .create(true) 109 - .truncate(true) 110 - .open(tsk_dir.join("next"))?; 111 - // initialize the next file with ID 1 112 - next.write_all(b"1\n")?; 113 - // If we're inside a git repository, mark this workspace as git-backed so 114 - // future mutations mirror state into refs/tsk/. Outside of git this is a 115 - // no-op and the file-based store is the only persistence. 116 - if let Some(git_dir) = git_store::detect_git_dir(&path) { 117 - git_store::write_marker(&tsk_dir, &git_dir)?; 87 + // If we're in a git repo, mark this workspace as git-backed and use refs 88 + // for storage. Otherwise fall back to the file backend (tasks live under 89 + // .tsk/). 90 + if let Some(git_dir) = backend::detect_git_dir(&path) { 91 + std::fs::write( 92 + tsk_dir.join(backend::GIT_BACKED_MARKER), 93 + git_dir.to_string_lossy().as_bytes(), 94 + )?; 95 + // GitStore is fully ref-based — no on-disk task data. 96 + } else { 97 + // Pre-create directory tree for the file backend. 
98 + std::fs::create_dir(tsk_dir.join("tasks"))?; 99 + std::fs::create_dir(tsk_dir.join("archive"))?; 100 + std::fs::write(tsk_dir.join("next"), b"1\n")?; 118 101 } 119 102 Ok(()) 120 103 } 121 104 122 - /// Mirror the workspace into git refs if this workspace was initialized 123 - /// inside a git repository. No-op otherwise. 124 - pub fn sync_git(&self) -> Result<()> { 125 - git_store::sync(&self.path) 126 - } 127 - 128 105 pub fn from_path(path: PathBuf) -> Result<Self> { 129 106 let tsk_dir = util::find_parent_with_dir(path, ".tsk")?.ok_or(Error::Uninitialized)?; 130 - Ok(Self { path: tsk_dir }) 107 + let store = backend::store_for(&tsk_dir)?; 108 + Ok(Self { path: tsk_dir, store }) 109 + } 110 + 111 + pub fn store(&self) -> &dyn Store { 112 + self.store.as_ref() 113 + } 114 + 115 + pub fn is_git_backed(&self) -> bool { 116 + self.path.join(backend::GIT_BACKED_MARKER).exists() 131 117 } 132 118 133 119 fn resolve(&self, identifier: TaskIdentifier) -> Result<Id> { ··· 138 124 let stack_item = stack.get(r as usize).ok_or(Error::NoTasks)?; 139 125 Ok(stack_item.id) 140 126 } 141 - TaskIdentifier::Find { 142 - exclude_body, 143 - archived, 144 - } => self 127 + TaskIdentifier::Find { exclude_body, archived } => self 145 128 .search(None, !exclude_body, archived)? 146 129 .ok_or(Error::NotSelected), 147 130 } 148 131 } 149 132 150 - /// Increments the `next` counter and returns the previous value. 
151 133 pub fn next_id(&self) -> Result<Id> { 152 - let mut file = util::flopen(self.path.join("next"), FlockArg::LockExclusive)?; 153 - let mut buf = String::new(); 154 - file.read_to_string(&mut buf)?; 155 - let id = buf.trim().parse::<u32>()?; 156 - // reset the files contents 157 - file.set_len(0)?; 158 - file.seek(SeekFrom::Start(0))?; 159 - // store the *next* if 160 - file.write_all(format!("{}\n", id + 1).as_bytes())?; 161 - Ok(Id(id)) 134 + backend::next_id(self.store()) 162 135 } 163 136 164 137 pub fn new_task(&self, title: String, body: String) -> Result<Task> { 165 - // WARN: we could improperly increment the id if the task is not written to disk/errors. 166 - // But who cares 167 138 let id = self.next_id()?; 168 - let task_name = format!("tsk-{}.tsk", id.0); 169 - // the task goes in the archive first 170 - let task_path = self.path.join("archive").join(&task_name); 171 - let mut file = util::flopen(task_path.clone(), FlockArg::LockExclusive)?; 172 - file.write_all(format!("{title}\n\n{body}").as_bytes())?; 173 - // create a hardlink to the task dir to mark it as "open" 174 - symlink( 175 - PathBuf::from("../archive").join(&task_name), 176 - self.path.join("tasks").join(task_name), 177 - )?; 139 + backend::write_task(self.store(), id, &title, &body, Loc::Active)?; 140 + Ok(Task { id, title, body, attributes: Default::default() }) 141 + } 142 + 143 + pub fn task(&self, identifier: TaskIdentifier) -> Result<Task> { 144 + let id = self.resolve(identifier)?; 145 + let (title, body, _loc) = backend::read_task(self.store(), id)? 
146 + .ok_or_else(|| Error::Parse(format!("Task {id} not found")))?; 147 + let attrs_map = backend::read_attrs(self.store(), id)?; 178 148 Ok(Task { 179 149 id, 180 150 title, 181 151 body, 182 - file, 183 - attributes: Default::default(), 152 + attributes: Attrs::from_written(attrs_map), 184 153 }) 185 154 } 186 155 187 - pub fn task(&self, identifier: TaskIdentifier) -> Result<Task> { 188 - let id = self.resolve(identifier)?; 156 + pub fn save_task(&self, task: &Task) -> Result<()> { 157 + let loc = match backend::task_location(self.store(), task.id)? { 158 + Some(l) => l, 159 + None => Loc::Active, 160 + }; 161 + backend::write_task(self.store(), task.id, &task.title, &task.body, loc)?; 162 + // Persist any modified attrs. 163 + let mut combined: BTreeMap<String, String> = task.attributes.written.clone(); 164 + for (k, v) in task.attributes.updated.iter() { 165 + combined.insert(k.clone(), v.clone()); 166 + } 167 + backend::write_attrs(self.store(), task.id, &combined)?; 168 + // After editing, refresh stack title for this id. 
169 + self.update_stack_title(task.id, &task.title)?; 170 + Ok(()) 171 + } 189 172 190 - let file = util::flopen( 191 - self.path.join("tasks").join(format!("tsk-{}.tsk", id.0)), 192 - FlockArg::LockExclusive, 193 - )?; 194 - let mut title = String::new(); 195 - let mut body = String::new(); 196 - let mut reader = BufReader::new(&*file); 197 - reader.read_line(&mut title)?; 198 - reader.read_to_string(&mut body)?; 199 - drop(reader); 200 - let mut read_attributes = BTreeMap::new(); 201 - if let Ok(attrs) = file.list_xattr() { 202 - for attr in attrs { 203 - if let Some((key, value)) = Self::read_xattr(&file, attr) { 204 - read_attributes.insert(key, value); 205 - } 173 + fn update_stack_title(&self, id: Id, title: &str) -> Result<()> { 174 + let mut stack = self.read_stack()?; 175 + let mut changed = false; 176 + for item in stack.all.iter_mut() { 177 + if item.id == id { 178 + item.title = title.replace('\t', " "); 179 + changed = true; 206 180 } 207 181 } 208 - Ok(Task { 209 - id, 210 - file, 211 - title: title.trim().to_string(), 212 - body: body.trim().to_string(), 213 - attributes: Attrs::from_written(read_attributes), 214 - }) 182 + if changed { 183 + stack.save(self.store())?; 184 + } 185 + Ok(()) 215 186 } 216 187 217 188 pub fn handle_metadata(&self, tsk: &Task, pre_links: Option<HashSet<Id>>) -> Result<()> { 218 - // Parse the task and update any backlinks 219 189 if let Some(parsed_task) = parse_task(&tsk.to_string()) { 220 190 let internal_links = parsed_task.intenal_links(); 221 191 for link in &internal_links { ··· 232 202 } 233 203 234 204 fn add_backlink(&self, to: Id, from: Id) -> Result<()> { 235 - let to_task = self.task(TaskIdentifier::Id(to))?; 236 - let (_, current_backlinks_text) = 237 - Self::read_xattr(&to_task.file, BACKREFXATTR.into()).unwrap_or_default(); 238 - let mut backlinks: HashSet<Id> = current_backlinks_text 239 - .split(',') 240 - .filter_map(|s| Id::from_str(s).ok()) 241 - .collect(); 242 - backlinks.insert(from); 243 - 
Self::set_xattr( 244 - &to_task.file, 245 - BACKREFXATTR, 246 - &itertools::join(backlinks, ","), 247 - ) 205 + let mut links = backend::read_backlinks(self.store(), to)?; 206 + links.insert(from); 207 + backend::write_backlinks(self.store(), to, &links) 248 208 } 249 209 250 210 fn remove_backlink(&self, to: Id, from: Id) -> Result<()> { 251 - let to_task = self.task(TaskIdentifier::Id(to))?; 252 - let (_, current_backlinks_text) = 253 - Self::read_xattr(&to_task.file, BACKREFXATTR.into()).unwrap_or_default(); 254 - let mut backlinks: HashSet<Id> = current_backlinks_text 255 - .split(',') 256 - .filter_map(|s| Id::from_str(s).ok()) 257 - .collect(); 258 - backlinks.remove(&from); 259 - Self::set_xattr( 260 - &to_task.file, 261 - BACKREFXATTR, 262 - &itertools::join(backlinks, ","), 263 - ) 264 - } 265 - 266 - /// Reads an xattr from a file, stripping the prefix for 267 - fn read_xattr<D: Deref<Target = File>>(file: &D, key: OsString) -> Option<(String, String)> { 268 - // this *shouldn't* allocate, but it does O(n) scan the str for UTF-8 correctness 269 - let parsedkey = key.as_os_str().to_str()?.strip_prefix(XATTRPREFIX)?; 270 - let valuebytes = file.get_xattr(&key).ok().flatten()?; 271 - Some((parsedkey.to_string(), String::from_utf8(valuebytes).ok()?)) 272 - } 273 - 274 - fn set_xattr<D: Deref<Target = File>>(file: &D, key: &str, value: &str) -> Result<()> { 275 - let key = if !key.starts_with(XATTRPREFIX) { 276 - format!("{XATTRPREFIX}.{key}") 277 - } else { 278 - key.to_string() 279 - }; 280 - Ok(file.set_xattr(key, value.as_bytes())?) 
211 + let mut links = backend::read_backlinks(self.store(), to)?; 212 + links.remove(&from); 213 + backend::write_backlinks(self.store(), to, &links) 281 214 } 282 215 283 216 pub fn read_stack(&self) -> Result<TaskStack> { 284 - TaskStack::from_tskdir(&self.path) 217 + TaskStack::load(self.store()) 285 218 } 286 219 287 220 pub fn push_task(&self, task: Task) -> Result<()> { 288 - let mut stack = TaskStack::from_tskdir(&self.path)?; 289 - stack.push(task.try_into()?); 290 - stack.save()?; 291 - Ok(()) 221 + let mut stack = self.read_stack()?; 222 + stack.push((&task).into()); 223 + stack.save(self.store()) 292 224 } 293 225 294 226 pub fn append_task(&self, task: Task) -> Result<()> { 295 - let mut stack = TaskStack::from_tskdir(&self.path)?; 296 - stack.push_back(task.try_into()?); 297 - stack.save()?; 298 - Ok(()) 227 + let mut stack = self.read_stack()?; 228 + stack.push_back((&task).into()); 229 + stack.save(self.store()) 299 230 } 300 231 301 232 pub fn swap_top(&self) -> Result<()> { 302 - let mut stack = TaskStack::from_tskdir(&self.path)?; 233 + let mut stack = self.read_stack()?; 303 234 stack.swap(); 304 - stack.save()?; 305 - Ok(()) 235 + stack.save(self.store()) 306 236 } 307 237 308 238 pub fn rot(&self) -> Result<()> { 309 - let mut stack = TaskStack::from_tskdir(&self.path)?; 310 - let top = stack.pop(); 311 - let second = stack.pop(); 312 - let third = stack.pop(); 313 - 314 - if top.is_none() || second.is_none() || third.is_none() { 315 - return Ok(()); 239 + let mut stack = self.read_stack()?; 240 + let (a, b, c) = (stack.pop(), stack.pop(), stack.pop()); 241 + if let (Some(a), Some(b), Some(c)) = (a, b, c) { 242 + stack.push(b); 243 + stack.push(a); 244 + stack.push(c); 245 + stack.save(self.store())?; 316 246 } 317 - 318 - // unwrap is ok here because we checked above 319 - stack.push(second.unwrap()); 320 - stack.push(top.unwrap()); 321 - stack.push(third.unwrap()); 322 - stack.save()?; 323 247 Ok(()) 324 248 } 325 249 326 - /// The inverse of 
tor. Pushes the top item behind the second item, shifting #2 and #3 to #1 327 - /// and #2 respectively. 328 250 pub fn tor(&self) -> Result<()> { 329 - let mut stack = TaskStack::from_tskdir(&self.path)?; 330 - let top = stack.pop(); 331 - let second = stack.pop(); 332 - let third = stack.pop(); 333 - 334 - if top.is_none() || second.is_none() || third.is_none() { 335 - return Ok(()); 251 + let mut stack = self.read_stack()?; 252 + let (a, b, c) = (stack.pop(), stack.pop(), stack.pop()); 253 + if let (Some(a), Some(b), Some(c)) = (a, b, c) { 254 + stack.push(a); 255 + stack.push(c); 256 + stack.push(b); 257 + stack.save(self.store())?; 336 258 } 337 - 338 - stack.push(top.unwrap()); 339 - stack.push(third.unwrap()); 340 - stack.push(second.unwrap()); 341 - stack.save()?; 342 259 Ok(()) 343 260 } 344 261 345 262 pub fn drop(&self, identifier: TaskIdentifier) -> Result<Option<Id>> { 346 263 let id = self.resolve(identifier)?; 347 264 let mut stack = self.read_stack()?; 348 - let index = &stack.iter().map(|i| i.id).position(|i| i == id); 349 - // TODO: remove the softlink in .tsk/tasks 350 - let task = if let Some(index) = index { 351 - let prioritized_task = stack.remove(*index); 352 - stack.save()?; 353 - prioritized_task.map(|t| t.id) 265 + let removed = if let Some(idx) = stack.position(id) { 266 + let item = stack.remove(idx); 267 + stack.save(self.store())?; 268 + item.map(|t| t.id) 354 269 } else { 355 270 None 356 271 }; 357 - remove_file(self.path.join("tasks").join(format!("{id}.tsk")))?; 358 - Ok(task) 272 + // Move the task content to the archive bucket. 273 + if backend::task_location(self.store(), id)? == Some(Loc::Active) { 274 + backend::move_task(self.store(), id, Loc::Archived)?; 275 + } 276 + Ok(removed) 359 277 } 360 278 361 279 pub fn search( ··· 370 288 self.read_stack()? 
371 289 }; 372 290 if include_archived { 373 - let archive_dir = self.path.join("archive"); 374 - let mut all_tasks: Vec<SearchTask> = stack 375 - .into_iter() 376 - .filter_map(|item| { 377 - self.task(TaskIdentifier::Id(item.id)) 378 - .ok() 379 - .map(|t| t.bare()) 380 - }) 381 - .collect(); 382 - let mut indexed_ids: HashSet<Id> = HashSet::new(); 383 - for t in &all_tasks { 384 - indexed_ids.insert(t.id); 291 + let mut all_tasks: Vec<SearchTask> = Vec::new(); 292 + let mut seen: HashSet<Id> = HashSet::new(); 293 + for item in stack.iter() { 294 + if let Ok(t) = self.task(TaskIdentifier::Id(item.id)) { 295 + seen.insert(t.id); 296 + all_tasks.push(t.bare()); 297 + } 385 298 } 386 - if archive_dir.exists() { 387 - for entry in std::fs::read_dir(&archive_dir)? { 388 - let entry = entry?; 389 - let path = entry.path(); 390 - if !path.is_file() { 391 - continue; 392 - } 393 - let filename = entry.file_name(); 394 - let filename_str = filename.to_string_lossy(); 395 - if let Some(id_str) = filename_str 396 - .strip_prefix("tsk-") 397 - .and_then(|s| s.strip_suffix(".tsk")) 398 - { 399 - if let Ok(id_num) = id_str.parse::<u32>() { 400 - let id = Id(id_num); 401 - if !indexed_ids.contains(&id) { 402 - if let Ok(contents) = std::fs::read_to_string(&path) { 403 - let mut lines = contents.splitn(2, '\n'); 404 - let title = lines.next().unwrap_or("").trim().to_string(); 405 - let body = lines.next().unwrap_or("").trim().to_string(); 406 - all_tasks.push(SearchTask { id, title, body }); 407 - } 408 - } 409 - } 410 - } 299 + for id in backend::list_archive(self.store())? { 300 + if seen.contains(&id) { 301 + continue; 302 + } 303 + if let Some((title, body, _)) = backend::read_task(self.store(), id)? { 304 + all_tasks.push(SearchTask { id, title, body }); 411 305 } 412 306 } 413 307 if search_body { ··· 425 319 ], 426 320 )?) 427 321 } else { 428 - Ok(fzf::select::<_, Id, _>( 429 - all_tasks, 430 - ["--delimiter=\t", "--accept-nth=1"], 431 - )?) 
322 + Ok(fzf::select::<_, Id, _>(all_tasks, ["--delimiter=\t", "--accept-nth=1"])?) 432 323 } 433 324 } else if search_body { 434 - let loader = LazyTaskLoader { 435 - files: stack.into_iter(), 436 - workspace: self, 437 - }; 325 + let loader = LazyTaskLoader { items: stack.into_iter(), workspace: self }; 438 326 Ok(fzf::select::<_, Id, _>( 439 327 loader, 440 328 [ ··· 449 337 ], 450 338 )?) 451 339 } else { 452 - Ok(fzf::select::<_, Id, _>( 453 - stack, 454 - ["--delimiter=\t", "--accept-nth=1"], 455 - )?) 340 + Ok(fzf::select::<_, Id, _>(stack, ["--delimiter=\t", "--accept-nth=1"])?) 456 341 } 457 342 } 458 343 459 344 pub fn prioritize(&self, identifier: TaskIdentifier) -> Result<()> { 460 345 let id = self.resolve(identifier)?; 461 346 let mut stack = self.read_stack()?; 462 - let index = &stack.iter().map(|i| i.id).position(|i| i == id); 463 - if let Some(index) = index { 464 - let prioritized_task = stack.remove(*index); 465 - // unwrap here is safe because we just searched for the index and know it exists 466 - stack.push(prioritized_task.unwrap()); 467 - stack.save()?; 347 + if let Some(idx) = stack.position(id) { 348 + let task = stack.remove(idx).unwrap(); 349 + stack.push(task); 350 + stack.save(self.store())?; 468 351 } 469 352 Ok(()) 470 353 } ··· 472 355 pub fn deprioritize(&self, identifier: TaskIdentifier) -> Result<()> { 473 356 let id = self.resolve(identifier)?; 474 357 let mut stack = self.read_stack()?; 475 - let index = &stack.iter().map(|i| i.id).position(|i| i == id); 476 - if let Some(index) = index { 477 - let deprioritized_task = stack.remove(*index); 478 - // unwrap here is safe because we just searched for the index and know it exists 479 - stack.push_back(deprioritized_task.unwrap()); 480 - stack.save()?; 358 + if let Some(idx) = stack.position(id) { 359 + let task = stack.remove(idx).unwrap(); 360 + stack.push_back(task); 361 + stack.save(self.store())?; 481 362 } 482 363 Ok(()) 483 364 } 484 365 366 + /// Remove "active" task 
entries that aren't in the index. 485 367 pub fn clean(&self) -> Result<()> { 486 368 let stack = self.read_stack()?; 487 - let indexed_ids: HashSet<Id> = stack.iter().map(|item| item.id).collect(); 488 - 489 - let tasks_dir = self.path.join("tasks"); 490 - if !tasks_dir.exists() { 491 - return Ok(()); 492 - } 493 - 494 - for entry in std::fs::read_dir(&tasks_dir)? { 495 - let entry = entry?; 496 - let path = entry.path(); 497 - if !path.is_file() { 498 - continue; 499 - } 500 - let filename = entry.file_name(); 501 - let filename_str = filename.to_string_lossy(); 502 - if let Some(id_str) = filename_str 503 - .strip_prefix("tsk-") 504 - .and_then(|s| s.strip_suffix(".tsk")) 505 - { 506 - if let Ok(id_num) = id_str.parse::<u32>() { 507 - let id = Id(id_num); 508 - if !indexed_ids.contains(&id) { 509 - remove_file(&path)?; 510 - eprintln!("Removed orphaned task: {id}"); 511 - } 512 - } 369 + let indexed: HashSet<Id> = stack.iter().map(|i| i.id).collect(); 370 + for id in backend::list_active(self.store())? { 371 + if !indexed.contains(&id) { 372 + // Move orphan to archive rather than delete, to avoid data loss. 
373 + backend::move_task(self.store(), id, Loc::Archived)?; 374 + eprintln!("Removed orphaned task: {id}"); 513 375 } 514 376 } 515 377 Ok(()) 516 378 } 517 379 518 380 pub fn read_remotes(&self) -> Result<Vec<Remote>> { 519 - let remotes_path = self.path.join(REMOTESFILE); 520 - if !remotes_path.exists() { 521 - return Ok(Vec::new()); 522 - } 523 - let file = util::flopen(remotes_path, FlockArg::LockShared)?; 524 - let reader = BufReader::new(&*file); 525 - let mut remotes = Vec::new(); 526 - for line in reader.lines() { 527 - let line = line?; 528 - let line = line.trim(); 529 - if line.is_empty() || line.starts_with('#') { 530 - continue; 531 - } 532 - if let Some((prefix, path)) = line.split_once('\t') { 533 - remotes.push(Remote { 534 - prefix: prefix.trim().to_string(), 535 - path: PathBuf::from(path.trim()), 536 - }); 537 - } 538 - } 539 - Ok(remotes) 381 + backend::read_remotes(self.store()) 540 382 } 541 383 542 384 pub fn add_remote(&self, prefix: &str, path: &str) -> Result<()> { ··· 548 390 prefix: prefix.to_string(), 549 391 path: PathBuf::from(path), 550 392 }); 551 - self.write_remotes(&remotes) 393 + backend::write_remotes(self.store(), &remotes) 552 394 } 553 395 554 396 pub fn remove_remote(&self, prefix: &str) -> Result<()> { ··· 558 400 if new_remotes.len() == len { 559 401 return Err(Error::Parse(format!("Remote '{prefix}' not found"))); 560 402 } 561 - self.write_remotes(&new_remotes) 562 - } 563 - 564 - fn write_remotes(&self, remotes: &[Remote]) -> Result<()> { 565 - let remotes_path = self.path.join(REMOTESFILE); 566 - let mut file = OpenOptions::new() 567 - .write(true) 568 - .create(true) 569 - .truncate(true) 570 - .open(remotes_path)?; 571 - for remote in remotes { 572 - writeln!(file, "{}\t{}", remote.prefix, remote.path.display())?; 573 - } 574 - Ok(()) 403 + backend::write_remotes(self.store(), &new_remotes) 575 404 } 576 405 577 406 pub fn resolve_foreign_link(&self, prefix: &str, id: u32) -> Result<Option<Task>> { ··· 587 416 588 
417 pub fn reopen(&self, identifier: TaskIdentifier) -> Result<Id> { 589 418 let id = self.resolve(identifier)?; 590 - let archive_path = self.path.join("archive").join(id.filename()); 591 - if !archive_path.exists() { 592 - return Err(Error::Parse(format!("Task {id} not found in archive"))); 593 - } 594 - let tasks_path = self.path.join("tasks").join(id.filename()); 595 - if tasks_path.exists() { 596 - return Err(Error::Parse(format!("Task {id} is already open"))); 419 + match backend::task_location(self.store(), id)? { 420 + None => return Err(Error::Parse(format!("Task {id} not found in archive"))), 421 + Some(Loc::Active) => return Err(Error::Parse(format!("Task {id} is already open"))), 422 + Some(Loc::Archived) => {} 597 423 } 598 - symlink(PathBuf::from("../archive").join(id.filename()), &tasks_path)?; 424 + backend::move_task(self.store(), id, Loc::Active)?; 425 + let (title, _, _) = backend::read_task(self.store(), id)? 426 + .ok_or_else(|| Error::Parse(format!("Task {id} content missing after move")))?; 599 427 let mut stack = self.read_stack()?; 600 - let title = std::fs::read_to_string(&archive_path)? 601 - .lines() 602 - .next() 603 - .unwrap_or("") 604 - .trim() 605 - .to_string(); 606 - let modify_time = std::fs::metadata(&archive_path)?.modified()?; 607 - let stack_item = StackItem { 428 + stack.push(StackItem { 608 429 id, 609 430 title: title.replace('\t', " "), 610 - modify_time, 611 - }; 612 - stack.push(stack_item); 613 - stack.save()?; 431 + modify_time: std::time::SystemTime::now(), 432 + }); 433 + stack.save(self.store())?; 614 434 Ok(id) 615 435 } 616 436 } ··· 619 439 pub id: Id, 620 440 pub title: String, 621 441 pub body: String, 622 - pub file: Flock<File>, 623 442 pub attributes: Attrs, 624 443 } 625 444 ··· 630 449 } 631 450 632 451 impl Task { 633 - /// Consumes a task and saves it to disk. 
634 - pub fn save(mut self) -> Result<()> { 635 - self.file.set_len(0)?; 636 - self.file.seek(SeekFrom::Start(0))?; 637 - self.file.write_all(self.title.trim().as_bytes())?; 638 - self.file.write_all(b"\n\n")?; 639 - self.file.write_all(self.body.trim().as_bytes())?; 640 - Ok(()) 641 - } 642 - 643 - /// Returns a [`SearchTas`] which is plain task data with no file or attrs 644 452 fn bare(self) -> SearchTask { 645 - SearchTask { 646 - id: self.id, 647 - title: self.title, 648 - body: self.body, 649 - } 453 + SearchTask { id: self.id, title: self.title, body: self.body } 650 454 } 651 455 } 652 456 653 - /// A task container without a file handle 654 457 pub struct SearchTask { 655 458 pub id: Id, 656 459 pub title: String, ··· 668 471 } 669 472 670 473 struct LazyTaskLoader<'a> { 671 - files: vec_deque::IntoIter<StackItem>, 474 + items: vec_deque::IntoIter<StackItem>, 672 475 workspace: &'a Workspace, 673 476 } 674 477 675 478 impl Iterator for LazyTaskLoader<'_> { 676 479 type Item = SearchTask; 677 - 678 480 fn next(&mut self) -> Option<Self::Item> { 679 - let stack_item = self.files.next()?; 680 - let task = self 681 - .workspace 682 - .task(TaskIdentifier::Id(stack_item.id)) 683 - .ok()?; 481 + let item = self.items.next()?; 482 + let task = self.workspace.task(TaskIdentifier::Id(item.id)).ok()?; 684 483 Some(task.bare()) 685 484 } 686 485 } 687 486 688 - fn select_task(input: impl IntoIterator<Item = SearchTask>) -> Result<Option<Id>> { 689 - let mut child = Command::new("cat") 690 - .stderr(Stdio::inherit()) 691 - .stdin(Stdio::piped()) 692 - .stdout(Stdio::piped()) 693 - .spawn()?; 694 - let child_in = child.stdin.as_mut().unwrap(); 695 - for item in input.into_iter() { 696 - writeln!(child_in, "{item}\0")?; 697 - } 698 - let output = child.wait_with_output()?; 699 - if output.stdout.is_empty() { 700 - Ok(None) 701 - } else { 702 - Ok(Some(String::from_utf8(output.stdout)?.parse()?)) 703 - } 704 - } 705 - 706 487 #[cfg(test)] 707 488 mod test { 708 489 use 
super::*; 709 - use std::fs; 710 490 711 - fn setup_test_workspace() -> (tempfile::TempDir, Workspace) { 712 - let dir = tempfile::tempdir().unwrap(); 713 - let path = dir.path().to_path_buf(); 714 - Workspace::init(path.clone()).unwrap(); 715 - let workspace = Workspace::from_path(path.clone()).unwrap(); 716 - (dir, workspace) 491 + fn run_git_init(dir: &std::path::Path) { 492 + let s = std::process::Command::new("git") 493 + .args(["init", "-q"]) 494 + .current_dir(dir) 495 + .status() 496 + .unwrap(); 497 + assert!(s.success()); 717 498 } 718 499 719 - #[test] 720 - fn test_bare_task_display() { 721 - let task = SearchTask { 722 - id: Id(123), 723 - title: "Hello, world".to_string(), 724 - body: "The body of the task.\nAnother line is here.".to_string(), 725 - }; 726 - assert_eq!( 727 - "tsk-123\tHello, world\n\nThe body of the task.\nAnother line is here.", 728 - task.to_string() 729 - ); 500 + /// Create both a file-backed and a git-backed workspace for the same test. 501 + fn setup_dual() -> (tempfile::TempDir, Workspace, Workspace) { 502 + let dir = tempfile::tempdir().unwrap(); 503 + let file_root = dir.path().join("file"); 504 + let git_root = dir.path().join("git"); 505 + std::fs::create_dir_all(&file_root).unwrap(); 506 + std::fs::create_dir_all(&git_root).unwrap(); 507 + run_git_init(&git_root); 508 + Workspace::init(file_root.clone()).unwrap(); 509 + Workspace::init(git_root.clone()).unwrap(); 510 + let f = Workspace::from_path(file_root).unwrap(); 511 + let g = Workspace::from_path(git_root).unwrap(); 512 + assert!(!f.is_git_backed(), "file workspace should not be git-backed"); 513 + assert!(g.is_git_backed(), "git workspace should be git-backed"); 514 + (dir, f, g) 730 515 } 731 516 732 - #[test] 733 - fn test_task_display() { 734 - let task = Task { 735 - id: Id(123), 736 - title: "Hello, world".to_string(), 737 - body: "The body of the task.".to_string(), 738 - file: util::flopen("/dev/null".into(), FlockArg::LockShared).unwrap(), 739 - attributes: 
Default::default(), 740 - }; 741 - assert_eq!("Hello, world\n\nThe body of the task.", task.to_string()); 742 - } 517 + fn run_full_lifecycle(ws: &Workspace) { 518 + // Push two tasks, drop one, verify state. 519 + let t1 = ws.new_task("First".to_string(), "body one".to_string()).unwrap(); 520 + let id1 = t1.id; 521 + ws.push_task(t1).unwrap(); 522 + let t2 = ws.new_task("Second".to_string(), "body two".to_string()).unwrap(); 523 + let id2 = t2.id; 524 + ws.push_task(t2).unwrap(); 743 525 744 - #[test] 745 - fn test_clean_removes_orphaned_tasks() { 746 - let (_dir, workspace) = setup_test_workspace(); 526 + let stack = ws.read_stack().unwrap(); 527 + assert_eq!(stack.iter().count(), 2); 528 + assert_eq!(stack.iter().next().unwrap().id, id2, "newest on top"); 747 529 748 - { 749 - let ws = Workspace::from_path(workspace.path.clone()).unwrap(); 750 - let task1 = ws 751 - .new_task("Task one".to_string(), "body1".to_string()) 752 - .unwrap(); 753 - ws.push_task(task1).unwrap(); 530 + // Read back the task content. 531 + let read = ws.task(TaskIdentifier::Id(id1)).unwrap(); 532 + assert_eq!(read.title, "First"); 533 + assert_eq!(read.body, "body one"); 754 534 755 - let task2 = ws 756 - .new_task("Task two".to_string(), "body2".to_string()) 757 - .unwrap(); 758 - ws.push_task(task2).unwrap(); 759 - } 535 + // Drop top. 536 + ws.drop(TaskIdentifier::Id(id2)).unwrap(); 537 + let stack = ws.read_stack().unwrap(); 538 + assert_eq!(stack.iter().count(), 1); 539 + assert_eq!(stack.iter().next().unwrap().id, id1); 760 540 761 - let stack_count = { 762 - let stack = workspace.read_stack().unwrap(); 763 - stack.iter().count() 764 - }; 765 - assert_eq!(stack_count, 2); 766 - 767 - let tasks_dir = workspace.path.join("tasks"); 768 - let task_files: Vec<_> = fs::read_dir(&tasks_dir) 769 - .unwrap() 770 - .filter(|e| e.as_ref().unwrap().path().is_file()) 771 - .collect(); 772 - assert_eq!(task_files.len(), 2); 541 + // Reopen. 
542 + ws.reopen(TaskIdentifier::Id(id2)).unwrap(); 543 + let stack = ws.read_stack().unwrap(); 544 + assert_eq!(stack.iter().count(), 2); 773 545 774 - // Manually create an orphaned task file (not in the index) to simulate corruption 775 - let orphan_path = tasks_dir.join("tsk-999.tsk"); 776 - fs::write(&orphan_path, "orphan\n\nbody").unwrap(); 546 + // Reopen non-archived fails. 547 + assert!(ws.reopen(TaskIdentifier::Id(id1)).is_err()); 777 548 778 - let task_files_with_orphan: Vec<_> = fs::read_dir(&tasks_dir) 779 - .unwrap() 780 - .filter(|e| e.as_ref().unwrap().path().is_file()) 781 - .collect(); 782 - assert_eq!( 783 - task_files_with_orphan.len(), 784 - 3, 785 - "orphaned symlink should exist" 786 - ); 549 + // Edit and save. 550 + let mut t = ws.task(TaskIdentifier::Id(id1)).unwrap(); 551 + t.title = "First (edited)".into(); 552 + t.body = "new body".into(); 553 + ws.save_task(&t).unwrap(); 554 + let read = ws.task(TaskIdentifier::Id(id1)).unwrap(); 555 + assert_eq!(read.title, "First (edited)"); 556 + let stack = ws.read_stack().unwrap(); 557 + let item = stack.iter().find(|i| i.id == id1).unwrap(); 558 + assert_eq!(item.title, "First (edited)", "stack title should refresh on save"); 787 559 788 - workspace.clean().unwrap(); 560 + // Remotes. 561 + ws.add_remote("up", "/path").unwrap(); 562 + let remotes = ws.read_remotes().unwrap(); 563 + assert_eq!(remotes.len(), 1); 564 + ws.remove_remote("up").unwrap(); 565 + assert!(ws.read_remotes().unwrap().is_empty()); 789 566 790 - let task_files_cleaned: Vec<_> = fs::read_dir(&tasks_dir) 791 - .unwrap() 792 - .filter(|e| e.as_ref().unwrap().path().is_file()) 793 - .collect(); 794 - assert_eq!( 795 - task_files_cleaned.len(), 796 - 2, 797 - "clean should remove orphaned task" 798 - ); 567 + // Backlinks. 
568 + ws.handle_metadata( 569 + &Task { 570 + id: id1, 571 + title: "x".into(), 572 + body: format!("see [[{id2}]]"), 573 + attributes: Default::default(), 574 + }, 575 + None, 576 + ) 577 + .unwrap(); 578 + let bl = backend::read_backlinks(ws.store(), id2).unwrap(); 579 + assert!(bl.contains(&id1)); 799 580 } 800 581 801 582 #[test] 802 - fn test_clean_does_nothing_when_no_orphans() { 803 - let (_dir, workspace) = setup_test_workspace(); 804 - let ws = Workspace::from_path(workspace.path.clone()).unwrap(); 805 - 806 - let task = ws 807 - .new_task("Only task".to_string(), "body".to_string()) 808 - .unwrap(); 809 - ws.push_task(task).unwrap(); 810 - 811 - workspace.clean().unwrap(); 812 - 813 - let tasks_dir = workspace.path.join("tasks"); 814 - let task_files: Vec<_> = fs::read_dir(&tasks_dir) 815 - .unwrap() 816 - .filter(|e| e.as_ref().unwrap().path().is_file()) 817 - .collect(); 818 - assert_eq!(task_files.len(), 1); 583 + fn test_full_lifecycle_file_backend() { 584 + let dir = tempfile::tempdir().unwrap(); 585 + Workspace::init(dir.path().to_path_buf()).unwrap(); 586 + let ws = Workspace::from_path(dir.path().to_path_buf()).unwrap(); 587 + assert!(!ws.is_git_backed()); 588 + run_full_lifecycle(&ws); 819 589 } 820 590 821 591 #[test] 822 - fn test_remote_add_and_list() { 823 - let (_dir, workspace) = setup_test_workspace(); 824 - 825 - let remotes = workspace.read_remotes().unwrap(); 826 - assert!(remotes.is_empty()); 827 - 828 - workspace.add_remote("jira", "/path/to/jira").unwrap(); 829 - 830 - let remotes = workspace.read_remotes().unwrap(); 831 - assert_eq!(remotes.len(), 1); 832 - assert_eq!(remotes[0].prefix, "jira"); 833 - assert_eq!(remotes[0].path, PathBuf::from("/path/to/jira")); 834 - 835 - workspace.add_remote("gl", "/path/to/gitlab").unwrap(); 836 - 837 - let remotes = workspace.read_remotes().unwrap(); 838 - assert_eq!(remotes.len(), 2); 592 + fn test_full_lifecycle_git_backend() { 593 + let dir = tempfile::tempdir().unwrap(); 594 + 
run_git_init(dir.path()); 595 + Workspace::init(dir.path().to_path_buf()).unwrap(); 596 + let ws = Workspace::from_path(dir.path().to_path_buf()).unwrap(); 597 + assert!(ws.is_git_backed()); 598 + run_full_lifecycle(&ws); 839 599 } 840 600 841 601 #[test] 842 - fn test_remote_add_duplicate_fails() { 843 - let (_dir, workspace) = setup_test_workspace(); 844 - 845 - workspace.add_remote("jira", "/path/to/jira").unwrap(); 846 - 847 - let result = workspace.add_remote("jira", "/other/path"); 848 - assert!(result.is_err()); 602 + fn test_init_picks_backend_correctly() { 603 + let (_d, f, g) = setup_dual(); 604 + assert!(!f.is_git_backed()); 605 + assert!(g.is_git_backed()); 849 606 } 850 607 851 608 #[test] 852 - fn test_remote_remove() { 853 - let (_dir, workspace) = setup_test_workspace(); 609 + fn test_clean_archives_orphaned_tasks() { 610 + let (_d, file, git) = setup_dual(); 611 + for ws in [&file, &git] { 612 + // Push a task, then directly orphan it in the store. 613 + let t = ws.new_task("Indexed".into(), "ok".into()).unwrap(); 614 + ws.push_task(t).unwrap(); 615 + // Write an unindexed task directly to the store. 
616 + backend::write_task(ws.store(), Id(999), "orphan", "", Loc::Active).unwrap(); 854 617 855 - workspace.add_remote("jira", "/path/to/jira").unwrap(); 856 - workspace.add_remote("gl", "/path/to/gl").unwrap(); 857 - 858 - workspace.remove_remote("jira").unwrap(); 859 - 860 - let remotes = workspace.read_remotes().unwrap(); 861 - assert_eq!(remotes.len(), 1); 862 - assert_eq!(remotes[0].prefix, "gl"); 863 - } 864 - 865 - #[test] 866 - fn test_remote_remove_nonexistent_fails() { 867 - let (_dir, workspace) = setup_test_workspace(); 868 - 869 - let result = workspace.remove_remote("nonexistent"); 870 - assert!(result.is_err()); 618 + let active_before = backend::list_active(ws.store()).unwrap(); 619 + assert!(active_before.contains(&Id(999))); 620 + ws.clean().unwrap(); 621 + let active_after = backend::list_active(ws.store()).unwrap(); 622 + assert!(!active_after.contains(&Id(999))); 623 + let archived = backend::list_archive(ws.store()).unwrap(); 624 + assert!(archived.contains(&Id(999))); 625 + } 871 626 } 872 627 873 628 #[test] 874 629 fn test_remote_persistence() { 875 - let (_dir, workspace) = setup_test_workspace(); 876 - 877 - workspace.add_remote("jira", "/path/to/jira").unwrap(); 878 - workspace.add_remote("gl", "/path/to/gl").unwrap(); 879 - 880 - let workspace2 = Workspace::from_path(workspace.path.clone()).unwrap(); 881 - let remotes = workspace2.read_remotes().unwrap(); 882 - assert_eq!(remotes.len(), 2); 883 - assert_eq!(remotes[0].prefix, "jira"); 884 - assert_eq!(remotes[1].prefix, "gl"); 630 + let (_d, file, git) = setup_dual(); 631 + for ws in [&file, &git] { 632 + ws.add_remote("a", "/x").unwrap(); 633 + ws.add_remote("b", "/y").unwrap(); 634 + let ws2 = Workspace::from_path(ws.path.clone()).unwrap(); 635 + assert_eq!(ws2.read_remotes().unwrap().len(), 2); 636 + assert!(ws.add_remote("a", "/z").is_err()); 637 + assert!(ws.remove_remote("nope").is_err()); 638 + ws.remove_remote("a").unwrap(); 639 + assert_eq!(ws.read_remotes().unwrap().len(), 1); 
640 + } 885 641 } 886 642 887 643 #[test] 888 - fn test_remote_display() { 889 - let remote = Remote { 890 - prefix: "jira".to_string(), 891 - path: PathBuf::from("/path/to/jira"), 892 - }; 893 - assert_eq!("jira\t/path/to/jira", remote.to_string()); 644 + fn test_search_archived_round_trip() { 645 + let (_d, file, git) = setup_dual(); 646 + for ws in [&file, &git] { 647 + let t = ws.new_task("Archived".into(), "a".into()).unwrap(); 648 + let id = t.id; 649 + ws.push_task(t).unwrap(); 650 + ws.drop(TaskIdentifier::Id(id)).unwrap(); 651 + assert_eq!(backend::task_location(ws.store(), id).unwrap(), Some(Loc::Archived)); 652 + } 894 653 } 895 654 896 655 #[test] 897 - fn test_git_setup_exclude() { 898 - let (_dir, workspace) = setup_test_workspace(); 899 - let git_dir = workspace.path.join(".git"); 900 - let info_dir = git_dir.join("info"); 901 - fs::create_dir_all(&info_dir).unwrap(); 902 - 903 - let exclude_path = info_dir.join("exclude"); 904 - assert!(!exclude_path.exists()); 905 - 906 - let content = std::fs::read_to_string(&exclude_path).unwrap_or_default(); 907 - assert!(!content.contains(".tsk/")); 908 - 909 - // Simulate git_setup logic 910 - let mut file = OpenOptions::new() 911 - .append(true) 912 - .create(true) 913 - .open(&exclude_path) 914 - .unwrap(); 915 - writeln!(file, ".tsk/").unwrap(); 916 - 917 - let content = std::fs::read_to_string(&exclude_path).unwrap(); 918 - assert!(content.contains(".tsk/")); 656 + fn test_rot_tor_swap() { 657 + let (_d, file, git) = setup_dual(); 658 + for ws in [&file, &git] { 659 + let mut ids = Vec::new(); 660 + for n in 0..3 { 661 + let t = ws.new_task(format!("t{n}"), "".into()).unwrap(); 662 + ids.push(t.id); 663 + ws.push_task(t).unwrap(); 664 + } 665 + // Stack now: [ids[2], ids[1], ids[0]] 666 + ws.swap_top().unwrap(); 667 + let s = ws.read_stack().unwrap(); 668 + let order: Vec<_> = s.iter().map(|i| i.id).collect(); 669 + assert_eq!(order, vec![ids[1], ids[2], ids[0]]); 670 + ws.swap_top().unwrap(); // back 671 
+ ws.rot().unwrap(); 672 + ws.tor().unwrap(); 673 + let s = ws.read_stack().unwrap(); 674 + let order: Vec<_> = s.iter().map(|i| i.id).collect(); 675 + assert_eq!(order, vec![ids[2], ids[1], ids[0]], "rot then tor is identity"); 676 + } 919 677 } 920 678 921 679 #[test] 922 - fn test_git_setup_no_duplicate() { 923 - let (_dir, workspace) = setup_test_workspace(); 924 - let git_dir = workspace.path.join(".git"); 925 - let info_dir = git_dir.join("info"); 926 - fs::create_dir_all(&info_dir).unwrap(); 927 - 928 - let exclude_path = info_dir.join("exclude"); 929 - fs::write(&exclude_path, ".tsk/\nother/").unwrap(); 930 - 931 - let content = std::fs::read_to_string(&exclude_path).unwrap(); 932 - let already_present = content.lines().any(|line| line.trim() == ".tsk/"); 933 - assert!(already_present); 680 + fn test_remote_display() { 681 + let r = Remote { prefix: "jira".into(), path: PathBuf::from("/p") }; 682 + assert_eq!(r.to_string(), "jira\t/p"); 934 683 } 935 684 936 685 #[test] 937 - fn test_reopen_archived_task() { 938 - let (_dir, workspace) = setup_test_workspace(); 939 - 940 - let task_id = { 941 - let ws = Workspace::from_path(workspace.path.clone()).unwrap(); 942 - let task = ws 943 - .new_task("Task to reopen".to_string(), "body".to_string()) 944 - .unwrap(); 945 - let id = task.id; 946 - ws.push_task(task).unwrap(); 947 - id 948 - }; 949 - 950 - workspace.drop(TaskIdentifier::Id(task_id)).unwrap(); 951 - 952 - { 953 - let stack_after_drop = workspace.read_stack().unwrap(); 954 - assert_eq!(stack_after_drop.iter().count(), 0); 955 - } 956 - 957 - let tasks_dir = workspace.path.join("tasks"); 958 - let task_link = tasks_dir.join(task_id.filename()); 959 - assert!(!task_link.exists(), "symlink should be removed on drop"); 960 - 961 - workspace.reopen(TaskIdentifier::Id(task_id)).unwrap(); 962 - 963 - { 964 - let stack_after_reopen = workspace.read_stack().unwrap(); 965 - assert_eq!(stack_after_reopen.iter().count(), 1); 966 - } 967 - 
assert!(task_link.exists(), "symlink should be recreated on reopen"); 686 + fn test_bare_task_display() { 687 + let t = SearchTask { id: Id(1), title: "x".into(), body: "y".into() }; 688 + assert_eq!(t.to_string(), "tsk-1\tx\n\ny"); 968 689 } 969 690 970 691 #[test] 971 - fn test_reopen_nonexistent_task_fails() { 972 - let (_dir, workspace) = setup_test_workspace(); 973 - 974 - let result = workspace.reopen(TaskIdentifier::Id(Id(999))); 975 - assert!(result.is_err()); 692 + fn test_task_display() { 693 + let t = Task { id: Id(1), title: "x".into(), body: "y".into(), attributes: Default::default() }; 694 + assert_eq!(t.to_string(), "x\n\ny"); 976 695 } 977 696 978 697 #[test] 979 - fn test_reopen_already_open_task_fails() { 980 - let (_dir, workspace) = setup_test_workspace(); 981 - 982 - let task_id = { 983 - let ws = Workspace::from_path(workspace.path.clone()).unwrap(); 984 - let task = ws 985 - .new_task("Open task".to_string(), "body".to_string()) 986 - .unwrap(); 987 - let id = task.id; 988 - ws.push_task(task).unwrap(); 989 - id 990 - }; 991 - 992 - let result = workspace.reopen(TaskIdentifier::Id(task_id)); 993 - assert!(result.is_err()); 994 - } 995 - 996 - #[test] 997 - fn test_search_archived_includes_dropped_tasks() { 998 - let (_dir, workspace) = setup_test_workspace(); 999 - 1000 - let task_id = { 1001 - let ws = Workspace::from_path(workspace.path.clone()).unwrap(); 1002 - let task = ws 1003 - .new_task("Archived task".to_string(), "archived body".to_string()) 1004 - .unwrap(); 1005 - let id = task.id; 1006 - ws.push_task(task).unwrap(); 1007 - id 1008 - }; 1009 - 1010 - let stack_count = { 1011 - let stack = workspace.read_stack().unwrap(); 1012 - stack.iter().count() 1013 - }; 1014 - assert_eq!(stack_count, 1); 1015 - 1016 - workspace.drop(TaskIdentifier::Id(task_id)).unwrap(); 1017 - 1018 - let stack_after_drop = { 1019 - let stack = workspace.read_stack().unwrap(); 1020 - stack.iter().count() 1021 - }; 1022 - assert_eq!(stack_after_drop, 0); 1023 
- 1024 - let archive_dir = workspace.path.join("archive"); 1025 - assert!(archive_dir.join(format!("tsk-{}.tsk", task_id.0)).exists()); 1026 - 1027 - let archive_tasks_dir = workspace.path.join("tasks"); 1028 - assert!( 1029 - !archive_tasks_dir 1030 - .join(format!("tsk-{}.tsk", task_id.0)) 1031 - .exists() 1032 - ); 1033 - 1034 - let archived_tasks: Vec<SearchTask> = std::fs::read_dir(&archive_dir) 1035 - .unwrap() 1036 - .filter_map(|entry| { 1037 - let entry = entry.ok()?; 1038 - let path = entry.path(); 1039 - if !path.is_file() { 1040 - return None; 1041 - } 1042 - let filename = entry.file_name(); 1043 - let filename_str = filename.to_string_lossy(); 1044 - let id_str = filename_str 1045 - .strip_prefix("tsk-") 1046 - .and_then(|s| s.strip_suffix(".tsk"))?; 1047 - let id_num = id_str.parse::<u32>().ok()?; 1048 - let contents = std::fs::read_to_string(&path).ok()?; 1049 - let mut lines = contents.splitn(2, '\n'); 1050 - let title = lines.next().unwrap_or("").trim().to_string(); 1051 - let body = lines.next().unwrap_or("").trim().to_string(); 1052 - Some(SearchTask { 1053 - id: Id(id_num), 1054 - title, 1055 - body, 1056 - }) 1057 - }) 1058 - .collect(); 1059 - 1060 - assert_eq!(archived_tasks.len(), 1); 1061 - assert_eq!(archived_tasks[0].title, "Archived task"); 698 + fn test_attrs_round_trip() { 699 + let (_d, file, git) = setup_dual(); 700 + for ws in [&file, &git] { 701 + let mut t = ws.new_task("t".into(), "b".into()).unwrap(); 702 + t.attributes.insert("k1".into(), "v1".into()); 703 + t.attributes.insert("k2".into(), "v2".into()); 704 + ws.save_task(&t).unwrap(); 705 + let reread = ws.task(TaskIdentifier::Id(t.id)).unwrap(); 706 + assert_eq!(reread.attributes.get("k1"), Some(&"v1".to_string())); 707 + assert_eq!(reread.attributes.get("k2"), Some(&"v2".to_string())); 708 + } 1062 709 } 1063 710 }