use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::path::{Path, PathBuf}; use thiserror::Error; #[derive(Error, Debug)] pub enum ConfigError { #[error("Failed to read config file: {0}")] ReadError(#[from] std::io::Error), #[error("Failed to parse config file: {0}")] ParseError(#[from] toml::de::Error), #[error("No mlf.toml found in current directory or parent directories")] NotFound, } #[derive(Debug, Serialize, Deserialize)] pub struct MlfConfig { #[serde(default)] pub source: SourceConfig, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub output: Vec, #[serde(default)] pub dependencies: DependenciesConfig, } #[derive(Debug, Serialize, Deserialize)] pub struct SourceConfig { #[serde(default = "default_source_directory", skip_serializing_if = "is_default_source_directory")] pub directory: String, } impl Default for SourceConfig { fn default() -> Self { Self { directory: default_source_directory(), } } } fn default_source_directory() -> String { "./lexicons".to_string() } fn is_default_source_directory(s: &str) -> bool { s == default_source_directory() } #[derive(Debug, Serialize, Deserialize)] pub struct OutputConfig { pub r#type: String, pub directory: String, } #[derive(Debug, Serialize, Deserialize)] pub struct DependenciesConfig { #[serde(default)] pub dependencies: Vec, #[serde(default = "default_allow_transitive_deps", skip_serializing_if = "is_default_allow_transitive_deps")] pub allow_transitive_deps: bool, #[serde(default = "default_optimize_transitive_fetches", skip_serializing_if = "is_default_optimize_transitive_fetches")] pub optimize_transitive_fetches: bool, } fn default_allow_transitive_deps() -> bool { true } fn default_optimize_transitive_fetches() -> bool { false } fn is_default_allow_transitive_deps(b: &bool) -> bool { *b == default_allow_transitive_deps() } fn is_default_optimize_transitive_fetches(b: &bool) -> bool { *b == default_optimize_transitive_fetches() } impl Default for DependenciesConfig { fn 
default() -> Self { Self { dependencies: vec![], allow_transitive_deps: default_allow_transitive_deps(), optimize_transitive_fetches: default_optimize_transitive_fetches(), } } } impl Default for MlfConfig { fn default() -> Self { Self { source: SourceConfig::default(), output: vec![], dependencies: DependenciesConfig::default(), } } } impl MlfConfig { pub fn load(path: &Path) -> Result { let content = std::fs::read_to_string(path)?; let config: MlfConfig = toml::from_str(&content)?; Ok(config) } pub fn save(&self, path: &Path) -> Result<(), ConfigError> { let content = toml::to_string_pretty(self) .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; std::fs::write(path, content)?; Ok(()) } pub fn create_default(path: &Path) -> Result { let config = MlfConfig::default(); config.save(path)?; Ok(config) } } /// Find the project root by looking for mlf.toml /// Searches upward from the given directory pub fn find_project_root(start_dir: &Path) -> Result { let mut current = start_dir.to_path_buf(); const MAX_DEPTH: usize = 10; for _ in 0..MAX_DEPTH { let config_path = current.join("mlf.toml"); if config_path.exists() { return Ok(current); } match current.parent() { Some(parent) => current = parent.to_path_buf(), None => break, } } Err(ConfigError::NotFound) } /// Get or create the .mlf cache directory pub fn get_mlf_cache_dir(project_root: &Path) -> PathBuf { project_root.join(".mlf") } /// Initialize the .mlf directory structure pub fn init_mlf_cache(project_root: &Path) -> std::io::Result<()> { let mlf_dir = get_mlf_cache_dir(project_root); // Create directory structure std::fs::create_dir_all(&mlf_dir)?; std::fs::create_dir_all(mlf_dir.join("lexicons/json"))?; std::fs::create_dir_all(mlf_dir.join("lexicons/mlf"))?; // Create .gitignore let gitignore_path = mlf_dir.join(".gitignore"); if !gitignore_path.exists() { std::fs::write(&gitignore_path, "*\n!.gitignore\n")?; } Ok(()) } /// Lock file format for tracking resolved lexicons #[derive(Debug, 
Serialize, Deserialize, Default)]
pub struct LockFile {
    /// Lock file format version
    // NOTE(review): the derived Default yields version 0, while new() uses 1 —
    // confirm the derived Default is never used to build a "current" lock file.
    pub version: u32,
    /// All resolved lexicons (both direct and transitive dependencies)
    #[serde(default)]
    pub lexicons: HashMap<String, LockedLexicon>,
}

/// A single locked lexicon entry
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LockedLexicon {
    /// The NSID of this lexicon
    pub nsid: String,
    /// The DID of the repository this was fetched from
    pub did: String,
    /// SHA-256 checksum of the JSON content
    pub checksum: String,
    /// List of NSIDs this lexicon depends on (external references)
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dependencies: Vec<String>,
}

impl LockFile {
    /// Create an empty lock file at the current format version.
    pub fn new() -> Self {
        Self {
            version: 1,
            lexicons: HashMap::new(),
        }
    }

    /// Load a lock file from `path`, returning an empty one if it does not exist.
    ///
    /// # Errors
    /// Returns `ReadError` on I/O failure, `ParseError` on invalid TOML.
    pub fn load(path: &Path) -> Result<Self, ConfigError> {
        if !path.exists() {
            return Ok(Self::new());
        }
        let content = std::fs::read_to_string(path)?;
        toml::from_str(&content).map_err(ConfigError::ParseError)
    }

    /// Serialize this lock file as pretty-printed TOML and write it to `path`.
    pub fn save(&self, path: &Path) -> Result<(), ConfigError> {
        // Serialization failures are funneled through io::Error so they
        // convert into ConfigError via the #[from] on ReadError.
        let content = toml::to_string_pretty(self)
            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
        std::fs::write(path, content)?;
        Ok(())
    }

    /// Record a resolved lexicon, keyed by NSID. Replaces any existing entry
    /// with the same NSID.
    pub fn add_lexicon(&mut self, nsid: String, did: String, checksum: String, dependencies: Vec<String>) {
        self.lexicons.insert(nsid.clone(), LockedLexicon {
            nsid,
            did,
            checksum,
            dependencies,
        });
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_default_config() {
        let config = MlfConfig::default();
        assert_eq!(config.source.directory, "./lexicons");
        assert!(config.output.is_empty());
        assert!(config.dependencies.dependencies.is_empty());
    }

    #[test]
    fn test_lockfile_basic() {
        let mut lockfile = LockFile::new();
        assert_eq!(lockfile.version, 1);
        assert!(lockfile.lexicons.is_empty());

        lockfile.add_lexicon(
            "app.bsky.actor.profile".to_string(),
            "did:plc:test".to_string(),
            "sha256:abc123".to_string(),
            vec![],
        );

        assert_eq!(lockfile.lexicons.len(), 1);
        let locked = lockfile.lexicons.get("app.bsky.actor.profile").unwrap();
        assert_eq!(locked.nsid, "app.bsky.actor.profile");
        assert_eq!(locked.did, "did:plc:test");
        assert_eq!(locked.checksum, "sha256:abc123");
    }
}