this repo has no description
1
fork

Configure Feed

Select the types of activity you want to include in your feed.

Add generate command and HTML templates for static site generation

- Add generate command to create static HTML pages from stored feeds
- Add HTML templates for timeline, users, links, and index pages
- Update CLI commands structure and reference parser
- Remove outdated test files

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>

+2062 -859
+2 -1
pyproject.toml
··· 39 39 "bleach>=6.0.0", 40 40 "platformdirs>=4.0.0", 41 41 "pyyaml>=6.0.0", 42 - "email_validator" 42 + "email_validator", 43 + "jinja2>=3.1.6", 43 44 ] 44 45 45 46 [project.optional-dependencies]
+2 -2
src/thicket/cli/commands/__init__.py
··· 1 1 """CLI commands for thicket.""" 2 2 3 3 # Import all commands to register them with the main app 4 - from . import add, duplicates, index_cmd, info_cmd, init, links_cmd, list_cmd, sync 4 + from . import add, duplicates, generate, index_cmd, info_cmd, init, links_cmd, list_cmd, sync 5 5 6 - __all__ = ["add", "duplicates", "index_cmd", "info_cmd", "init", "links_cmd", "list_cmd", "sync"] 6 + __all__ = ["add", "duplicates", "generate", "index_cmd", "info_cmd", "init", "links_cmd", "list_cmd", "sync"]
+703
src/thicket/cli/commands/generate.py
"""Generate static HTML website from thicket data."""

import base64
import json
import re
import shutil
from datetime import datetime
from pathlib import Path
from typing import Any, Optional, TypedDict, Union

import typer
from jinja2 import Environment, FileSystemLoader, select_autoescape
from rich.progress import Progress, SpinnerColumn, TextColumn

from ...core.git_store import GitStore
from ...models.feed import AtomEntry
from ...models.user import GitStoreIndex, UserMetadata
from ..main import app
from ..utils import console, load_config


class UserData(TypedDict):
    """Type definition for user data structure."""

    metadata: UserMetadata
    recent_entries: list[tuple[str, AtomEntry]]


def safe_anchor_id(atom_id: str) -> str:
    """Convert an Atom ID to a safe HTML anchor ID.

    Uses URL-safe base64 (padding stripped) so arbitrary Atom IDs map to a
    deterministic, collision-free anchor string.
    """
    encoded = base64.urlsafe_b64encode(atom_id.encode('utf-8')).decode('ascii').rstrip('=')
    # Prefix with 'id' to ensure it starts with a letter (HTML requirement)
    return f"id{encoded}"


class WebsiteGenerator:
    """Generate static HTML website from thicket data.

    Expected call order: load_data() -> build_threads() -> generate_site().
    """

    def __init__(self, git_store: GitStore, output_dir: Path):
        self.git_store = git_store
        self.output_dir = output_dir
        self.template_dir = Path(__file__).parent.parent.parent / "templates"

        # Initialize Jinja2 environment
        self.env = Environment(
            loader=FileSystemLoader(self.template_dir),
            autoescape=select_autoescape(["html", "xml"]),
        )

        # Data containers
        self.index: Optional[GitStoreIndex] = None
        self.entries: list[tuple[str, AtomEntry]] = []  # (username, entry)
        self.links_data: Optional[dict[str, Any]] = None
        self.threads: list[list[dict[str, Any]]] = []  # List of threads with metadata

        # Reference maps. FIX: initialize here (not only inside build_threads)
        # so that generate_site() / _add_cross_thread_links() do not crash with
        # AttributeError when the repository has no links.json — build_threads
        # returns early in that case and previously never created these.
        self.outbound_refs: dict[str, set[str]] = {}  # entry_id -> ids it references
        self.inbound_refs: dict[str, set[str]] = {}   # entry_id -> ids referencing it
        self.reference_details: dict[str, list[dict[str, Any]]] = {}  # full reference info

    def get_display_name(self, username: str) -> str:
        """Get display name for a user, falling back to username."""
        if self.index and username in self.index.users:
            user = self.index.users[username]
            return user.display_name or username
        return username

    def get_user_homepage(self, username: str) -> Optional[str]:
        """Get homepage URL for a user, or None if unknown."""
        if self.index and username in self.index.users:
            user = self.index.users[username]
            return str(user.homepage) if user.homepage else None
        return None

    def clean_html_summary(self, content: Optional[str], max_length: int = 200) -> str:
        """Clean HTML content and truncate for display in timeline."""
        if not content:
            return ""

        # Strip tags, collapse whitespace, trim the ends
        clean_text = re.sub(r"<[^>]+>", " ", content)
        clean_text = re.sub(r"\s+", " ", clean_text)
        clean_text = clean_text.strip()

        # Truncate with ellipsis if needed
        if len(clean_text) > max_length:
            truncated = clean_text[:max_length]
            last_space = truncated.rfind(" ")
            # Prefer a word boundary when it falls reasonably close to the limit
            if last_space > max_length * 0.8:
                clean_text = truncated[:last_space] + "..."
            else:
                clean_text = truncated + "..."

        return clean_text

    def load_data(self) -> None:
        """Load index, entries and links data from the git repository.

        Raises:
            ValueError: if the repository has no index.
        """
        with Progress(
            SpinnerColumn(),
            TextColumn("[progress.description]{task.description}"),
            console=console,
        ) as progress:
            # Load index
            task = progress.add_task("Loading repository index...", total=None)
            self.index = self.git_store._load_index()
            if not self.index:
                raise ValueError("No index found in repository")
            progress.update(task, completed=True)

            # Load all entries from each user's directory; index/duplicates
            # JSON files are bookkeeping, not entries.
            task = progress.add_task("Loading entries...", total=None)
            for username, user_metadata in self.index.users.items():
                user_dir = self.git_store.repo_path / user_metadata.directory
                if user_dir.exists():
                    for entry_file in user_dir.glob("*.json"):
                        if entry_file.name not in ["index.json", "duplicates.json"]:
                            try:
                                with open(entry_file) as f:
                                    entry_data = json.load(f)
                                entry = AtomEntry(**entry_data)
                                self.entries.append((username, entry))
                            except Exception as e:
                                # Best-effort load: a single corrupt file should
                                # not abort site generation.
                                console.print(
                                    f"[yellow]Warning: Failed to load {entry_file}: {e}[/yellow]"
                                )
            progress.update(task, completed=True)

            # Sort entries by date (newest first) - prioritize updated over published
            self.entries.sort(
                key=lambda x: x[1].updated or x[1].published or datetime.min, reverse=True
            )

            # Load links data (optional; absent in repos without references)
            task = progress.add_task("Loading links and references...", total=None)
            links_file = self.git_store.repo_path / "links.json"
            if links_file.exists():
                with open(links_file) as f:
                    self.links_data = json.load(f)
            progress.update(task, completed=True)

    def build_threads(self) -> None:
        """Build threaded conversations from references."""
        if not self.links_data or "references" not in self.links_data:
            return

        # Map entry IDs to (username, entry) tuples
        entry_map: dict[str, tuple[str, AtomEntry]] = {}
        for username, entry in self.entries:
            entry_map[entry.id] = (username, entry)

        # Build adjacency lists for references (containers created in __init__)
        for ref in self.links_data["references"]:
            source_id = ref["source_entry_id"]
            target_id = ref.get("target_entry_id")

            if target_id and source_id in entry_map and target_id in entry_map:
                self.outbound_refs.setdefault(source_id, set()).add(target_id)
                self.inbound_refs.setdefault(target_id, set()).add(source_id)

                # Store reference details for UI
                self.reference_details.setdefault(source_id, []).append(
                    {
                        "target_id": target_id,
                        "target_username": ref.get("target_username"),
                        "type": "outbound",
                    }
                )
                self.reference_details.setdefault(target_id, []).append(
                    {
                        "source_id": source_id,
                        "source_username": ref.get("source_username"),
                        "type": "inbound",
                    }
                )

        # Find conversation threads (multi-post discussions)
        processed = set()

        for entry_id, (_username, _entry) in entry_map.items():
            if entry_id in processed:
                continue

            # Build thread starting from this entry
            thread = []
            to_visit = [entry_id]
            thread_ids = set()
            level_map: dict[str, int] = {}  # Track levels for this thread

            # First, traverse up to find the root.
            # FIX: guard cycles with a visited set — the original checked
            # `parent in thread_ids`, but thread_ids is always empty here, so a
            # mutual reference (A <-> B) made this loop spin forever.
            visited_up = {entry_id}
            current = entry_id
            while current in self.inbound_refs:
                parents = self.inbound_refs[current] - {current}  # Exclude self-references
                if not parents:
                    break
                # Take the first parent
                parent = next(iter(parents))
                if parent in visited_up:  # Avoid cycles
                    break
                visited_up.add(parent)
                current = parent
            to_visit.insert(0, current)

            # Now traverse down from the root
            while to_visit:
                current = to_visit.pop(0)
                if current in thread_ids or current not in entry_map:
                    continue

                thread_ids.add(current)
                username, entry = entry_map[current]

                # Calculate thread level
                thread_level = self._calculate_thread_level(current, level_map)

                # Add threading metadata
                thread_entry = {
                    "username": username,
                    "display_name": self.get_display_name(username),
                    "entry": entry,
                    "entry_id": current,
                    "references_to": list(self.outbound_refs.get(current, [])),
                    "referenced_by": list(self.inbound_refs.get(current, [])),
                    "thread_level": thread_level,
                }
                thread.append(thread_entry)
                processed.add(current)

                # Add children
                if current in self.outbound_refs:
                    children = self.outbound_refs[current] - thread_ids  # Avoid cycles
                    to_visit.extend(sorted(children))

            if len(thread) > 1:  # Only keep actual threads
                # Sort thread by date (newest first) - prioritize updated over published
                thread.sort(key=lambda x: x["entry"].updated or x["entry"].published or datetime.min, reverse=True)  # type: ignore
                self.threads.append(thread)

        # Sort threads by the date of their most recent entry - prioritize updated over published
        self.threads.sort(
            key=lambda t: max(
                item["entry"].updated or item["entry"].published or datetime.min for item in t
            ),
            reverse=True,
        )

    def _calculate_thread_level(
        self, entry_id: str, processed_entries: dict[str, int]
    ) -> int:
        """Calculate indentation level for threaded display (memoized, capped at 4)."""
        if entry_id in processed_entries:
            return processed_entries[entry_id]

        if entry_id not in self.inbound_refs:
            processed_entries[entry_id] = 0
            return 0

        # Only parents already placed in this thread influence the level
        parents_in_thread = self.inbound_refs[entry_id] & set(processed_entries.keys())
        if not parents_in_thread:
            processed_entries[entry_id] = 0
            return 0

        # Find the deepest parent level + 1
        max_parent_level = 0
        for parent_id in parents_in_thread:
            parent_level = self._calculate_thread_level(parent_id, processed_entries)
            max_parent_level = max(max_parent_level, parent_level)

        level = min(max_parent_level + 1, 4)  # Cap at level 4
        processed_entries[entry_id] = level
        return level

    def get_standalone_references(self) -> list[dict[str, Any]]:
        """Get posts that have references but aren't part of multi-post threads."""
        if not hasattr(self, "reference_details"):
            return []

        threaded_entry_ids = set()
        for thread in self.threads:
            for item in thread:
                threaded_entry_ids.add(item["entry_id"])

        standalone_refs = []
        for username, entry in self.entries:
            if (
                entry.id in self.reference_details
                and entry.id not in threaded_entry_ids
            ):
                refs = self.reference_details[entry.id]
                # Only include if it has meaningful references (not just self-references)
                meaningful_refs = [
                    r
                    for r in refs
                    if r.get("target_id") != entry.id and r.get("source_id") != entry.id
                ]
                if meaningful_refs:
                    standalone_refs.append(
                        {
                            "username": username,
                            "display_name": self.get_display_name(username),
                            "entry": entry,
                            "references": meaningful_refs,
                        }
                    )

        return standalone_refs

    def _add_cross_thread_links(self, timeline_items: list[dict[str, Any]]) -> None:
        """Add cross-thread linking for entries that appear in multiple timeline items.

        Mutates timeline_items in place, attaching "cross_thread_links" and
        "shared_references" to every occurrence of a repeated entry.
        """
        # Map entry IDs to their positions in the timeline.
        # NOTE: the original also built a url_references map here (including
        # per-reference linear entry scans) that was never read — removed.
        entry_positions: dict[str, list[int]] = {}
        for i, item in enumerate(timeline_items):
            if item["type"] == "post":
                entry_positions.setdefault(item["content"]["entry"].id, []).append(i)
            elif item["type"] == "thread":
                for thread_item in item["content"]:
                    entry_positions.setdefault(thread_item["entry"].id, []).append(i)

        # Only entries that actually appear multiple times get connections
        cross_thread_connections: dict[str, set[int]] = {}
        for entry_id, positions in entry_positions.items():
            if len(positions) > 1:
                cross_thread_connections[entry_id] = set(positions)

        # Apply cross-thread links to timeline items
        for entry_id, positions_set in cross_thread_connections.items():
            positions_list = list(positions_set)
            for pos in positions_list:
                item = timeline_items[pos]
                other_positions = sorted([p for p in positions_list if p != pos])

                if item["type"] == "post":
                    # Add cross-thread info to individual posts
                    item["content"]["cross_thread_links"] = self._build_cross_thread_link_data(entry_id, other_positions, timeline_items)
                    item["content"]["shared_references"] = self._get_shared_references(entry_id, positions_set, timeline_items)
                elif item["type"] == "thread":
                    # Add cross-thread info to the matching thread item
                    for thread_item in item["content"]:
                        if thread_item["entry"].id == entry_id:
                            thread_item["cross_thread_links"] = self._build_cross_thread_link_data(entry_id, other_positions, timeline_items)
                            thread_item["shared_references"] = self._get_shared_references(entry_id, positions_set, timeline_items)
                            break

    def _build_cross_thread_link_data(self, entry_id: str, other_positions: list[int], timeline_items: list[dict[str, Any]]) -> list[dict[str, Any]]:
        """Build detailed cross-thread link data with anchor information."""
        cross_thread_links = []

        for pos in other_positions:
            item = timeline_items[pos]
            if item["type"] == "post":
                # For individual posts
                safe_id = safe_anchor_id(entry_id)
                cross_thread_links.append({
                    "position": pos,
                    "anchor_id": f"post-{pos}-{safe_id}",
                    "context": "individual post",
                    "title": item["content"]["entry"].title
                })
            elif item["type"] == "thread":
                # For thread items, find the specific thread item
                for thread_idx, thread_item in enumerate(item["content"]):
                    if thread_item["entry"].id == entry_id:
                        safe_id = safe_anchor_id(entry_id)
                        cross_thread_links.append({
                            "position": pos,
                            "anchor_id": f"post-{pos}-{thread_idx}-{safe_id}",
                            "context": f"thread (level {thread_item.get('thread_level', 0)})",
                            "title": thread_item["entry"].title
                        })
                        break

        return cross_thread_links

    def _find_entry_by_id(self, entry_id: str) -> Optional[AtomEntry]:
        """Find an entry by its ID via linear scan, or None if absent."""
        for _username, entry in self.entries:
            if entry.id == entry_id:
                return entry
        return None

    def _get_shared_references(self, entry_id: str, positions: Union[set[int], list[int]], timeline_items: list[dict[str, Any]]) -> list[dict[str, Any]]:
        """Get information about shared references between cross-thread entries."""
        shared_refs = []

        # Collect all referenced URLs from entries at these positions
        url_counts: dict[str, int] = {}
        referencing_entries: dict[str, list[str]] = {}  # url -> [entry_ids]

        for pos in positions:
            item = timeline_items[pos]
            entries_to_check = []

            if item["type"] == "post":
                entries_to_check.append(item["content"]["entry"])
            elif item["type"] == "thread":
                entries_to_check.extend([ti["entry"] for ti in item["content"]])

            for entry in entries_to_check:
                if entry.id in self.reference_details:
                    for ref in self.reference_details[entry.id]:
                        if ref["type"] == "outbound" and "target_id" in ref:
                            target_entry = self._find_entry_by_id(ref["target_id"])
                            if target_entry and target_entry.link:
                                url = str(target_entry.link)
                                url_counts[url] = url_counts.get(url, 0) + 1
                                if url not in referencing_entries:
                                    referencing_entries[url] = []
                                if entry.id not in referencing_entries[url]:
                                    referencing_entries[url].append(entry.id)

        # Find URLs referenced by multiple entries
        for url, count in url_counts.items():
            if count > 1 and len(referencing_entries[url]) > 1:
                # Get the target entry info
                target_entry = None
                target_username = None
                for ref in (self.links_data or {}).get("references", []):
                    if ref.get("target_url") == url:
                        target_username = ref.get("target_username")
                        if ref.get("target_entry_id"):
                            target_entry = self._find_entry_by_id(ref["target_entry_id"])
                        break

                shared_refs.append({
                    "url": url,
                    "count": count,
                    "referencing_entries": referencing_entries[url],
                    "target_username": target_username,
                    "target_title": target_entry.title if target_entry else None
                })

        return sorted(shared_refs, key=lambda x: x["count"], reverse=True)

    def generate_site(self) -> None:
        """Generate the static website (CSS, JS, and the four HTML pages)."""
        # Create output directory
        self.output_dir.mkdir(parents=True, exist_ok=True)

        # Create static directories
        (self.output_dir / "css").mkdir(exist_ok=True)
        (self.output_dir / "js").mkdir(exist_ok=True)

        # Generate CSS
        css_template = self.env.get_template("style.css")
        css_content = css_template.render()
        with open(self.output_dir / "css" / "style.css", "w") as f:
            f.write(css_content)

        # Generate JavaScript
        js_template = self.env.get_template("script.js")
        js_content = js_template.render()
        with open(self.output_dir / "js" / "script.js", "w") as f:
            f.write(js_content)

        # Prepare common template data (helpers are callable from templates)
        base_data = {
            "title": "Energy & Environment Group",
            "generated_at": datetime.now().isoformat(),
            "get_display_name": self.get_display_name,
            "get_user_homepage": self.get_user_homepage,
            "clean_html_summary": self.clean_html_summary,
            "safe_anchor_id": safe_anchor_id,
        }

        # Build unified timeline
        timeline_items = []

        # Only consider the threads that will actually be displayed
        displayed_threads = self.threads[:20]  # Limit to 20 threads

        # Track which entries are part of displayed threads
        threaded_entry_ids = set()
        for thread in displayed_threads:
            for item in thread:
                threaded_entry_ids.add(item["entry_id"])

        # Add threads to timeline (using the date of the most recent post)
        for thread in displayed_threads:
            most_recent_date = max(
                item["entry"].updated or item["entry"].published or datetime.min
                for item in thread
            )
            timeline_items.append({
                "type": "thread",
                "date": most_recent_date,
                "content": thread
            })

        # Add individual posts (not in threads)
        for username, entry in self.entries[:50]:
            if entry.id not in threaded_entry_ids:
                # Check if this entry has references
                has_refs = (
                    entry.id in self.reference_details
                    if hasattr(self, "reference_details")
                    else False
                )

                refs = []
                if has_refs:
                    refs = self.reference_details.get(entry.id, [])
                    # Drop self-references; they carry no timeline signal
                    refs = [
                        r for r in refs
                        if r.get("target_id") != entry.id
                        and r.get("source_id") != entry.id
                    ]

                timeline_items.append({
                    "type": "post",
                    "date": entry.updated or entry.published or datetime.min,
                    "content": {
                        "username": username,
                        "display_name": self.get_display_name(username),
                        "entry": entry,
                        "references": refs if refs else None
                    }
                })

        # Sort unified timeline by date (newest first)
        timeline_items.sort(key=lambda x: x["date"], reverse=True)

        # Limit timeline to what will actually be rendered
        timeline_items = timeline_items[:50]  # Limit to 50 items total

        # Add cross-thread linking for repeat blog references
        self._add_cross_thread_links(timeline_items)

        # Prepare outgoing links data
        outgoing_links = []
        if self.links_data and "links" in self.links_data:
            for url, link_info in self.links_data["links"].items():
                referencing_entries = []
                for entry_id in link_info.get("referencing_entries", []):
                    for username, entry in self.entries:
                        if entry.id == entry_id:
                            referencing_entries.append(
                                (self.get_display_name(username), entry)
                            )
                            break

                if referencing_entries:
                    # Sort by date - prioritize updated over published
                    referencing_entries.sort(
                        key=lambda x: x[1].updated or x[1].published or datetime.min, reverse=True
                    )
                    outgoing_links.append(
                        {
                            "url": url,
                            "target_username": link_info.get("target_username"),
                            "entries": referencing_entries,
                        }
                    )

        # Sort links by most recent reference - prioritize updated over published
        outgoing_links.sort(
            key=lambda x: x["entries"][0][1].updated
            or x["entries"][0][1].published or datetime.min,
            reverse=True,
        )

        # Prepare users data
        users: list[UserData] = []
        if self.index:
            for username, user_metadata in self.index.users.items():
                # Get recent entries for this user with display names
                user_entries = [
                    (self.get_display_name(u), e)
                    for u, e in self.entries
                    if u == username
                ][:5]
                users.append(
                    {"metadata": user_metadata, "recent_entries": user_entries}
                )
            # Sort by entry count
            users.sort(key=lambda x: x["metadata"].entry_count, reverse=True)

        # Generate timeline page
        timeline_template = self.env.get_template("timeline.html")
        timeline_content = timeline_template.render(
            **base_data,
            page="timeline",
            timeline_items=timeline_items,  # Already limited above
        )
        with open(self.output_dir / "timeline.html", "w") as f:
            f.write(timeline_content)

        # Generate links page
        links_template = self.env.get_template("links.html")
        links_content = links_template.render(
            **base_data,
            page="links",
            outgoing_links=outgoing_links[:100],
        )
        with open(self.output_dir / "links.html", "w") as f:
            f.write(links_content)

        # Generate users page
        users_template = self.env.get_template("users.html")
        users_content = users_template.render(
            **base_data,
            page="users",
            users=users,
        )
        with open(self.output_dir / "users.html", "w") as f:
            f.write(users_content)

        # Generate main index page (redirect to timeline)
        index_template = self.env.get_template("index.html")
        index_content = index_template.render(**base_data)
        with open(self.output_dir / "index.html", "w") as f:
            f.write(index_content)

        console.print(f"[green]✓[/green] Generated website at {self.output_dir}")
        console.print(f"  - {len(self.entries)} entries")
        console.print(f"  - {len(self.threads)} conversation threads")
        console.print(f"  - {len(outgoing_links)} outgoing links")
        console.print(f"  - {len(users)} users")
        console.print(
            "  - Generated pages: index.html, timeline.html, links.html, users.html"
        )


@app.command()
def generate(
    output: Path = typer.Option(
        Path("./thicket-site"),
        "--output",
        "-o",
        help="Output directory for the generated website",
    ),
    force: bool = typer.Option(
        False, "--force", "-f", help="Overwrite existing output directory"
    ),
    config_file: Path = typer.Option(
        Path("thicket.yaml"), "--config", help="Configuration file path"
    ),
) -> None:
    """Generate a static HTML website from thicket data."""
    config = load_config(config_file)

    if not config.git_store:
        console.print("[red]No git store path configured[/red]")
        raise typer.Exit(1)

    git_store = GitStore(config.git_store)

    # Check if output directory exists
    if output.exists() and not force:
        console.print(
            f"[red]Output directory {output} already exists. Use --force to overwrite.[/red]"
        )
        raise typer.Exit(1)

    # Clean output directory if forcing
    if output.exists() and force:
        shutil.rmtree(output)

    try:
        generator = WebsiteGenerator(git_store, output)

        console.print("[bold]Generating static website...[/bold]")
        generator.load_data()
        generator.build_threads()
        generator.generate_site()

    except Exception as e:
        console.print(f"[red]Error generating website: {e}[/red]")
        raise typer.Exit(1) from e
+1 -1
src/thicket/cli/main.py
··· 47 47 48 48 49 49 # Import commands to register them 50 - from .commands import add, duplicates, index_cmd, info_cmd, init, links_cmd, list_cmd, sync 50 + from .commands import add, duplicates, generate, index_cmd, info_cmd, init, links_cmd, list_cmd, sync 51 51 52 52 if __name__ == "__main__": 53 53 app()
+150 -13
src/thicket/core/reference_parser.py
··· 179 179 return True 180 180 return False 181 181 182 + def _is_likely_blog_post_url(self, url: str) -> bool: 183 + """Check if a same-domain URL likely points to a blog post (not CSS, images, etc.).""" 184 + parsed_url = urlparse(url) 185 + path = parsed_url.path.lower() 186 + 187 + # Skip obvious non-blog content 188 + if any(path.endswith(ext) for ext in ['.css', '.js', '.png', '.jpg', '.jpeg', '.gif', '.svg', '.ico', '.pdf', '.xml', '.json']): 189 + return False 190 + 191 + # Skip common non-blog paths 192 + if any(segment in path for segment in ['/static/', '/assets/', '/css/', '/js/', '/images/', '/img/', '/media/', '/uploads/']): 193 + return False 194 + 195 + # Skip fragment-only links (same page anchors) 196 + if not path or path == '/': 197 + return False 198 + 199 + # Look for positive indicators of blog posts 200 + # Common blog post patterns: dates, slugs, post indicators 201 + blog_indicators = [ 202 + r'/\d{4}/', # Year in path 203 + r'/\d{4}/\d{2}/', # Year/month in path 204 + r'/blog/', 205 + r'/post/', 206 + r'/posts/', 207 + r'/articles?/', 208 + r'/notes?/', 209 + r'/entries/', 210 + r'/writing/', 211 + ] 212 + 213 + for pattern in blog_indicators: 214 + if re.search(pattern, path): 215 + return True 216 + 217 + # If it has a reasonable path depth and doesn't match exclusions, likely a blog post 218 + path_segments = [seg for seg in path.split('/') if seg] 219 + return len(path_segments) >= 1 # At least one meaningful path segment 220 + 182 221 def resolve_target_user( 183 222 self, url: str, user_domains: dict[str, set[str]] 184 223 ) -> Optional[str]: ··· 209 248 links = self.extract_links_from_html(content) 210 249 211 250 for url, _link_text in links: 212 - # Skip internal links (same domain as the entry) 213 251 entry_domain = ( 214 252 urlparse(str(entry.link)).netloc.lower() if entry.link else "" 215 253 ) 216 254 link_domain = urlparse(url).netloc.lower() 217 255 218 - if link_domain == entry_domain: 219 - continue 220 - 221 256 # 
Check if this looks like a blog URL 222 257 if not self.is_blog_url(url): 223 258 continue 259 + 260 + # For same-domain links, apply additional filtering to avoid non-blog content 261 + if link_domain == entry_domain: 262 + # Only include same-domain links that look like blog posts 263 + if not self._is_likely_blog_post_url(url): 264 + continue 224 265 225 266 # Try to resolve to a known user 226 - target_username = self.resolve_target_user(url, user_domains) 267 + if link_domain == entry_domain: 268 + # Same domain - target user is the same as source user 269 + target_username: Optional[str] = username 270 + else: 271 + # Different domain - try to resolve 272 + target_username = self.resolve_target_user(url, user_domains) 227 273 228 274 ref = BlogReference( 229 275 source_entry_id=entry.id, ··· 261 307 262 308 return user_domains 263 309 310 + def _build_url_to_entry_mapping(self, git_store: "GitStore") -> dict[str, str]: 311 + """Build a comprehensive mapping from URLs to entry IDs using git store data. 
312 + 313 + This creates a bidirectional mapping that handles: 314 + - Entry link URLs -> Entry IDs 315 + - URL variations (with/without www, http/https) 316 + - Multiple URLs pointing to the same entry 317 + """ 318 + url_to_entry: dict[str, str] = {} 319 + 320 + # Load index to get all users 321 + index = git_store._load_index() 322 + 323 + for username in index.users.keys(): 324 + entries = git_store.list_entries(username) 325 + 326 + for entry in entries: 327 + if entry.link: 328 + link_url = str(entry.link) 329 + entry_id = entry.id 330 + 331 + # Map the canonical link URL 332 + url_to_entry[link_url] = entry_id 333 + 334 + # Handle common URL variations 335 + parsed = urlparse(link_url) 336 + if parsed.netloc and parsed.path: 337 + # Add version without www 338 + if parsed.netloc.startswith('www.'): 339 + no_www_url = f"{parsed.scheme}://{parsed.netloc[4:]}{parsed.path}" 340 + if parsed.query: 341 + no_www_url += f"?{parsed.query}" 342 + if parsed.fragment: 343 + no_www_url += f"#{parsed.fragment}" 344 + url_to_entry[no_www_url] = entry_id 345 + 346 + # Add version with www if not present 347 + elif not parsed.netloc.startswith('www.'): 348 + www_url = f"{parsed.scheme}://www.{parsed.netloc}{parsed.path}" 349 + if parsed.query: 350 + www_url += f"?{parsed.query}" 351 + if parsed.fragment: 352 + www_url += f"#{parsed.fragment}" 353 + url_to_entry[www_url] = entry_id 354 + 355 + # Add http/https variations 356 + if parsed.scheme == 'https': 357 + http_url = link_url.replace('https://', 'http://', 1) 358 + url_to_entry[http_url] = entry_id 359 + elif parsed.scheme == 'http': 360 + https_url = link_url.replace('http://', 'https://', 1) 361 + url_to_entry[https_url] = entry_id 362 + 363 + return url_to_entry 364 + 365 + def _normalize_url(self, url: str) -> str: 366 + """Normalize URL for consistent matching. 367 + 368 + Handles common variations like trailing slashes, fragments, etc. 
369 + """ 370 + parsed = urlparse(url) 371 + 372 + # Remove trailing slash from path 373 + path = parsed.path.rstrip('/') if parsed.path != '/' else parsed.path 374 + 375 + # Reconstruct without fragment for consistent matching 376 + normalized = f"{parsed.scheme}://{parsed.netloc}{path}" 377 + if parsed.query: 378 + normalized += f"?{parsed.query}" 379 + 380 + return normalized 381 + 264 382 def resolve_target_entry_ids( 265 383 self, references: list[BlogReference], git_store: "GitStore" 266 384 ) -> list[BlogReference]: 267 - """Resolve target_entry_id for references that have target_username but no target_entry_id.""" 385 + """Resolve target_entry_id for references using comprehensive URL mapping.""" 268 386 resolved_refs = [] 387 + 388 + # Build comprehensive URL to entry ID mapping 389 + url_to_entry = self._build_url_to_entry_mapping(git_store) 269 390 270 391 for ref in references: 271 392 # If we already have a target_entry_id, keep the reference as-is ··· 278 399 resolved_refs.append(ref) 279 400 continue 280 401 281 - # Try to find the entry by matching the URL 282 - entries = git_store.list_entries(ref.target_username) 402 + # Try to resolve using URL mapping 283 403 resolved_entry_id = None 284 404 285 - for entry in entries: 286 - # Check if the entry's link matches the target URL 287 - if entry.link and str(entry.link) == ref.target_url: 288 - resolved_entry_id = entry.id 289 - break 405 + # First, try exact match 406 + if ref.target_url in url_to_entry: 407 + resolved_entry_id = url_to_entry[ref.target_url] 408 + else: 409 + # Try normalized URL matching 410 + normalized_target = self._normalize_url(ref.target_url) 411 + if normalized_target in url_to_entry: 412 + resolved_entry_id = url_to_entry[normalized_target] 413 + else: 414 + # Try URL variations 415 + for mapped_url, entry_id in url_to_entry.items(): 416 + if self._normalize_url(mapped_url) == normalized_target: 417 + resolved_entry_id = entry_id 418 + break 419 + 420 + # Verify the resolved 
entry belongs to the target username 421 + if resolved_entry_id: 422 + # Double-check by loading the actual entry 423 + entries = git_store.list_entries(ref.target_username) 424 + entry_found = any(entry.id == resolved_entry_id for entry in entries) 425 + if not entry_found: 426 + resolved_entry_id = None 290 427 291 428 # Create a new reference with the resolved target_entry_id 292 429 resolved_ref = BlogReference(
+31
src/thicket/templates/base.html
··· 1 + <!DOCTYPE html> 2 + <html lang="en"> 3 + <head> 4 + <meta charset="UTF-8"> 5 + <meta name="viewport" content="width=device-width, initial-scale=1.0"> 6 + <title>{% block page_title %}{{ title }}{% endblock %}</title> 7 + <link rel="stylesheet" href="css/style.css"> 8 + </head> 9 + <body> 10 + <header class="site-header"> 11 + <div class="header-content"> 12 + <h1 class="site-title">{{ title }}</h1> 13 + <nav class="site-nav"> 14 + <a href="timeline.html" class="nav-link {% if page == 'timeline' %}active{% endif %}">Timeline</a> 15 + <a href="links.html" class="nav-link {% if page == 'links' %}active{% endif %}">Links</a> 16 + <a href="users.html" class="nav-link {% if page == 'users' %}active{% endif %}">Users</a> 17 + </nav> 18 + </div> 19 + </header> 20 + 21 + <main class="main-content"> 22 + {% block content %}{% endblock %} 23 + </main> 24 + 25 + <footer class="site-footer"> 26 + <p>Generated on {{ generated_at }} by <a href="https://github.com/avsm/thicket">Thicket</a></p> 27 + </footer> 28 + 29 + <script src="js/script.js"></script> 30 + </body> 31 + </html>
+13
src/thicket/templates/index.html
··· 1 + <!DOCTYPE html> 2 + <html lang="en"> 3 + <head> 4 + <meta charset="UTF-8"> 5 + <meta name="viewport" content="width=device-width, initial-scale=1.0"> 6 + <title>{{ title }}</title> 7 + <meta http-equiv="refresh" content="0; url=timeline.html"> 8 + <link rel="canonical" href="timeline.html"> 9 + </head> 10 + <body> 11 + <p>Redirecting to <a href="timeline.html">Timeline</a>...</p> 12 + </body> 13 + </html>
+38
src/thicket/templates/links.html
··· 1 + {% extends "base.html" %} 2 + 3 + {% block page_title %}Outgoing Links - {{ title }}{% endblock %} 4 + 5 + {% block content %} 6 + <div class="page-content"> 7 + <h2>Outgoing Links</h2> 8 + <p class="page-description">External links referenced in blog posts, ordered by most recent reference.</p> 9 + 10 + {% for link in outgoing_links %} 11 + <article class="link-group"> 12 + <h3 class="link-url"> 13 + <a href="{{ link.url }}" target="_blank">{{ link.url|truncate(80) }}</a> 14 + {% if link.target_username %} 15 + <span class="target-user">({{ link.target_username }})</span> 16 + {% endif %} 17 + </h3> 18 + <div class="referencing-entries"> 19 + <span class="ref-count">Referenced in {{ link.entries|length }} post(s):</span> 20 + <ul> 21 + {% for display_name, entry in link.entries[:5] %} 22 + <li> 23 + <span class="author">{{ display_name }}</span> - 24 + <a href="{{ entry.link }}" target="_blank">{{ entry.title }}</a> 25 + <time datetime="{{ entry.updated or entry.published }}"> 26 + ({{ (entry.updated or entry.published).strftime('%Y-%m-%d') }}) 27 + </time> 28 + </li> 29 + {% endfor %} 30 + {% if link.entries|length > 5 %} 31 + <li class="more">... and {{ link.entries|length - 5 }} more</li> 32 + {% endif %} 33 + </ul> 34 + </div> 35 + </article> 36 + {% endfor %} 37 + </div> 38 + {% endblock %}
+88
src/thicket/templates/script.js
// Progressive-enhancement behaviours for the generated thicket website.
document.addEventListener('DOMContentLoaded', function() {

    // Collapsible threads: clicking a thread header toggles visibility of
    // every entry after the first and annotates the count text.
    const threadHeaders = document.querySelectorAll('.thread-header');
    threadHeaders.forEach(header => {
        header.style.cursor = 'pointer';
        header.addEventListener('click', function() {
            const thread = this.parentElement;
            const entries = thread.querySelectorAll('.thread-entry');

            // Toggle visibility of all but the first entry.
            for (let i = 1; i < entries.length; i++) {
                entries[i].style.display = entries[i].style.display === 'none' ? 'block' : 'none';
            }

            // Update the thread count text; guard against a missing node
            // (previously this threw if .thread-count was absent).
            const count = this.querySelector('.thread-count');
            if (!count) return;
            if (entries[1] && entries[1].style.display === 'none') {
                count.textContent = count.textContent.replace('posts', 'posts (collapsed)');
            } else {
                count.textContent = count.textContent.replace(' (collapsed)', '');
            }
        });
    });

    // Swap absolute timestamps for relative ones ("3d ago"); the original
    // text is preserved in the title attribute as a hover tooltip.
    document.querySelectorAll('time').forEach(timeEl => {
        const datetime = new Date(timeEl.getAttribute('datetime'));
        // Skip missing/unparseable datetimes: NaN used to fall through every
        // comparison and render as "NaNy ago".
        if (Number.isNaN(datetime.getTime())) return;

        const diffMs = Date.now() - datetime;
        const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24));

        let relativeTime;
        if (diffDays === 0) {
            const diffHours = Math.floor(diffMs / (1000 * 60 * 60));
            if (diffHours === 0) {
                const diffMinutes = Math.floor(diffMs / (1000 * 60));
                relativeTime = diffMinutes === 0 ? 'just now' : `${diffMinutes}m ago`;
            } else {
                relativeTime = `${diffHours}h ago`;
            }
        } else if (diffDays === 1) {
            relativeTime = 'yesterday';
        } else if (diffDays < 7) {
            relativeTime = `${diffDays}d ago`;
        } else if (diffDays < 30) {
            const weeks = Math.floor(diffDays / 7);
            relativeTime = weeks === 1 ? '1w ago' : `${weeks}w ago`;
        } else if (diffDays < 365) {
            const months = Math.floor(diffDays / 30);
            relativeTime = months === 1 ? '1mo ago' : `${months}mo ago`;
        } else {
            const years = Math.floor(diffDays / 365);
            relativeTime = years === 1 ? '1y ago' : `${years}y ago`;
        }

        timeEl.setAttribute('title', timeEl.textContent);
        timeEl.textContent = relativeTime;
    });

    // Smooth scrolling for in-page anchors, plus a brief highlight of the
    // containing timeline entry.
    document.querySelectorAll('a[href^="#"]').forEach(anchor => {
        anchor.addEventListener('click', function (e) {
            const href = this.getAttribute('href');
            // A bare "#" is not a valid selector: querySelector('#') throws.
            // Let the browser handle those (and missing targets) natively.
            if (!href || href === '#') return;
            const target = document.querySelector(href);
            if (!target) return;

            e.preventDefault();
            target.scrollIntoView({
                behavior: 'smooth',
                block: 'center'
            });

            // Highlight the target's timeline entry briefly.
            const timelineEntry = target.closest('.timeline-entry');
            if (timelineEntry) {
                timelineEntry.style.outline = '2px solid var(--primary-color)';
                timelineEntry.style.borderRadius = '8px';
                setTimeout(() => {
                    timelineEntry.style.outline = '';
                    timelineEntry.style.borderRadius = '';
                }, 2000);
            }
        });
    });
});
+754
src/thicket/templates/style.css
··· 1 + /* Modern, clean design with high-density text and readable theme */ 2 + 3 + :root { 4 + --primary-color: #2c3e50; 5 + --secondary-color: #3498db; 6 + --accent-color: #e74c3c; 7 + --background: #ffffff; 8 + --surface: #f8f9fa; 9 + --text-primary: #2c3e50; 10 + --text-secondary: #7f8c8d; 11 + --border-color: #e0e0e0; 12 + --thread-indent: 20px; 13 + --max-width: 1200px; 14 + } 15 + 16 + * { 17 + margin: 0; 18 + padding: 0; 19 + box-sizing: border-box; 20 + } 21 + 22 + body { 23 + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Helvetica Neue', Arial, sans-serif; 24 + font-size: 14px; 25 + line-height: 1.6; 26 + color: var(--text-primary); 27 + background-color: var(--background); 28 + } 29 + 30 + /* Header */ 31 + .site-header { 32 + background-color: var(--surface); 33 + border-bottom: 1px solid var(--border-color); 34 + padding: 0.75rem 0; 35 + position: sticky; 36 + top: 0; 37 + z-index: 100; 38 + } 39 + 40 + .header-content { 41 + max-width: var(--max-width); 42 + margin: 0 auto; 43 + padding: 0 2rem; 44 + display: flex; 45 + justify-content: space-between; 46 + align-items: center; 47 + } 48 + 49 + .site-title { 50 + font-size: 1.5rem; 51 + font-weight: 600; 52 + color: var(--primary-color); 53 + margin: 0; 54 + } 55 + 56 + /* Navigation */ 57 + .site-nav { 58 + display: flex; 59 + gap: 1.5rem; 60 + } 61 + 62 + .nav-link { 63 + text-decoration: none; 64 + color: var(--text-secondary); 65 + font-weight: 500; 66 + font-size: 0.95rem; 67 + padding: 0.5rem 0.75rem; 68 + border-radius: 4px; 69 + transition: all 0.2s ease; 70 + } 71 + 72 + .nav-link:hover { 73 + color: var(--primary-color); 74 + background-color: var(--background); 75 + } 76 + 77 + .nav-link.active { 78 + color: var(--secondary-color); 79 + background-color: var(--background); 80 + font-weight: 600; 81 + } 82 + 83 + /* Main Content */ 84 + .main-content { 85 + max-width: var(--max-width); 86 + margin: 2rem auto; 87 + padding: 0 2rem; 88 + } 89 + 90 + .page-content { 91 
+ margin: 0; 92 + } 93 + 94 + .page-description { 95 + color: var(--text-secondary); 96 + margin-bottom: 1.5rem; 97 + font-style: italic; 98 + } 99 + 100 + /* Sections */ 101 + section { 102 + margin-bottom: 2rem; 103 + } 104 + 105 + h2 { 106 + font-size: 1.3rem; 107 + font-weight: 600; 108 + margin-bottom: 0.75rem; 109 + color: var(--primary-color); 110 + } 111 + 112 + h3 { 113 + font-size: 1.1rem; 114 + font-weight: 600; 115 + margin-bottom: 0.75rem; 116 + color: var(--primary-color); 117 + } 118 + 119 + /* Entries and Threads */ 120 + article { 121 + margin-bottom: 1.5rem; 122 + padding: 1rem; 123 + background-color: var(--surface); 124 + border-radius: 4px; 125 + border: 1px solid var(--border-color); 126 + } 127 + 128 + /* Timeline-style entries */ 129 + .timeline-entry { 130 + margin-bottom: 0.5rem; 131 + padding: 0.5rem 0.75rem; 132 + border: none; 133 + background: transparent; 134 + transition: background-color 0.2s ease; 135 + } 136 + 137 + .timeline-entry:hover { 138 + background-color: var(--surface); 139 + } 140 + 141 + .timeline-meta { 142 + display: inline-flex; 143 + gap: 0.5rem; 144 + align-items: center; 145 + font-size: 0.75rem; 146 + color: var(--text-secondary); 147 + margin-bottom: 0.25rem; 148 + } 149 + 150 + .timeline-time { 151 + font-family: 'SF Mono', Monaco, Consolas, 'Courier New', monospace; 152 + font-size: 0.75rem; 153 + color: var(--text-secondary); 154 + } 155 + 156 + .timeline-author { 157 + font-weight: 600; 158 + color: var(--primary-color); 159 + font-size: 0.8rem; 160 + text-decoration: none; 161 + } 162 + 163 + .timeline-author:hover { 164 + color: var(--secondary-color); 165 + text-decoration: underline; 166 + } 167 + 168 + .timeline-content { 169 + line-height: 1.4; 170 + } 171 + 172 + .timeline-title { 173 + font-size: 0.95rem; 174 + font-weight: 600; 175 + } 176 + 177 + .timeline-title a { 178 + color: var(--primary-color); 179 + text-decoration: none; 180 + } 181 + 182 + .timeline-title a:hover { 183 + color: 
var(--secondary-color); 184 + text-decoration: underline; 185 + } 186 + 187 + .timeline-summary { 188 + color: var(--text-secondary); 189 + font-size: 0.9rem; 190 + line-height: 1.4; 191 + } 192 + 193 + /* Legacy styles for other sections */ 194 + .entry-meta, .thread-header { 195 + display: flex; 196 + gap: 1rem; 197 + align-items: center; 198 + margin-bottom: 0.5rem; 199 + font-size: 0.85rem; 200 + color: var(--text-secondary); 201 + } 202 + 203 + .author { 204 + font-weight: 600; 205 + color: var(--primary-color); 206 + } 207 + 208 + time { 209 + font-size: 0.85rem; 210 + } 211 + 212 + h4 { 213 + font-size: 1.1rem; 214 + font-weight: 600; 215 + margin-bottom: 0.5rem; 216 + } 217 + 218 + h4 a { 219 + color: var(--primary-color); 220 + text-decoration: none; 221 + } 222 + 223 + h4 a:hover { 224 + color: var(--secondary-color); 225 + text-decoration: underline; 226 + } 227 + 228 + .entry-summary { 229 + color: var(--text-primary); 230 + line-height: 1.5; 231 + margin-top: 0.5rem; 232 + } 233 + 234 + /* Enhanced Threading Styles */ 235 + 236 + /* Conversation Clusters */ 237 + .conversation-cluster { 238 + background-color: var(--background); 239 + border: 2px solid var(--border-color); 240 + border-radius: 8px; 241 + margin-bottom: 2rem; 242 + overflow: hidden; 243 + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05); 244 + } 245 + 246 + .conversation-header { 247 + background: linear-gradient(135deg, var(--surface) 0%, #f1f3f4 100%); 248 + padding: 0.75rem 1rem; 249 + border-bottom: 1px solid var(--border-color); 250 + } 251 + 252 + .conversation-meta { 253 + display: flex; 254 + justify-content: space-between; 255 + align-items: center; 256 + flex-wrap: wrap; 257 + gap: 0.5rem; 258 + } 259 + 260 + .conversation-count { 261 + font-weight: 600; 262 + color: var(--secondary-color); 263 + font-size: 0.9rem; 264 + } 265 + 266 + .conversation-participants { 267 + font-size: 0.8rem; 268 + color: var(--text-secondary); 269 + flex: 1; 270 + text-align: right; 271 + } 272 + 273 + 
.conversation-flow { 274 + padding: 0.5rem; 275 + } 276 + 277 + /* Threaded Conversation Entries */ 278 + .conversation-entry { 279 + position: relative; 280 + margin-bottom: 0.75rem; 281 + display: flex; 282 + align-items: flex-start; 283 + } 284 + 285 + .conversation-entry.level-0 { 286 + margin-left: 0; 287 + } 288 + 289 + .conversation-entry.level-1 { 290 + margin-left: 1.5rem; 291 + } 292 + 293 + .conversation-entry.level-2 { 294 + margin-left: 3rem; 295 + } 296 + 297 + .conversation-entry.level-3 { 298 + margin-left: 4.5rem; 299 + } 300 + 301 + .conversation-entry.level-4 { 302 + margin-left: 6rem; 303 + } 304 + 305 + .entry-connector { 306 + width: 3px; 307 + background-color: var(--secondary-color); 308 + margin-right: 0.75rem; 309 + margin-top: 0.25rem; 310 + min-height: 2rem; 311 + border-radius: 2px; 312 + opacity: 0.6; 313 + } 314 + 315 + .conversation-entry.level-0 .entry-connector { 316 + background-color: var(--accent-color); 317 + opacity: 0.8; 318 + } 319 + 320 + .entry-content { 321 + flex: 1; 322 + background-color: var(--surface); 323 + padding: 0.75rem; 324 + border-radius: 6px; 325 + border: 1px solid var(--border-color); 326 + transition: all 0.2s ease; 327 + } 328 + 329 + .entry-content:hover { 330 + border-color: var(--secondary-color); 331 + box-shadow: 0 2px 8px rgba(52, 152, 219, 0.1); 332 + } 333 + 334 + /* Reference Indicators */ 335 + .reference-indicators { 336 + display: inline-flex; 337 + gap: 0.25rem; 338 + margin-left: 0.5rem; 339 + } 340 + 341 + .ref-out, .ref-in { 342 + display: inline-block; 343 + width: 1rem; 344 + height: 1rem; 345 + border-radius: 50%; 346 + text-align: center; 347 + line-height: 1rem; 348 + font-size: 0.7rem; 349 + font-weight: bold; 350 + } 351 + 352 + .ref-out { 353 + background-color: #e8f5e8; 354 + color: #2d8f2d; 355 + } 356 + 357 + .ref-in { 358 + background-color: #e8f0ff; 359 + color: #1f5fbf; 360 + } 361 + 362 + /* Reference Badges for Individual Posts */ 363 + .timeline-entry.with-references { 
364 + background-color: var(--surface); 365 + } 366 + 367 + /* Conversation posts in unified timeline */ 368 + .timeline-entry.conversation-post { 369 + background: transparent; 370 + border: none; 371 + margin-bottom: 0.5rem; 372 + padding: 0.5rem 0.75rem; 373 + } 374 + 375 + .timeline-entry.conversation-post.level-0 { 376 + margin-left: 0; 377 + border-left: 2px solid var(--accent-color); 378 + padding-left: 0.75rem; 379 + } 380 + 381 + .timeline-entry.conversation-post.level-1 { 382 + margin-left: 1.5rem; 383 + border-left: 2px solid var(--secondary-color); 384 + padding-left: 0.75rem; 385 + } 386 + 387 + .timeline-entry.conversation-post.level-2 { 388 + margin-left: 3rem; 389 + border-left: 2px solid var(--text-secondary); 390 + padding-left: 0.75rem; 391 + } 392 + 393 + .timeline-entry.conversation-post.level-3 { 394 + margin-left: 4.5rem; 395 + border-left: 2px solid var(--text-secondary); 396 + padding-left: 0.75rem; 397 + } 398 + 399 + .timeline-entry.conversation-post.level-4 { 400 + margin-left: 6rem; 401 + border-left: 2px solid var(--text-secondary); 402 + padding-left: 0.75rem; 403 + } 404 + 405 + /* Cross-thread linking */ 406 + .cross-thread-links { 407 + margin-top: 0.5rem; 408 + padding-top: 0.5rem; 409 + border-top: 1px solid var(--border-color); 410 + } 411 + 412 + .cross-thread-indicator { 413 + font-size: 0.75rem; 414 + color: var(--text-secondary); 415 + background-color: var(--surface); 416 + padding: 0.25rem 0.5rem; 417 + border-radius: 12px; 418 + border: 1px solid var(--border-color); 419 + display: inline-block; 420 + } 421 + 422 + /* Inline shared references styling */ 423 + .inline-shared-refs { 424 + margin-left: 0.5rem; 425 + font-size: 0.85rem; 426 + color: var(--text-secondary); 427 + } 428 + 429 + .shared-ref-link { 430 + color: var(--primary-color); 431 + text-decoration: none; 432 + font-weight: 500; 433 + transition: color 0.2s ease; 434 + } 435 + 436 + .shared-ref-link:hover { 437 + color: var(--secondary-color); 438 + 
text-decoration: underline; 439 + } 440 + 441 + .shared-ref-more { 442 + font-style: italic; 443 + color: var(--text-secondary); 444 + font-size: 0.8rem; 445 + margin-left: 0.25rem; 446 + } 447 + 448 + .user-anchor, .post-anchor { 449 + position: absolute; 450 + margin-top: -60px; /* Offset for fixed header */ 451 + pointer-events: none; 452 + } 453 + 454 + .cross-thread-link { 455 + color: var(--primary-color); 456 + text-decoration: none; 457 + font-weight: 500; 458 + transition: color 0.2s ease; 459 + } 460 + 461 + .cross-thread-link:hover { 462 + color: var(--secondary-color); 463 + text-decoration: underline; 464 + } 465 + 466 + .reference-badges { 467 + display: flex; 468 + gap: 0.25rem; 469 + margin-left: 0.5rem; 470 + flex-wrap: wrap; 471 + } 472 + 473 + .ref-badge { 474 + display: inline-block; 475 + padding: 0.1rem 0.4rem; 476 + border-radius: 12px; 477 + font-size: 0.7rem; 478 + font-weight: 600; 479 + text-transform: uppercase; 480 + letter-spacing: 0.05em; 481 + } 482 + 483 + .ref-badge.ref-outbound { 484 + background-color: #e8f5e8; 485 + color: #2d8f2d; 486 + border: 1px solid #c3e6c3; 487 + } 488 + 489 + .ref-badge.ref-inbound { 490 + background-color: #e8f0ff; 491 + color: #1f5fbf; 492 + border: 1px solid #b3d9ff; 493 + } 494 + 495 + /* Author Color Coding */ 496 + .timeline-author { 497 + position: relative; 498 + } 499 + 500 + .timeline-author::before { 501 + content: ''; 502 + display: inline-block; 503 + width: 8px; 504 + height: 8px; 505 + border-radius: 50%; 506 + margin-right: 0.5rem; 507 + background-color: var(--secondary-color); 508 + } 509 + 510 + /* Generate consistent colors for authors */ 511 + .author-avsm::before { background-color: #e74c3c; } 512 + .author-mort::before { background-color: #3498db; } 513 + .author-mte::before { background-color: #2ecc71; } 514 + .author-ryan::before { background-color: #f39c12; } 515 + .author-mwd::before { background-color: #9b59b6; } 516 + .author-dra::before { background-color: #1abc9c; } 517 + 
.author-pf341::before { background-color: #34495e; } 518 + .author-sadiqj::before { background-color: #e67e22; } 519 + .author-martinkl::before { background-color: #8e44ad; } 520 + .author-jonsterling::before { background-color: #27ae60; } 521 + .author-jon::before { background-color: #f1c40f; } 522 + .author-onkar::before { background-color: #e91e63; } 523 + .author-gabriel::before { background-color: #00bcd4; } 524 + .author-jess::before { background-color: #ff5722; } 525 + .author-ibrahim::before { background-color: #607d8b; } 526 + .author-andres::before { background-color: #795548; } 527 + .author-eeg::before { background-color: #ff9800; } 528 + 529 + /* Section Headers */ 530 + .conversations-section h3, 531 + .referenced-posts-section h3, 532 + .individual-posts-section h3 { 533 + border-bottom: 2px solid var(--border-color); 534 + padding-bottom: 0.5rem; 535 + margin-bottom: 1.5rem; 536 + position: relative; 537 + } 538 + 539 + .conversations-section h3::before { 540 + content: "💬"; 541 + margin-right: 0.5rem; 542 + } 543 + 544 + .referenced-posts-section h3::before { 545 + content: "🔗"; 546 + margin-right: 0.5rem; 547 + } 548 + 549 + .individual-posts-section h3::before { 550 + content: "📝"; 551 + margin-right: 0.5rem; 552 + } 553 + 554 + /* Legacy thread styles (for backward compatibility) */ 555 + .thread { 556 + background-color: var(--background); 557 + border: 1px solid var(--border-color); 558 + padding: 0; 559 + overflow: hidden; 560 + margin-bottom: 1rem; 561 + } 562 + 563 + .thread-header { 564 + background-color: var(--surface); 565 + padding: 0.5rem 0.75rem; 566 + border-bottom: 1px solid var(--border-color); 567 + } 568 + 569 + .thread-count { 570 + font-weight: 600; 571 + color: var(--secondary-color); 572 + } 573 + 574 + .thread-entry { 575 + padding: 0.5rem 0.75rem; 576 + border-bottom: 1px solid var(--border-color); 577 + } 578 + 579 + .thread-entry:last-child { 580 + border-bottom: none; 581 + } 582 + 583 + .thread-entry.reply { 584 + 
margin-left: var(--thread-indent); 585 + border-left: 3px solid var(--secondary-color); 586 + background-color: var(--surface); 587 + } 588 + 589 + /* Links Section */ 590 + .link-group { 591 + background-color: var(--background); 592 + } 593 + 594 + .link-url { 595 + font-size: 1rem; 596 + word-break: break-word; 597 + } 598 + 599 + .link-url a { 600 + color: var(--secondary-color); 601 + text-decoration: none; 602 + } 603 + 604 + .link-url a:hover { 605 + text-decoration: underline; 606 + } 607 + 608 + .target-user { 609 + font-size: 0.9rem; 610 + color: var(--text-secondary); 611 + font-weight: normal; 612 + } 613 + 614 + .referencing-entries { 615 + margin-top: 0.75rem; 616 + } 617 + 618 + .ref-count { 619 + font-weight: 600; 620 + color: var(--text-secondary); 621 + font-size: 0.9rem; 622 + } 623 + 624 + .referencing-entries ul { 625 + list-style: none; 626 + margin-top: 0.5rem; 627 + padding-left: 1rem; 628 + } 629 + 630 + .referencing-entries li { 631 + margin-bottom: 0.25rem; 632 + font-size: 0.9rem; 633 + } 634 + 635 + .referencing-entries .more { 636 + font-style: italic; 637 + color: var(--text-secondary); 638 + } 639 + 640 + /* Users Section */ 641 + .user-card { 642 + background-color: var(--background); 643 + } 644 + 645 + .user-header { 646 + display: flex; 647 + gap: 1rem; 648 + align-items: start; 649 + margin-bottom: 1rem; 650 + } 651 + 652 + .user-icon { 653 + width: 48px; 654 + height: 48px; 655 + border-radius: 50%; 656 + object-fit: cover; 657 + } 658 + 659 + .user-info h3 { 660 + margin-bottom: 0.25rem; 661 + } 662 + 663 + .username { 664 + font-size: 0.9rem; 665 + color: var(--text-secondary); 666 + font-weight: normal; 667 + } 668 + 669 + .user-meta { 670 + font-size: 0.9rem; 671 + color: var(--text-secondary); 672 + } 673 + 674 + .user-meta a { 675 + color: var(--secondary-color); 676 + text-decoration: none; 677 + } 678 + 679 + .user-meta a:hover { 680 + text-decoration: underline; 681 + } 682 + 683 + .separator { 684 + margin: 0 0.5rem; 
685 + } 686 + 687 + .post-count { 688 + font-weight: 600; 689 + } 690 + 691 + .user-recent h4 { 692 + font-size: 0.95rem; 693 + margin-bottom: 0.5rem; 694 + color: var(--text-secondary); 695 + } 696 + 697 + .user-recent ul { 698 + list-style: none; 699 + padding-left: 0; 700 + } 701 + 702 + .user-recent li { 703 + margin-bottom: 0.25rem; 704 + font-size: 0.9rem; 705 + } 706 + 707 + /* Footer */ 708 + .site-footer { 709 + max-width: var(--max-width); 710 + margin: 3rem auto 2rem; 711 + padding: 1rem 2rem; 712 + text-align: center; 713 + color: var(--text-secondary); 714 + font-size: 0.85rem; 715 + border-top: 1px solid var(--border-color); 716 + } 717 + 718 + .site-footer a { 719 + color: var(--secondary-color); 720 + text-decoration: none; 721 + } 722 + 723 + .site-footer a:hover { 724 + text-decoration: underline; 725 + } 726 + 727 + /* Responsive */ 728 + @media (max-width: 768px) { 729 + .site-title { 730 + font-size: 1.3rem; 731 + } 732 + 733 + .header-content { 734 + flex-direction: column; 735 + gap: 0.75rem; 736 + align-items: flex-start; 737 + } 738 + 739 + .site-nav { 740 + gap: 1rem; 741 + } 742 + 743 + .main-content { 744 + padding: 0 1rem; 745 + } 746 + 747 + .thread-entry.reply { 748 + margin-left: calc(var(--thread-indent) / 2); 749 + } 750 + 751 + .user-header { 752 + flex-direction: column; 753 + } 754 + }
+141
src/thicket/templates/timeline.html
{% extends "base.html" %}
{# Unified timeline: interleaves individual posts and conversation threads.
   `seen_users` tracks which usernames already received an anchor so each
   user anchor is emitted only once, at their first appearance. #}

{% block page_title %}Timeline - {{ title }}{% endblock %}

{% block content %}
{% set seen_users = [] %}
<div class="page-content">
    <h2>Recent Posts & Conversations</h2>

    <section class="unified-timeline">
        {% for item in timeline_items %}
        {% if item.type == "post" %}
        <!-- Individual Post -->
        <article class="timeline-entry {% if item.content.references %}with-references{% endif %}">
            <div class="timeline-meta">
                <time datetime="{{ item.content.entry.updated or item.content.entry.published }}" class="timeline-time">
                    {{ (item.content.entry.updated or item.content.entry.published).strftime('%Y-%m-%d %H:%M') }}
                </time>
                {% set homepage = get_user_homepage(item.content.username) %}
                {% if item.content.username not in seen_users %}
                <a id="{{ item.content.username }}" class="user-anchor"></a>
                {% set _ = seen_users.append(item.content.username) %}
                {% endif %}
                <a id="post-{{ loop.index0 }}-{{ safe_anchor_id(item.content.entry.id) }}" class="post-anchor"></a>
                {# rel="noopener noreferrer" on all _blank links prevents reverse tabnabbing #}
                {% if homepage %}
                <a href="{{ homepage }}" target="_blank" rel="noopener noreferrer" class="timeline-author">{{ item.content.display_name }}</a>
                {% else %}
                <span class="timeline-author">{{ item.content.display_name }}</span>
                {% endif %}
                {% if item.content.references %}
                <div class="reference-badges">
                    {% for ref in item.content.references %}
                    {% if ref.type == 'outbound' %}
                    <span class="ref-badge ref-outbound" title="References {{ ref.target_username or 'external post' }}">
                        → {{ ref.target_username or 'ext' }}
                    </span>
                    {% elif ref.type == 'inbound' %}
                    <span class="ref-badge ref-inbound" title="Referenced by {{ ref.source_username or 'external post' }}">
                        ← {{ ref.source_username or 'ext' }}
                    </span>
                    {% endif %}
                    {% endfor %}
                </div>
                {% endif %}
            </div>
            <div class="timeline-content">
                <strong class="timeline-title">
                    <a href="{{ item.content.entry.link }}" target="_blank" rel="noopener noreferrer">{{ item.content.entry.title }}</a>
                </strong>
                {% if item.content.entry.summary %}
                <span class="timeline-summary">— {{ clean_html_summary(item.content.entry.summary, 250) }}</span>
                {% endif %}
                {% if item.content.shared_references %}
                <span class="inline-shared-refs">
                    {% for ref in item.content.shared_references[:3] %}
                    {% if ref.target_username %}
                    <a href="#{{ ref.target_username }}" class="shared-ref-link" title="Referenced by {{ ref.count }} entries">@{{ ref.target_username }}</a>{% if not loop.last %}, {% endif %}
                    {% endif %}
                    {% endfor %}
                    {% if item.content.shared_references|length > 3 %}
                    <span class="shared-ref-more">+{{ item.content.shared_references|length - 3 }} more</span>
                    {% endif %}
                </span>
                {% endif %}
                {% if item.content.cross_thread_links %}
                <div class="cross-thread-links">
                    <span class="cross-thread-indicator">🔗 Also appears: </span>
                    {% for link in item.content.cross_thread_links %}
                    <a href="#{{ link.anchor_id }}" class="cross-thread-link" title="{{ link.title }}">{{ link.context }}</a>{% if not loop.last %}, {% endif %}
                    {% endfor %}
                </div>
                {% endif %}
            </div>
        </article>

        {% elif item.type == "thread" %}
        <!-- Conversation Thread -->
        {% set outer_loop_index = loop.index0 %}
        {% for thread_item in item.content %}
        <article class="timeline-entry conversation-post level-{{ thread_item.thread_level }}">
            <div class="timeline-meta">
                <time datetime="{{ thread_item.entry.updated or thread_item.entry.published }}" class="timeline-time">
                    {{ (thread_item.entry.updated or thread_item.entry.published).strftime('%Y-%m-%d %H:%M') }}
                </time>
                {% set homepage = get_user_homepage(thread_item.username) %}
                {% if thread_item.username not in seen_users %}
                <a id="{{ thread_item.username }}" class="user-anchor"></a>
                {% set _ = seen_users.append(thread_item.username) %}
                {% endif %}
                <a id="post-{{ outer_loop_index }}-{{ loop.index0 }}-{{ safe_anchor_id(thread_item.entry.id) }}" class="post-anchor"></a>
                {% if homepage %}
                <a href="{{ homepage }}" target="_blank" rel="noopener noreferrer" class="timeline-author author-{{ thread_item.username }}">{{ thread_item.display_name }}</a>
                {% else %}
                <span class="timeline-author author-{{ thread_item.username }}">{{ thread_item.display_name }}</span>
                {% endif %}
                {% if thread_item.references_to or thread_item.referenced_by %}
                <span class="reference-indicators">
                    {% if thread_item.references_to %}
                    <span class="ref-out" title="References other posts">→</span>
                    {% endif %}
                    {% if thread_item.referenced_by %}
                    <span class="ref-in" title="Referenced by other posts">←</span>
                    {% endif %}
                </span>
                {% endif %}
            </div>
            <div class="timeline-content">
                <strong class="timeline-title">
                    <a href="{{ thread_item.entry.link }}" target="_blank" rel="noopener noreferrer">{{ thread_item.entry.title }}</a>
                </strong>
                {% if thread_item.entry.summary %}
                <span class="timeline-summary">— {{ clean_html_summary(thread_item.entry.summary, 300) }}</span>
                {% endif %}
                {% if thread_item.shared_references %}
                <span class="inline-shared-refs">
                    {% for ref in thread_item.shared_references[:3] %}
                    {% if ref.target_username %}
                    <a href="#{{ ref.target_username }}" class="shared-ref-link" title="Referenced by {{ ref.count }} entries">@{{ ref.target_username }}</a>{% if not loop.last %}, {% endif %}
                    {% endif %}
                    {% endfor %}
                    {% if thread_item.shared_references|length > 3 %}
                    <span class="shared-ref-more">+{{ thread_item.shared_references|length - 3 }} more</span>
                    {% endif %}
                </span>
                {% endif %}
                {% if thread_item.cross_thread_links %}
                <div class="cross-thread-links">
                    <span class="cross-thread-indicator">🔗 Also appears: </span>
                    {% for link in thread_item.cross_thread_links %}
                    <a href="#{{ link.anchor_id }}" class="cross-thread-link" title="{{ link.title }}">{{ link.context }}</a>{% if not loop.last %}, {% endif %}
                    {% endfor %}
                </div>
                {% endif %}
            </div>
        </article>
        {% endfor %}
        {% endif %}
        {% endfor %}
    </section>
</div>
{% endblock %}
+57
src/thicket/templates/users.html
··· 1 + {% extends "base.html" %} 2 + 3 + {% block page_title %}Users - {{ title }}{% endblock %} 4 + 5 + {% block content %} 6 + <div class="page-content"> 7 + <h2>Users</h2> 8 + <p class="page-description">All users contributing to this thicket, ordered by post count.</p> 9 + 10 + {% for user_info in users %} 11 + <article class="user-card"> 12 + <div class="user-header"> 13 + {% if user_info.metadata.icon and user_info.metadata.icon != "None" %} 14 + <img src="{{ user_info.metadata.icon }}" alt="{{ user_info.metadata.username }}" class="user-icon"> 15 + {% endif %} 16 + <div class="user-info"> 17 + <h3> 18 + {% if user_info.metadata.display_name %} 19 + {{ user_info.metadata.display_name }} 20 + <span class="username">({{ user_info.metadata.username }})</span> 21 + {% else %} 22 + {{ user_info.metadata.username }} 23 + {% endif %} 24 + </h3> 25 + <div class="user-meta"> 26 + {% if user_info.metadata.homepage %} 27 + <a href="{{ user_info.metadata.homepage }}" target="_blank">{{ user_info.metadata.homepage }}</a> 28 + {% endif %} 29 + {% if user_info.metadata.email %} 30 + <span class="separator">•</span> 31 + <a href="mailto:{{ user_info.metadata.email }}">{{ user_info.metadata.email }}</a> 32 + {% endif %} 33 + <span class="separator">•</span> 34 + <span class="post-count">{{ user_info.metadata.entry_count }} posts</span> 35 + </div> 36 + </div> 37 + </div> 38 + 39 + {% if user_info.recent_entries %} 40 + <div class="user-recent"> 41 + <h4>Recent posts:</h4> 42 + <ul> 43 + {% for display_name, entry in user_info.recent_entries %} 44 + <li> 45 + <a href="{{ entry.link }}" target="_blank">{{ entry.title }}</a> 46 + <time datetime="{{ entry.updated or entry.published }}"> 47 + ({{ (entry.updated or entry.published).strftime('%Y-%m-%d') }}) 48 + </time> 49 + </li> 50 + {% endfor %} 51 + </ul> 52 + </div> 53 + {% endif %} 54 + </article> 55 + {% endfor %} 56 + </div> 57 + {% endblock %}
tests/__init__.py

This is a binary file and will not be displayed.

-84
tests/conftest.py
··· 1 - """Test configuration and fixtures for thicket.""" 2 - 3 - import tempfile 4 - from pathlib import Path 5 - 6 - import pytest 7 - 8 - from thicket.models import ThicketConfig, UserConfig 9 - 10 - 11 - @pytest.fixture 12 - def temp_dir(): 13 - """Create a temporary directory for tests.""" 14 - with tempfile.TemporaryDirectory() as tmp_dir: 15 - yield Path(tmp_dir) 16 - 17 - 18 - @pytest.fixture 19 - def sample_config(temp_dir): 20 - """Create a sample configuration for testing.""" 21 - git_store = temp_dir / "git_store" 22 - cache_dir = temp_dir / "cache" 23 - 24 - return ThicketConfig( 25 - git_store=git_store, 26 - cache_dir=cache_dir, 27 - users=[ 28 - UserConfig( 29 - username="testuser", 30 - feeds=["https://example.com/feed.xml"], 31 - email="test@example.com", 32 - display_name="Test User", 33 - ) 34 - ], 35 - ) 36 - 37 - 38 - @pytest.fixture 39 - def sample_atom_feed(): 40 - """Sample Atom feed XML for testing.""" 41 - return """<?xml version="1.0" encoding="utf-8"?> 42 - <feed xmlns="http://www.w3.org/2005/Atom"> 43 - <title>Test Feed</title> 44 - <link href="https://example.com/"/> 45 - <updated>2025-01-01T00:00:00Z</updated> 46 - <author> 47 - <name>Test Author</name> 48 - <email>author@example.com</email> 49 - </author> 50 - <id>https://example.com/</id> 51 - 52 - <entry> 53 - <title>Test Entry</title> 54 - <link href="https://example.com/entry/1"/> 55 - <id>https://example.com/entry/1</id> 56 - <updated>2025-01-01T00:00:00Z</updated> 57 - <summary>This is a test entry.</summary> 58 - <content type="html"> 59 - <![CDATA[<p>This is the content of the test entry.</p>]]> 60 - </content> 61 - </entry> 62 - </feed>""" 63 - 64 - 65 - @pytest.fixture 66 - def sample_rss_feed(): 67 - """Sample RSS feed XML for testing.""" 68 - return """<?xml version="1.0" encoding="UTF-8"?> 69 - <rss version="2.0"> 70 - <channel> 71 - <title>Test RSS Feed</title> 72 - <link>https://example.com/</link> 73 - <description>Test RSS feed for testing</description> 74 - 
<managingEditor>editor@example.com</managingEditor> 75 - 76 - <item> 77 - <title>Test RSS Entry</title> 78 - <link>https://example.com/rss/entry/1</link> 79 - <description>This is a test RSS entry.</description> 80 - <pubDate>Mon, 01 Jan 2025 00:00:00 GMT</pubDate> 81 - <guid>https://example.com/rss/entry/1</guid> 82 - </item> 83 - </channel> 84 - </rss>"""
-131
tests/test_feed_parser.py
··· 1 - """Tests for feed parser functionality.""" 2 - 3 - from pydantic import HttpUrl 4 - 5 - from thicket.core.feed_parser import FeedParser 6 - from thicket.models import AtomEntry, FeedMetadata 7 - 8 - 9 - class TestFeedParser: 10 - """Test the FeedParser class.""" 11 - 12 - def test_init(self): 13 - """Test parser initialization.""" 14 - parser = FeedParser() 15 - assert parser.user_agent == "thicket/0.1.0" 16 - assert "a" in parser.allowed_tags 17 - assert "href" in parser.allowed_attributes["a"] 18 - 19 - def test_parse_atom_feed(self, sample_atom_feed): 20 - """Test parsing an Atom feed.""" 21 - parser = FeedParser() 22 - metadata, entries = parser.parse_feed(sample_atom_feed) 23 - 24 - # Check metadata 25 - assert isinstance(metadata, FeedMetadata) 26 - assert metadata.title == "Test Feed" 27 - assert metadata.author_name == "Test Author" 28 - assert metadata.author_email == "author@example.com" 29 - assert metadata.link == HttpUrl("https://example.com/") 30 - 31 - # Check entries 32 - assert len(entries) == 1 33 - entry = entries[0] 34 - assert isinstance(entry, AtomEntry) 35 - assert entry.title == "Test Entry" 36 - assert entry.id == "https://example.com/entry/1" 37 - assert entry.link == HttpUrl("https://example.com/entry/1") 38 - assert entry.summary == "This is a test entry." 
39 - assert "<p>This is the content of the test entry.</p>" in entry.content 40 - 41 - def test_parse_rss_feed(self, sample_rss_feed): 42 - """Test parsing an RSS feed.""" 43 - parser = FeedParser() 44 - metadata, entries = parser.parse_feed(sample_rss_feed) 45 - 46 - # Check metadata 47 - assert isinstance(metadata, FeedMetadata) 48 - assert metadata.title == "Test RSS Feed" 49 - assert metadata.link == HttpUrl("https://example.com/") 50 - assert metadata.author_email == "editor@example.com" 51 - 52 - # Check entries 53 - assert len(entries) == 1 54 - entry = entries[0] 55 - assert isinstance(entry, AtomEntry) 56 - assert entry.title == "Test RSS Entry" 57 - assert entry.id == "https://example.com/rss/entry/1" 58 - assert entry.summary == "This is a test RSS entry." 59 - 60 - def test_sanitize_entry_id(self): 61 - """Test entry ID sanitization.""" 62 - parser = FeedParser() 63 - 64 - # Test URL ID 65 - url_id = "https://example.com/posts/2025/01/test-post" 66 - sanitized = parser.sanitize_entry_id(url_id) 67 - assert sanitized == "posts_2025_01_test-post" 68 - 69 - # Test problematic characters 70 - bad_id = "test/with\\bad:chars|and<more>" 71 - sanitized = parser.sanitize_entry_id(bad_id) 72 - assert sanitized == "test_with_bad_chars_and_more_" 73 - 74 - # Test empty ID 75 - empty_id = "" 76 - sanitized = parser.sanitize_entry_id(empty_id) 77 - assert sanitized == "entry" 78 - 79 - # Test very long ID 80 - long_id = "a" * 300 81 - sanitized = parser.sanitize_entry_id(long_id) 82 - assert len(sanitized) == 200 83 - 84 - def test_sanitize_html(self): 85 - """Test HTML sanitization.""" 86 - parser = FeedParser() 87 - 88 - # Test allowed tags 89 - safe_html = "<p>This is <strong>safe</strong> HTML</p>" 90 - sanitized = parser._sanitize_html(safe_html) 91 - assert sanitized == safe_html 92 - 93 - # Test dangerous tags 94 - dangerous_html = "<script>alert('xss')</script><p>Safe content</p>" 95 - sanitized = parser._sanitize_html(dangerous_html) 96 - assert "<script>" 
not in sanitized 97 - assert "<p>Safe content</p>" in sanitized 98 - 99 - # Test attributes 100 - html_with_attrs = '<a href="https://example.com" onclick="alert()">Link</a>' 101 - sanitized = parser._sanitize_html(html_with_attrs) 102 - assert 'href="https://example.com"' in sanitized 103 - assert 'onclick' not in sanitized 104 - 105 - def test_extract_feed_metadata(self): 106 - """Test feed metadata extraction.""" 107 - parser = FeedParser() 108 - 109 - # Test with feedparser parsed data 110 - import feedparser 111 - parsed = feedparser.parse("""<?xml version="1.0" encoding="utf-8"?> 112 - <feed xmlns="http://www.w3.org/2005/Atom"> 113 - <title>Test Feed</title> 114 - <link href="https://example.com/"/> 115 - <author> 116 - <name>Test Author</name> 117 - <email>author@example.com</email> 118 - <uri>https://example.com/about</uri> 119 - </author> 120 - <logo>https://example.com/logo.png</logo> 121 - <icon>https://example.com/icon.png</icon> 122 - </feed>""") 123 - 124 - metadata = parser._extract_feed_metadata(parsed.feed) 125 - assert metadata.title == "Test Feed" 126 - assert metadata.author_name == "Test Author" 127 - assert metadata.author_email == "author@example.com" 128 - assert metadata.author_uri == HttpUrl("https://example.com/about") 129 - assert metadata.link == HttpUrl("https://example.com/") 130 - assert metadata.logo == HttpUrl("https://example.com/logo.png") 131 - assert metadata.icon == HttpUrl("https://example.com/icon.png")
-275
tests/test_git_store.py
··· 1 - """Tests for Git store functionality.""" 2 - 3 - import json 4 - from datetime import datetime 5 - 6 - from pydantic import HttpUrl 7 - 8 - from thicket.core.git_store import GitStore 9 - from thicket.models import AtomEntry, DuplicateMap, UserMetadata 10 - 11 - 12 - class TestGitStore: 13 - """Test the GitStore class.""" 14 - 15 - def test_init_new_repo(self, temp_dir): 16 - """Test initializing a new Git repository.""" 17 - repo_path = temp_dir / "test_repo" 18 - store = GitStore(repo_path) 19 - 20 - assert store.repo_path == repo_path 21 - assert store.repo is not None 22 - assert repo_path.exists() 23 - assert (repo_path / ".git").exists() 24 - assert (repo_path / "index.json").exists() 25 - assert (repo_path / "duplicates.json").exists() 26 - 27 - def test_init_existing_repo(self, temp_dir): 28 - """Test initializing with existing repository.""" 29 - repo_path = temp_dir / "test_repo" 30 - 31 - # Create first store 32 - store1 = GitStore(repo_path) 33 - store1.add_user("testuser", display_name="Test User") 34 - 35 - # Create second store pointing to same repo 36 - store2 = GitStore(repo_path) 37 - user = store2.get_user("testuser") 38 - 39 - assert user is not None 40 - assert user.username == "testuser" 41 - assert user.display_name == "Test User" 42 - 43 - def test_add_user(self, temp_dir): 44 - """Test adding a user to the Git store.""" 45 - store = GitStore(temp_dir / "test_repo") 46 - 47 - user = store.add_user( 48 - username="testuser", 49 - display_name="Test User", 50 - email="test@example.com", 51 - homepage="https://example.com", 52 - icon="https://example.com/icon.png", 53 - feeds=["https://example.com/feed.xml"], 54 - ) 55 - 56 - assert isinstance(user, UserMetadata) 57 - assert user.username == "testuser" 58 - assert user.display_name == "Test User" 59 - assert user.email == "test@example.com" 60 - assert user.homepage == "https://example.com" 61 - assert user.icon == "https://example.com/icon.png" 62 - assert user.feeds == 
["https://example.com/feed.xml"] 63 - assert user.directory == "testuser" 64 - 65 - # Check that user directory was created 66 - user_dir = store.repo_path / "testuser" 67 - assert user_dir.exists() 68 - 69 - # Check user exists in index 70 - stored_user = store.get_user("testuser") 71 - assert stored_user is not None 72 - assert stored_user.username == "testuser" 73 - assert stored_user.display_name == "Test User" 74 - 75 - def test_get_user(self, temp_dir): 76 - """Test getting user metadata.""" 77 - store = GitStore(temp_dir / "test_repo") 78 - 79 - # Add user 80 - store.add_user("testuser", display_name="Test User") 81 - 82 - # Get user 83 - user = store.get_user("testuser") 84 - assert user is not None 85 - assert user.username == "testuser" 86 - assert user.display_name == "Test User" 87 - 88 - # Try to get non-existent user 89 - non_user = store.get_user("nonexistent") 90 - assert non_user is None 91 - 92 - def test_store_entry(self, temp_dir): 93 - """Test storing an entry.""" 94 - store = GitStore(temp_dir / "test_repo") 95 - 96 - # Add user first 97 - store.add_user("testuser") 98 - 99 - # Create test entry 100 - entry = AtomEntry( 101 - id="https://example.com/entry/1", 102 - title="Test Entry", 103 - link=HttpUrl("https://example.com/entry/1"), 104 - updated=datetime.now(), 105 - summary="Test entry summary", 106 - content="<p>Test content</p>", 107 - ) 108 - 109 - # Store entry 110 - result = store.store_entry("testuser", entry) 111 - assert result is True 112 - 113 - # Check that entry file was created 114 - user_dir = store.repo_path / "testuser" 115 - entry_files = list(user_dir.glob("*.json")) 116 - entry_files = [f for f in entry_files if f.name != "metadata.json"] 117 - assert len(entry_files) == 1 118 - 119 - # Check entry content 120 - with open(entry_files[0]) as f: 121 - stored_entry = json.load(f) 122 - assert stored_entry["title"] == "Test Entry" 123 - assert stored_entry["id"] == "https://example.com/entry/1" 124 - 125 - def 
test_get_entry(self, temp_dir): 126 - """Test retrieving an entry.""" 127 - store = GitStore(temp_dir / "test_repo") 128 - 129 - # Add user and entry 130 - store.add_user("testuser") 131 - entry = AtomEntry( 132 - id="https://example.com/entry/1", 133 - title="Test Entry", 134 - link=HttpUrl("https://example.com/entry/1"), 135 - updated=datetime.now(), 136 - ) 137 - store.store_entry("testuser", entry) 138 - 139 - # Get entry 140 - retrieved = store.get_entry("testuser", "https://example.com/entry/1") 141 - assert retrieved is not None 142 - assert retrieved.title == "Test Entry" 143 - assert retrieved.id == "https://example.com/entry/1" 144 - 145 - # Try to get non-existent entry 146 - non_entry = store.get_entry("testuser", "https://example.com/nonexistent") 147 - assert non_entry is None 148 - 149 - def test_list_entries(self, temp_dir): 150 - """Test listing entries for a user.""" 151 - store = GitStore(temp_dir / "test_repo") 152 - 153 - # Add user 154 - store.add_user("testuser") 155 - 156 - # Add multiple entries 157 - for i in range(3): 158 - entry = AtomEntry( 159 - id=f"https://example.com/entry/{i}", 160 - title=f"Test Entry {i}", 161 - link=HttpUrl(f"https://example.com/entry/{i}"), 162 - updated=datetime.now(), 163 - ) 164 - store.store_entry("testuser", entry) 165 - 166 - # List all entries 167 - entries = store.list_entries("testuser") 168 - assert len(entries) == 3 169 - 170 - # List with limit 171 - limited = store.list_entries("testuser", limit=2) 172 - assert len(limited) == 2 173 - 174 - # List for non-existent user 175 - none_entries = store.list_entries("nonexistent") 176 - assert len(none_entries) == 0 177 - 178 - def test_duplicates(self, temp_dir): 179 - """Test duplicate management.""" 180 - store = GitStore(temp_dir / "test_repo") 181 - 182 - # Get initial duplicates (should be empty) 183 - duplicates = store.get_duplicates() 184 - assert isinstance(duplicates, DuplicateMap) 185 - assert len(duplicates.duplicates) == 0 186 - 187 - # Add 
duplicate 188 - store.add_duplicate("https://example.com/dup", "https://example.com/canonical") 189 - 190 - # Check duplicate was added 191 - duplicates = store.get_duplicates() 192 - assert len(duplicates.duplicates) == 1 193 - assert duplicates.is_duplicate("https://example.com/dup") 194 - assert duplicates.get_canonical("https://example.com/dup") == "https://example.com/canonical" 195 - 196 - # Remove duplicate 197 - result = store.remove_duplicate("https://example.com/dup") 198 - assert result is True 199 - 200 - # Check duplicate was removed 201 - duplicates = store.get_duplicates() 202 - assert len(duplicates.duplicates) == 0 203 - assert not duplicates.is_duplicate("https://example.com/dup") 204 - 205 - def test_search_entries(self, temp_dir): 206 - """Test searching entries.""" 207 - store = GitStore(temp_dir / "test_repo") 208 - 209 - # Add user 210 - store.add_user("testuser") 211 - 212 - # Add entries with different content 213 - entries_data = [ 214 - ("Test Python Programming", "Learning Python basics"), 215 - ("JavaScript Tutorial", "Advanced JavaScript concepts"), 216 - ("Python Web Development", "Building web apps with Python"), 217 - ] 218 - 219 - for title, summary in entries_data: 220 - entry = AtomEntry( 221 - id=f"https://example.com/entry/{title.lower().replace(' ', '-')}", 222 - title=title, 223 - link=HttpUrl(f"https://example.com/entry/{title.lower().replace(' ', '-')}"), 224 - updated=datetime.now(), 225 - summary=summary, 226 - ) 227 - store.store_entry("testuser", entry) 228 - 229 - # Search for Python entries 230 - results = store.search_entries("Python") 231 - assert len(results) == 2 232 - 233 - # Search for specific user 234 - results = store.search_entries("Python", username="testuser") 235 - assert len(results) == 2 236 - 237 - # Search with limit 238 - results = store.search_entries("Python", limit=1) 239 - assert len(results) == 1 240 - 241 - # Search for non-existent term 242 - results = store.search_entries("NonExistent") 243 - 
assert len(results) == 0 244 - 245 - def test_get_stats(self, temp_dir): 246 - """Test getting repository statistics.""" 247 - store = GitStore(temp_dir / "test_repo") 248 - 249 - # Get initial stats 250 - stats = store.get_stats() 251 - assert stats["total_users"] == 0 252 - assert stats["total_entries"] == 0 253 - assert stats["total_duplicates"] == 0 254 - 255 - # Add user and entries 256 - store.add_user("testuser") 257 - for i in range(3): 258 - entry = AtomEntry( 259 - id=f"https://example.com/entry/{i}", 260 - title=f"Test Entry {i}", 261 - link=HttpUrl(f"https://example.com/entry/{i}"), 262 - updated=datetime.now(), 263 - ) 264 - store.store_entry("testuser", entry) 265 - 266 - # Add duplicate 267 - store.add_duplicate("https://example.com/dup", "https://example.com/canonical") 268 - 269 - # Get updated stats 270 - stats = store.get_stats() 271 - assert stats["total_users"] == 1 272 - assert stats["total_entries"] == 3 273 - assert stats["total_duplicates"] == 1 274 - assert "last_updated" in stats 275 - assert "repository_size" in stats
-352
tests/test_models.py
··· 1 - """Tests for pydantic models.""" 2 - 3 - from datetime import datetime 4 - 5 - import pytest 6 - from pydantic import HttpUrl, ValidationError 7 - 8 - from thicket.models import ( 9 - AtomEntry, 10 - DuplicateMap, 11 - FeedMetadata, 12 - ThicketConfig, 13 - UserConfig, 14 - UserMetadata, 15 - ) 16 - 17 - 18 - class TestUserConfig: 19 - """Test UserConfig model.""" 20 - 21 - def test_valid_user_config(self): 22 - """Test creating valid user config.""" 23 - config = UserConfig( 24 - username="testuser", 25 - feeds=["https://example.com/feed.xml"], 26 - email="test@example.com", 27 - homepage="https://example.com", 28 - display_name="Test User", 29 - ) 30 - 31 - assert config.username == "testuser" 32 - assert len(config.feeds) == 1 33 - assert config.feeds[0] == HttpUrl("https://example.com/feed.xml") 34 - assert config.email == "test@example.com" 35 - assert config.display_name == "Test User" 36 - 37 - def test_invalid_email(self): 38 - """Test validation of invalid email.""" 39 - with pytest.raises(ValidationError): 40 - UserConfig( 41 - username="testuser", 42 - feeds=["https://example.com/feed.xml"], 43 - email="invalid-email", 44 - ) 45 - 46 - def test_invalid_feed_url(self): 47 - """Test validation of invalid feed URL.""" 48 - with pytest.raises(ValidationError): 49 - UserConfig( 50 - username="testuser", 51 - feeds=["not-a-url"], 52 - ) 53 - 54 - def test_optional_fields(self): 55 - """Test optional fields with None values.""" 56 - config = UserConfig( 57 - username="testuser", 58 - feeds=["https://example.com/feed.xml"], 59 - ) 60 - 61 - assert config.email is None 62 - assert config.homepage is None 63 - assert config.icon is None 64 - assert config.display_name is None 65 - 66 - 67 - class TestThicketConfig: 68 - """Test ThicketConfig model.""" 69 - 70 - def test_valid_config(self, temp_dir): 71 - """Test creating valid configuration.""" 72 - config = ThicketConfig( 73 - git_store=temp_dir / "git_store", 74 - cache_dir=temp_dir / "cache", 75 - 
users=[ 76 - UserConfig( 77 - username="testuser", 78 - feeds=["https://example.com/feed.xml"], 79 - ) 80 - ], 81 - ) 82 - 83 - assert config.git_store == temp_dir / "git_store" 84 - assert config.cache_dir == temp_dir / "cache" 85 - assert len(config.users) == 1 86 - assert config.users[0].username == "testuser" 87 - 88 - def test_find_user(self, temp_dir): 89 - """Test finding user by username.""" 90 - config = ThicketConfig( 91 - git_store=temp_dir / "git_store", 92 - cache_dir=temp_dir / "cache", 93 - users=[ 94 - UserConfig(username="user1", feeds=["https://example.com/feed1.xml"]), 95 - UserConfig(username="user2", feeds=["https://example.com/feed2.xml"]), 96 - ], 97 - ) 98 - 99 - user = config.find_user("user1") 100 - assert user is not None 101 - assert user.username == "user1" 102 - 103 - non_user = config.find_user("nonexistent") 104 - assert non_user is None 105 - 106 - def test_add_user(self, temp_dir): 107 - """Test adding a new user.""" 108 - config = ThicketConfig( 109 - git_store=temp_dir / "git_store", 110 - cache_dir=temp_dir / "cache", 111 - users=[], 112 - ) 113 - 114 - new_user = UserConfig( 115 - username="newuser", 116 - feeds=["https://example.com/feed.xml"], 117 - ) 118 - 119 - config.add_user(new_user) 120 - assert len(config.users) == 1 121 - assert config.users[0].username == "newuser" 122 - 123 - def test_add_feed_to_user(self, temp_dir): 124 - """Test adding feed to existing user.""" 125 - config = ThicketConfig( 126 - git_store=temp_dir / "git_store", 127 - cache_dir=temp_dir / "cache", 128 - users=[ 129 - UserConfig(username="testuser", feeds=["https://example.com/feed1.xml"]), 130 - ], 131 - ) 132 - 133 - result = config.add_feed_to_user("testuser", HttpUrl("https://example.com/feed2.xml")) 134 - assert result is True 135 - 136 - user = config.find_user("testuser") 137 - assert len(user.feeds) == 2 138 - assert HttpUrl("https://example.com/feed2.xml") in user.feeds 139 - 140 - # Test adding to non-existent user 141 - result = 
config.add_feed_to_user("nonexistent", HttpUrl("https://example.com/feed.xml")) 142 - assert result is False 143 - 144 - 145 - class TestAtomEntry: 146 - """Test AtomEntry model.""" 147 - 148 - def test_valid_entry(self): 149 - """Test creating valid Atom entry.""" 150 - entry = AtomEntry( 151 - id="https://example.com/entry/1", 152 - title="Test Entry", 153 - link=HttpUrl("https://example.com/entry/1"), 154 - updated=datetime.now(), 155 - published=datetime.now(), 156 - summary="Test summary", 157 - content="<p>Test content</p>", 158 - content_type="html", 159 - author={"name": "Test Author"}, 160 - categories=["test", "example"], 161 - ) 162 - 163 - assert entry.id == "https://example.com/entry/1" 164 - assert entry.title == "Test Entry" 165 - assert entry.summary == "Test summary" 166 - assert entry.content == "<p>Test content</p>" 167 - assert entry.content_type == "html" 168 - assert entry.author["name"] == "Test Author" 169 - assert "test" in entry.categories 170 - 171 - def test_minimal_entry(self): 172 - """Test creating minimal Atom entry.""" 173 - entry = AtomEntry( 174 - id="https://example.com/entry/1", 175 - title="Test Entry", 176 - link=HttpUrl("https://example.com/entry/1"), 177 - updated=datetime.now(), 178 - ) 179 - 180 - assert entry.id == "https://example.com/entry/1" 181 - assert entry.title == "Test Entry" 182 - assert entry.published is None 183 - assert entry.summary is None 184 - assert entry.content is None 185 - assert entry.content_type == "html" # default 186 - assert entry.author is None 187 - assert entry.categories == [] 188 - 189 - 190 - class TestDuplicateMap: 191 - """Test DuplicateMap model.""" 192 - 193 - def test_empty_duplicates(self): 194 - """Test empty duplicate map.""" 195 - dup_map = DuplicateMap() 196 - assert len(dup_map.duplicates) == 0 197 - assert not dup_map.is_duplicate("test") 198 - assert dup_map.get_canonical("test") == "test" 199 - 200 - def test_add_duplicate(self): 201 - """Test adding duplicate mapping.""" 
202 - dup_map = DuplicateMap() 203 - dup_map.add_duplicate("dup1", "canonical1") 204 - 205 - assert len(dup_map.duplicates) == 1 206 - assert dup_map.is_duplicate("dup1") 207 - assert dup_map.get_canonical("dup1") == "canonical1" 208 - assert dup_map.get_canonical("canonical1") == "canonical1" 209 - 210 - def test_remove_duplicate(self): 211 - """Test removing duplicate mapping.""" 212 - dup_map = DuplicateMap() 213 - dup_map.add_duplicate("dup1", "canonical1") 214 - 215 - result = dup_map.remove_duplicate("dup1") 216 - assert result is True 217 - assert len(dup_map.duplicates) == 0 218 - assert not dup_map.is_duplicate("dup1") 219 - 220 - # Test removing non-existent duplicate 221 - result = dup_map.remove_duplicate("nonexistent") 222 - assert result is False 223 - 224 - def test_get_duplicates_for_canonical(self): 225 - """Test getting all duplicates for a canonical ID.""" 226 - dup_map = DuplicateMap() 227 - dup_map.add_duplicate("dup1", "canonical1") 228 - dup_map.add_duplicate("dup2", "canonical1") 229 - dup_map.add_duplicate("dup3", "canonical2") 230 - 231 - dups = dup_map.get_duplicates_for_canonical("canonical1") 232 - assert len(dups) == 2 233 - assert "dup1" in dups 234 - assert "dup2" in dups 235 - 236 - dups = dup_map.get_duplicates_for_canonical("canonical2") 237 - assert len(dups) == 1 238 - assert "dup3" in dups 239 - 240 - dups = dup_map.get_duplicates_for_canonical("nonexistent") 241 - assert len(dups) == 0 242 - 243 - 244 - class TestFeedMetadata: 245 - """Test FeedMetadata model.""" 246 - 247 - def test_valid_metadata(self): 248 - """Test creating valid feed metadata.""" 249 - metadata = FeedMetadata( 250 - title="Test Feed", 251 - author_name="Test Author", 252 - author_email="author@example.com", 253 - author_uri=HttpUrl("https://example.com/author"), 254 - link=HttpUrl("https://example.com"), 255 - description="Test description", 256 - ) 257 - 258 - assert metadata.title == "Test Feed" 259 - assert metadata.author_name == "Test Author" 260 - 
assert metadata.author_email == "author@example.com" 261 - assert metadata.link == HttpUrl("https://example.com") 262 - 263 - def test_to_user_config(self): 264 - """Test converting metadata to user config.""" 265 - metadata = FeedMetadata( 266 - title="Test Feed", 267 - author_name="Test Author", 268 - author_email="author@example.com", 269 - author_uri=HttpUrl("https://example.com/author"), 270 - link=HttpUrl("https://example.com"), 271 - logo=HttpUrl("https://example.com/logo.png"), 272 - ) 273 - 274 - feed_url = HttpUrl("https://example.com/feed.xml") 275 - user_config = metadata.to_user_config("testuser", feed_url) 276 - 277 - assert user_config.username == "testuser" 278 - assert user_config.feeds == [feed_url] 279 - assert user_config.display_name == "Test Author" 280 - assert user_config.email == "author@example.com" 281 - assert user_config.homepage == HttpUrl("https://example.com/author") 282 - assert user_config.icon == HttpUrl("https://example.com/logo.png") 283 - 284 - def test_to_user_config_fallbacks(self): 285 - """Test fallback logic in to_user_config.""" 286 - metadata = FeedMetadata( 287 - title="Test Feed", 288 - link=HttpUrl("https://example.com"), 289 - icon=HttpUrl("https://example.com/icon.png"), 290 - ) 291 - 292 - feed_url = HttpUrl("https://example.com/feed.xml") 293 - user_config = metadata.to_user_config("testuser", feed_url) 294 - 295 - assert user_config.display_name == "Test Feed" # Falls back to title 296 - assert user_config.homepage == HttpUrl("https://example.com") # Falls back to link 297 - assert user_config.icon == HttpUrl("https://example.com/icon.png") 298 - assert user_config.email is None 299 - 300 - 301 - class TestUserMetadata: 302 - """Test UserMetadata model.""" 303 - 304 - def test_valid_metadata(self): 305 - """Test creating valid user metadata.""" 306 - now = datetime.now() 307 - metadata = UserMetadata( 308 - username="testuser", 309 - directory="testuser", 310 - created=now, 311 - last_updated=now, 312 - 
feeds=["https://example.com/feed.xml"], 313 - entry_count=5, 314 - ) 315 - 316 - assert metadata.username == "testuser" 317 - assert metadata.directory == "testuser" 318 - assert metadata.entry_count == 5 319 - assert len(metadata.feeds) == 1 320 - 321 - def test_update_timestamp(self): 322 - """Test updating timestamp.""" 323 - now = datetime.now() 324 - metadata = UserMetadata( 325 - username="testuser", 326 - directory="testuser", 327 - created=now, 328 - last_updated=now, 329 - ) 330 - 331 - original_time = metadata.last_updated 332 - metadata.update_timestamp() 333 - 334 - assert metadata.last_updated > original_time 335 - 336 - def test_increment_entry_count(self): 337 - """Test incrementing entry count.""" 338 - metadata = UserMetadata( 339 - username="testuser", 340 - directory="testuser", 341 - created=datetime.now(), 342 - last_updated=datetime.now(), 343 - entry_count=5, 344 - ) 345 - 346 - original_count = metadata.entry_count 347 - original_time = metadata.last_updated 348 - 349 - metadata.increment_entry_count(3) 350 - 351 - assert metadata.entry_count == original_count + 3 352 - assert metadata.last_updated > original_time
+82
uv.lock
··· 334 334 ] 335 335 336 336 [[package]] 337 + name = "jinja2" 338 + version = "3.1.6" 339 + source = { registry = "https://pypi.org/simple" } 340 + dependencies = [ 341 + { name = "markupsafe" }, 342 + ] 343 + sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } 344 + wheels = [ 345 + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, 346 + ] 347 + 348 + [[package]] 337 349 name = "markdown-it-py" 338 350 version = "3.0.0" 339 351 source = { registry = "https://pypi.org/simple" } ··· 343 355 sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } 344 356 wheels = [ 345 357 { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, 358 + ] 359 + 360 + [[package]] 361 + name = "markupsafe" 362 + version = "3.0.2" 363 + source = { registry = "https://pypi.org/simple" } 364 + sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = 
"2024-10-18T15:21:54.129Z" } 365 + wheels = [ 366 + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, 367 + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, 368 + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, 369 + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, 370 + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, 371 + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 
21977, upload-time = "2024-10-18T15:20:57.189Z" }, 372 + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, 373 + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, 374 + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, 375 + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, 376 + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, 377 + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, 378 + { url = 
"https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, 379 + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, 380 + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, 381 + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, 382 + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, 383 + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, 384 + { 
url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, 385 + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, 386 + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, 387 + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, 388 + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, 389 + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, 390 + { url = 
"https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, 391 + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, 392 + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, 393 + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, 394 + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, 395 + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, 396 + { url = 
"https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, 397 + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, 398 + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, 399 + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, 400 + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, 401 + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, 402 + 
{ url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, 403 + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, 404 + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, 405 + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, 406 + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, 407 + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, 408 + { url = 
"https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, 409 + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, 410 + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, 411 + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, 412 + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, 413 + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, 
414 + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, 415 + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, 416 + { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" }, 417 + { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" }, 418 + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" }, 419 + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" }, 420 + { url = 
"https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" }, 421 + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" }, 422 + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" }, 423 + { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" }, 424 + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" }, 425 + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" }, 346 426 ] 347 427 348 428 [[package]] ··· 875 955 { name = "feedparser" }, 876 956 { name = 
"gitpython" }, 877 957 { name = "httpx" }, 958 + { name = "jinja2" }, 878 959 { name = "pendulum" }, 879 960 { name = "platformdirs" }, 880 961 { name = "pydantic" }, ··· 903 984 { name = "feedparser", specifier = ">=6.0.11" }, 904 985 { name = "gitpython", specifier = ">=3.1.40" }, 905 986 { name = "httpx", specifier = ">=0.28.0" }, 987 + { name = "jinja2", specifier = ">=3.1.6" }, 906 988 { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.13.0" }, 907 989 { name = "pendulum", specifier = ">=3.0.0" }, 908 990 { name = "platformdirs", specifier = ">=4.0.0" },