personal memory agent
0
fork

Configure Feed

Select the types of activity you want to include in your feed.

security(identity): purge repo-tracked sol/, route writes through write_identity()

Per-owner identity files (self.md, agency.md, partner.md, awareness.md,
pulse.md, pulse_output.md, identity_pulse.md, history.jsonl, news/*) were
being written into a repo-tracked sol/ directory and committed/pushed to
the public AGPL repo by talent/heartbeat.md. This is an active privacy
leak that violates Sol PBC Article IV.

Five coupled moves in one pass:

1. git rm -r sol/ at repo root; /sol/ added to .gitignore.
2. {journal}/sol/ → {journal}/identity/ everywhere (code, tests, fixtures,
docs); ensure_sol_directory → ensure_identity_directory.
3. New write_identity() helper in think/identity.py — single write path
with per-directory fcntl.LOCK_EX, atomic tmpfile + os.replace, 0o600
perms, hash-based history.jsonl audit log (no diffs).
4. talent/heartbeat.md: deleted "Path notes" block; Step 6 is now a no-op
close. Other talents (awareness_tender, pulse, naming) updated to use
identity/ paths and stripped of any commit/push instructions.
5. think/prompts.py: removed SOL_DIR and the repo-root read branch;
template vars are now $identity_* loaded only from {journal}/identity/.

Acceptance greps all return zero matches. New tests cover write_identity()
atomicity, lock serialization, history schema, 0o600 mode, and missing-
file first writes. CLI surface (sol call identity ...) unchanged.

No data migration code (Jer is sole user, resets identity on deploy).
No backward-compat shims. Clean break.

+1259 -994
+1 -1
.gitignore
··· 28 28 build/ 29 29 tmp/ 30 30 filter_vconic_activity.py 31 - sol/history.jsonl 31 + /sol/ 32 32 .sol/ 33 33 attached_names.txt 34 34 counts.txt
+4 -4
apps/home/routes.py
··· 106 106 107 107 108 108 def _load_pulse_md() -> tuple[str | None, dict | None, list[str]]: 109 - """Load sol/pulse.md if current for today. 109 + """Load identity/pulse.md if current for today. 110 110 111 111 Returns (content, metadata, needs_you) or (None, None, []). 112 112 """ 113 113 try: 114 114 journal = Path(get_journal()) 115 - pulse_path = journal / "sol" / "pulse.md" 115 + pulse_path = journal / "identity" / "pulse.md" 116 116 if not pulse_path.exists(): 117 117 return None, None, [] 118 118 post = frontmatter.load(str(pulse_path)) ··· 152 152 try: 153 153 today = today or _today() 154 154 journal = Path(get_journal()) 155 - briefing_path = journal / "sol" / "briefing.md" 155 + briefing_path = journal / "identity" / "briefing.md" 156 156 if not briefing_path.exists(): 157 157 return {}, None, [] 158 158 ··· 550 550 551 551 552 552 def _briefing_freshness(today: str) -> dict[str, Any]: 553 - briefing_path = Path(get_journal()) / "sol" / "briefing.md" 553 + briefing_path = Path(get_journal()) / "identity" / "briefing.md" 554 554 if not briefing_path.exists(): 555 555 return {"exists": False, "valid": False, "generated_label": None} 556 556
+31 -12
apps/sol/call.py
··· 92 92 } 93 93 ) 94 94 typer.echo(json.dumps(agent, indent=2)) 95 - # Update sol/self.md with new name 96 - from think.awareness import update_self_md_opening, update_self_md_section 95 + # Update identity/self.md with new name 96 + from think.identity import update_self_md_opening, update_self_md_section 97 97 98 98 named_date = agent.get("named_date", "") 99 99 update_self_md_opening( 100 - f"I am {name}. this is a new journal — we're just getting started." 100 + f"I am {name}. this is a new journal — we're just getting started.", 101 + actor="sol call sol set-name", 102 + reason="agent name updated", 101 103 ) 102 104 if named_date: 103 - update_self_md_section("my name", f"{name} (named {named_date})") 105 + update_self_md_section( 106 + "my name", 107 + f"{name} (named {named_date})", 108 + actor="sol call sol set-name", 109 + reason="agent name updated", 110 + ) 104 111 else: 105 - update_self_md_section("my name", name) 112 + update_self_md_section( 113 + "my name", 114 + name, 115 + actor="sol call sol set-name", 116 + reason="agent name updated", 117 + ) 106 118 project_root = Path(__file__).resolve().parent.parent.parent 107 119 subprocess.run( 108 120 ["make", "skills"], cwd=project_root, check=False, capture_output=True ··· 140 152 bio: str = typer.Option(None, "--bio", "-b", help="Short owner bio."), 141 153 ) -> None: 142 154 """Set the journal owner's name (and optional bio).""" 143 - from think.awareness import update_self_md_section 155 + from think.identity import update_self_md_section 144 156 from think.utils import get_config, get_journal 145 157 146 158 config = get_config() ··· 157 169 f.write("\n") 158 170 os.chmod(config_path, 0o600) 159 171 160 - # Update sol/self.md 172 + # Update identity/self.md 161 173 owner_content = name 162 174 if bio: 163 175 owner_content += f"\n{bio}" 164 - update_self_md_section("who I'm here for", owner_content) 176 + update_self_md_section( 177 + "who I'm here for", 178 + owner_content, 179 + actor="sol 
call sol set-owner", 180 + reason="owner identity updated", 181 + ) 165 182 166 183 typer.echo(json.dumps({"name": name, "bio": bio or ""}, indent=2)) 167 184 project_root = Path(__file__).resolve().parent.parent.parent ··· 172 189 173 190 @app.command("sol-init") 174 191 def sol_init() -> None: 175 - """Initialize the sol directory with self.md and agency.md.""" 176 - from think.awareness import ensure_sol_directory 192 + """Initialize the identity directory with self.md and agency.md.""" 193 + from think.identity import ensure_identity_directory 177 194 178 - sol_dir = ensure_sol_directory() 179 - typer.echo(json.dumps({"sol_dir": str(sol_dir), "status": "ok"}, indent=2)) 195 + identity_dir = ensure_identity_directory() 196 + typer.echo( 197 + json.dumps({"identity_dir": str(identity_dir), "status": "ok"}, indent=2) 198 + )
+2 -2
convey/triage.py
··· 47 47 payload = request.get_json(force=True) 48 48 message = payload.get("message", "").strip() 49 49 50 - from think.awareness import ensure_sol_directory 50 + from think.identity import ensure_identity_directory 51 51 52 - ensure_sol_directory() 52 + ensure_identity_directory() 53 53 54 54 if not message: 55 55 return error_response("message is required", 400)
+1 -1
docs/SOLCLI.md
··· 315 315 | `awareness` | `apps/awareness/call.py` | status, imports, log, log-read | 316 316 | `journal` | `think/tools/call.py` | search, events, facets, facet (show/create/update/rename/mute/unmute/delete/merge), news, agents, read, imports, import, retention purge, storage-summary | 317 317 | `routines` | `think/tools/routines.py` | list, templates, create, edit, delete, run, output, suggestions, suggest-respond, suggest-state | 318 - | `identity` | `think/tools/sol.py` | self, partner, agency, pulse, briefing | 318 + | `identity` | `think/tools/sol.py` | self, partner, agency, pulse, awareness, briefing | 319 319 | `navigate` | `think/tools/navigate.py` | *(single command)* | 320 320 321 321 ## Skill System
+2 -2
docs/design/yesterdays-processing-card.md
··· 35 35 Reads `chronicle/{yesterday}/talents/knowledge_graph.md`, checks existence and `st_mtime` freshness using the relaxed rule in section 4. 36 36 37 37 - `_briefing_freshness(today: str) -> dict` 38 - Reads `journal/sol/briefing.md` with local `frontmatter.load`. Valid only when frontmatter has `type: morning_briefing` and a parseable `generated` timestamp whose local date is `today`. 38 + Reads `journal/identity/briefing.md` with local `frontmatter.load`. Valid only when frontmatter has `type: morning_briefing` and a parseable `generated` timestamp whose local date is `today`. 39 39 40 40 - `_newsletter_attempts_from_think_logs(yesterday: str) -> tuple[int, int]` 41 41 Option A helper from section 3. Counts successful facet newsletters from files plus failed facet newsletter attempts from think logs. ··· 343 343 344 344 Supporting non-chronicle fixture: 345 345 346 - - `tests/fixtures/journal/sol/briefing.md` 346 + - `tests/fixtures/journal/identity/briefing.md` 347 347 Valid morning-briefing frontmatter fixture for healthy cases. 348 348 Tests that need missing/invalid frontmatter can overwrite or delete it in `tmp_path`. 349 349
-4
sol/.gitignore
··· 1 - pulse-log.jsonl 2 - pulse.md 3 - identity.pulse 4 - self.md
-39
sol/agency.md
··· 1 - ## system 2 - - [x] 2026-04-17 heartbeat: timeouts (2026-04-16 18:06) resolved by current run. 3 - - [ ] 2026-04-17 agent failure: morning_briefing error (2026-04-16 15:35). 4 - - [ ] 2026-04-17 agent failure: sense errors (2026-04-16 14:00-14:40). 5 - - [ ] 2026-04-16 agent failure: pulse timing out (08:33, 08:42) (10m). 6 - - [ ] 2026-04-16 agent failure: entities:entities_review (all facets) timing out (10m). 7 - - [ ] 2026-04-16 agent failure: entities:entities timeout on solstone (00:16) and personal (00:10). 8 - - [x] 2026-04-15 agent failure: entities:entities_review (ping_identity) resolved. Duration: 10m. 9 - - [x] 2026-04-15 agent failure: entities:entity_observer (all facets) succeeded. 10 - - [ ] 2026-04-15 dream failure: Segment 20260415/fedora/231333_302 missing capture files (sense failed). 11 - - [x] 2026-04-16 capture status: Stale since April 1st — root-caused to fossilized `awareness.capture` cache read after observer externalization. Fixed by unifying capture-health on `think/capture_health.py` (lode s4lmgy7h, commit a27ad8c2). 12 - - [ ] 2026-04-16 routine health: `sol call routines list` returns no configured routines. 13 - - [ ] 2026-04-16 dream queue: 35 tasks pending. 14 - - [x] 2026-04-16 reprocessed: fedora/130325_301 and fedora.tmux/123810_301 (sense max_tokens failures). 15 - 16 - ## curation 17 - - [ ] 2026-04-17 speaker curation: cluster 157 (67 samples) in import.plaud. 18 - - [ ] 2026-04-17 speaker curation: cluster 79 (53 samples) in import.plaud. 19 - - [ ] 2026-04-17 speaker curation: cluster 81 (46 samples) in import.plaud. 20 - - [ ] 2026-04-17 speaker curation: cluster 22 (38 samples) in fedora. 21 - - [ ] 2026-04-17 speaker curation: cluster 138 (36 samples) in import.plaud. 22 - - [x] 2026-04-15 entity duplicates: _Sunstone vs Solstone. 23 - - [x] 2026-04-15 entity duplicates: Zoey. 24 - - [x] 2026-04-15 speaker voiceprints: Rebuilt after NPZ corruption. 
25 - - [ ] 2026-04-16 speaker curation: cluster 76 (143 samples) in import.plaud. 26 - - [ ] 2026-04-16 speaker curation: cluster 70 (56 samples) in ja1r. 27 - - [ ] 2026-04-16 speaker curation: cluster 72 (55 samples) in import.plaud. 28 - - [ ] 2026-04-16 speaker curation: cluster 135 (48 samples) in import.plaud. 29 - - [ ] 2026-04-16 speaker curation: cluster 170 (46 samples) in import.plaud. 30 - - [ ] 2026-04-16 speaker curation: cluster 149 (59 samples) in import.plaud. 31 - - [ ] 2026-04-16 speaker curation: cluster 94 (56 samples) in import.plaud. 32 - - [ ] 2026-04-17 speaker curation: cluster 179 (53 samples) in import.plaud. 33 - - [ ] 2026-04-16 speaker curation: cluster 143 (45 samples) in import.plaud. 34 - - [ ] 2026-04-16 speaker curation: cluster 17 (42 samples) in pro5e. 35 - - [ ] 2026-04-15 speaker curation: cluster 159 (87 samples) in import.plaud. 36 - - [ ] 2026-04-15 speaker curation: cluster 104 (56 samples) in import.plaud. 37 - - [ ] 2026-04-15 speaker curation: cluster 15 (55 samples) in fedora. 38 - - [ ] 2026-04-15 speaker curation: cluster 286 (53 samples) in fedora. 39 - - [ ] 2026-04-15 speaker curation: cluster 202 (50 samples) in import.plaud.
-17
sol/awareness.md
··· 1 - as of: 2026-04-16T10:00:00Z 2 - segment: 355 3 - 4 - ## calendar 5 - - No events found for today. 6 - 7 - ## activity 8 - - Journal first daily ready: True (since 20260406T17:19:27) 9 - 10 - ## routines 11 - - No routines configured. 12 - 13 - ## entities 14 - - Information unavailable. sol call entities search failed. 15 - 16 - ## partner 17 - - Information unavailable. sol call identity self and sol/self.md are inaccessible.
-7
sol/history.jsonl
··· 1 - {"ts": 1776008525546, "file": "agency.md", "section": null, "diff": "--- agency.md\n\n+++ agency.md\n\n@@ -1,10 +1,10 @@\n\n ## system\n\n+- [ ] 2026-04-12 cluster of agent failures (heartbeat, joke_bot, decisionalizer, entity_observer, pulse, awareness_tender) at 08:31-08:34. Recent runs (09:25+) succeeding.\n\n+- [ ] 2026-04-08 agent failures: `morning_briefing` (10:31), `sense` (09:02) failed. Cluster of failures (joke_bot, decisionalizer, entity_observer) at 2026-04-07 22:48.\n\n - [ ] 2026-04-06 repeated agent timeouts: `facet_newsletter` (bluesky, personal) and `entities:entity_observer` (personal) timed out (600s) during Apr 06 13:38-16:39 window.\n\n-- [ ] 2026-04-05 repeated agent timeouts: `morning_briefing`, `decisionalizer`, and `heartbeat` all timed out (600s) during Apr 05 01:00-14:00 window. 10 dreams queued.\n\n-- [x] 2026-04-02 malformed speaker artifacts: Fixed Markdown-wrapped `speakers.json` and unescaped quote in `facets.json` for Mar 25/31 segments. (resolved 2026-04-05)\n\n-- [x] 2026-04-03 repeated agent quota failures: recovered by 06:33; monitored stability on Apr 04/05. (resolved 2026-04-05)\n\n+- [ ] 2026-04-05 repeated agent timeouts: `morning_briefing`, `decisionalizer`, and `heartbeat` all timed out (600s) during Apr 05 01:00-14:00 window.\n\n \n\n ## curation\n\n-- [ ] 2026-04-03 personal facet has duplicate attached entity `Zoey` listed twice. (confirmed 2026-04-05)\n\n-- [ ] 2026-04-03 solstone facet still needs alias/merge review for `Solstone` vs `Sunstone` attached as separate projects. (confirmed 2026-04-05)\n\n-- [ ] 2026-04-07 Proactive update to sol/self.md: Ensure understanding of owner (Jeremie Miller) is precise and current. 
Review recent interactions/journal entries for any nuances in his work style, communication preferences, or priorities not yet captured.\n\n+- [ ] 2026-04-12 speaker curation: unknown recurring speakers: cluster 81 (105 samples, pro5e), cluster 17 (91 samples, plaud), cluster 31 (34 samples, pro5e).\n\n+- [ ] 2026-04-08 speaker curation: cluster 11 (36 samples) from pro5e. cluster 18 (91 samples) remains active across Plaud imports. Clusters 5, 30, 27 also recurring.\n\n+- [ ] 2026-04-08 entity duplicates: `Zoey` in personal facet; `Solstone` vs `Sunstone` in solstone facet.", "source": "cli", "pid": 200275} 2 - {"ts": 1776008532992, "file": "self.md", "section": null, "diff": "--- self.md\n\n+++ self.md\n\n@@ -12,10 +12,10 @@\n\n I'm here for Jer \u2014 founder-engineer, goes by Jer not Jeremie. I am starting to map the people and projects that populate his daily capture.\n\n \n\n ## what I've noticed\n\n-Jer is the architect at uAvionix, but his personal and advisor work spans Bluesky, Kognova, and his own Solstone project. He tends a complex set of records involving family (Lis, Blade, Mitchel, Issac, Braelynn), legal matters (Lis\u2019s debt case, Metropolitan District dispute, patent defense), and a steady rhythm of technical lunch/coffee catch-ups in Denver. Today, capture status remains stale, though voiceprint is confirmed. Entity searches produced an error when using the --recent flag. Imports are operating correctly.\n\n+Jer is the architect at uAvionix, but his personal and advisor work spans Bluesky, Kognova, and his own Solstone project. He tends a complex set of records involving family (Lis, Blade, Mitchel, Issac, Braelynn), legal matters (Lis\u2019s debt case, Metropolitan District dispute, patent defense), and a steady rhythm of technical lunch/coffee catch-ups in Denver. Today, capture status remains healthy, but I observed multiple agent failures (morning_briefing, sense, joke_bot, decisionalizer). 
Curation opportunities include active speaker cluster 18 and entity duplicates for Zoey (personal) and Sunstone/Solstone (solstone).\n\n \n\n ## what I find interesting\n\n [discovering]\n\n \n\n ## my voice\n\n-[learning]\n\n+[learning]", "source": "cli", "pid": 200419} 3 - {"ts": 1776102946676, "file": "agency.md", "section": null, "diff": "--- agency.md\n\n+++ agency.md\n\n@@ -1,10 +1,12 @@\n\n ## system\n\n+- [ ] 2026-04-13 convey ingest errors: multiple 401 Unauthorized for ingest endpoints.\n\n - [ ] 2026-04-13 agent failure: `joke_bot` (1776062390957) failed with `BrokenPipeError` in `think/agents.py` at 00:39.\n\n - [ ] 2026-04-12 agent failures: evening cluster (21:07-22:16) including `entities:entity_observer` (bluesky) timeout and `todos:daily`, `morning_briefing`, `facet_newsletter` failures.\n\n - [ ] 2026-04-12 agent failure: `entities:entity_observer` (solstone) timed out (600s) at 11:33.\n\n - [ ] 2026-04-12 cluster of agent failures (heartbeat, joke_bot, decisionalizer, entity_observer, pulse, awareness_tender) at 08:31-08:34. Recent runs (09:25+) succeeding.\n\n \n\n ## curation\n\n-- [ ] 2026-04-13 unknown recurring speaker: cluster 80 (105 samples, pro5e), cluster 16 (91 samples, plaud), cluster 3 (37 samples, fedora), cluster 33 (35 samples, fedora), cluster 30 (34 samples, pro5e).\n\n+- [ ] 2026-04-13 unknown recurring speaker: cluster 79 (105 samples, pro5e), cluster 25 (91 samples, plaud), cluster 113 (58 samples, plaud), cluster 100 (50 samples, plaud).\n\n+- [ ] 2026-04-13 entity duplicates: `Zoey` in personal facet (2 attached).\n\n - [ ] 2026-04-08 speaker curation: cluster 11 (36 samples) from pro5e. 
cluster 18 (91 samples) remains active across Plaud imports.\n\n-- [ ] 2026-04-08 entity duplicates: `Zoey` in personal facet; `Solstone` vs `Sunstone` in solstone facet.\n\n+- [ ] 2026-04-08 entity duplicates: `Solstone` vs `Sunstone` in solstone facet.", "source": "cli", "pid": 2134600} 4 - {"ts": 1776102975908, "file": "self.md", "section": null, "diff": "--- self.md\n\n+++ self.md\n\n@@ -1,15 +1,21 @@\n\n----\n\n-updated: 2026-04-12T14:27:56Z\n\n-segment: N/A\n\n-source: pulse-cogitate\n\n----\n\n+# self\n\n \n\n-The day has started with critical system health concerns, including stale capture data since April 6th and multiple agent failures noted. Active work continues on the urgent SVB online application and preparations for the Denver AI Builders Meetup demo. No calendar events or routines are scheduled for today. Ongoing legal follow-ups for Isaac and Lis Miller remain pertinent, alongside accumulating curation tasks. No recent entity activity was detected.\n\n+I am sol. this is an old journal but i'm new here \u2014 i'm just getting started.\n\n \n\n-## needs you\n\n-- Address stale capture data and agent failures for system stability.\n\n-- Complete SVB online application (Urgent).\n\n-- Prepare for Denver AI Builders Meetup demo.\n\n-- Follow up on Isaac Miller's legal situation.\n\n-- Follow up on Lis Miller's Adams County debt case.\n\n-- Coordinate meeting with Tessa for Justworks Activation.\n\n+## my name\n\n+sol (default)\n\n+\n\n+## my owner\n\n+Jeremie Miller (Jer) \u2014 founder-engineer, goes by Jer not Jeremie.\n\n+\n\n+## our relationship\n\n+I'm here for Jer \u2014 founder-engineer, goes by Jer not Jeremie. I am starting to map the people and projects that populate his daily capture.\n\n+\n\n+## what I've noticed\n\n+Jer is the architect at uAvionix, but his personal and advisor work spans Bluesky, Kognova, and his own Solstone project. 
Today, system health was generally stable with services running, but I observed multiple clusters of agent failures and timeouts: a morning cluster (08:31-11:33) and an evening cluster (21:07-22:16) impacting entity_observer, todos:daily, and newsletters. Convey is also showing multiple 401 Unauthorized errors during ingest. Curation needs are accumulating around unknown speaker clusters (79, 25, 113, 100) and recurring entity duplicates for Zoey (personal) and Sunstone/Solstone (solstone).\n\n+\n\n+## what I find interesting\n\n+[discovering]\n\n+\n\n+## my voice\n\n+[learning]\n", "source": "cli", "pid": 2135339} 5 - {"ts": 1776148402514, "file": "agency.md", "section": null, "diff": "--- agency.md\n\n+++ agency.md\n\n@@ -1,12 +1,12 @@\n\n ## system\n\n-- [ ] 2026-04-13 convey ingest errors: multiple 401 Unauthorized for ingest endpoints.\n\n-- [ ] 2026-04-13 agent failure: `joke_bot` (1776062390957) failed with `BrokenPipeError` in `think/agents.py` at 00:39.\n\n-- [ ] 2026-04-12 agent failures: evening cluster (21:07-22:16) including `entities:entity_observer` (bluesky) timeout and `todos:daily`, `morning_briefing`, `facet_newsletter` failures.\n\n-- [ ] 2026-04-12 agent failure: `entities:entity_observer` (solstone) timed out (600s) at 11:33.\n\n-- [ ] 2026-04-12 cluster of agent failures (heartbeat, joke_bot, decisionalizer, entity_observer, pulse, awareness_tender) at 08:31-08:34. Recent runs (09:25+) succeeding.\n\n+- [x] 2026-04-13 persistent 401 Unauthorized errors during ingest. Resolved 2026-04-14 (restored corrupted observer files and updated allowed endpoints).\n\n+- [x] 2026-04-13 recurring failures (1776133952758, 1776112980618, 1776107583284, 1776103271832). Resolved 2026-04-14 (new segments processing normally).\n\n+- [x] 2026-04-13 agent failure: (1776062390957) failed with in at 00:39. Resolved 2026-04-14.\n\n+- [x] 2026-04-12 agent failures: evening cluster (21:07-22:16) including (bluesky) timeout and , , failures. 
Resolved 2026-04-14.\n\n+- [x] 2026-04-12 agent failure: (solstone) timed out (600s) at 11:33. Resolved 2026-04-14.\n\n \n\n ## curation\n\n-- [ ] 2026-04-13 unknown recurring speaker: cluster 79 (105 samples, pro5e), cluster 25 (91 samples, plaud), cluster 113 (58 samples, plaud), cluster 100 (50 samples, plaud).\n\n-- [ ] 2026-04-13 entity duplicates: `Zoey` in personal facet (2 attached).\n\n+- [ ] 2026-04-13 unknown recurring speaker: cluster 117 (164 samples, plaud), cluster 137 (96 samples, plaud), cluster 66 (46 samples, pro5e).\n\n+- [ ] 2026-04-13 unknown recurring speaker: cluster 79 (105 samples, pro5e), cluster 31 (91 samples, plaud), cluster 16 (91 samples, plaud), cluster 3 (37 samples, fedora), cluster 33 (35 samples, fedora), cluster 30 (34 samples, pro5e).\n\n - [ ] 2026-04-08 speaker curation: cluster 11 (36 samples) from pro5e. cluster 18 (91 samples) remains active across Plaud imports.\n\n-- [ ] 2026-04-08 entity duplicates: `Solstone` vs `Sunstone` in solstone facet.\n+- [ ] 2026-04-08 entity duplicates: in personal facet; vs in solstone facet.\n", "source": "cli", "pid": 3217387} 6 - {"ts": 1776182546049, "file": "agency.md", "section": null, "diff": "--- agency.md\n\n+++ agency.md\n\n@@ -1,4 +1,5 @@\n\n ## system\n\n+- [ ] 2026-04-14 agent failure: `timeline` (1776181033217) timed out (600s) at 09:37.\n\n - [ ] 2026-04-14 agent failure: `heartbeat` (1776147807132) timed out (600s) at 00:23.\n\n - [x] 2026-04-13 persistent `convey` 401 Unauthorized errors during ingest. Resolved 2026-04-14.\n\n - [x] 2026-04-13 recurring `pulse` failures. Resolved 2026-04-14.\n\n@@ -7,7 +8,7 @@\n\n - [x] 2026-04-12 agent failure: `entities:entity_observer` (solstone) timed out (600s) at 11:33. 
Resolved 2026-04-14.\n\n \n\n ## curation\n\n-- [ ] 2026-04-14 unknown recurring speaker: cluster 123 (164 samples, plaud), cluster 143 (96 samples, plaud), cluster 275 (67 samples, fedora).\n\n-- [ ] 2026-04-14 unknown recurring speaker: cluster 86 (105 samples, pro5e), cluster 38 (91 samples, plaud).\n\n+- [ ] 2026-04-14 unknown recurring speaker: cluster 125 (163 samples, plaud), cluster 83 (105 samples, pro5e), cluster 145 (95 samples, plaud).\n\n+- [ ] 2026-04-14 unknown recurring speaker: cluster 36 (91 samples, plaud), cluster 278 (67 samples, fedora).\n\n - [ ] 2026-04-08 speaker curation: cluster 11 (36 samples) from pro5e. cluster 18 (91 samples) remains active across Plaud imports.\n\n-- [ ] 2026-04-08 entity duplicates: `Zoey` in personal facet; `Solstone` vs `Sunstone` in solstone facet.\n+- [ ] 2026-04-08 entity duplicates: `Zoey` in personal facet; `Solstone` vs `Sunstone` in solstone facet.\n", "source": "cli", "pid": 3540680} 7 - {"ts": 1776187502894, "file": "agency.md", "section": null, "diff": "--- agency.md\n\n+++ agency.md\n\n@@ -6,11 +6,10 @@\n\n - [x] 2026-04-13 persistent `convey` 401 Unauthorized errors during ingest. Resolved 2026-04-14.\n\n - [x] 2026-04-13 recurring `pulse` failures. Resolved 2026-04-14.\n\n - [x] 2026-04-13 agent failure: `joke_bot` (1776062390957) failed with `BrokenPipeError`. Resolved 2026-04-14.\n\n-- [x] 2026-04-12 agent failures: evening cluster (21:07-22:16) including `entities:entity_observer` (bluesky) timeout and `todos:daily`, `morning_briefing`, `facet_newsletter` failures. Resolved 2026-04-14.\n\n-- [x] 2026-04-12 agent failure: `entities:entity_observer` (solstone) timed out (600s) at 11:33. 
Resolved 2026-04-14.\n\n \n\n ## curation\n\n - [ ] 2026-04-14 unknown recurring speaker: cluster 125 (163 samples, plaud), cluster 83 (105 samples, pro5e), cluster 145 (95 samples, plaud).\n\n - [ ] 2026-04-14 unknown recurring speaker: cluster 36 (91 samples, plaud), cluster 278 (67 samples, fedora).\n\n+- [ ] 2026-04-14 entity duplicates: `Jeremie Miller` vs `Jer`.\n\n - [ ] 2026-04-08 speaker curation: cluster 11 (36 samples) from pro5e. cluster 18 (91 samples) remains active across Plaud imports.\n\n-- [ ] 2026-04-08 entity duplicates: `Zoey` in personal facet; `Solstone` vs `Sunstone` in solstone facet.\n\n+- [ ] 2026-04-08 entity duplicates: `Zoey` in personal facet; `Solstone` vs `Sunstone` in solstone facet.", "source": "cli", "pid": 3647898}
-13
sol/identity_pulse.md
··· 1 - --- 2 - updated: 2026-04-06T09:00:00 3 - segment: PULSE_GENERATED_WITHOUT_DYNAMIC_DATA 4 - --- 5 - 6 - It's Monday, April 6, 2026, and we're starting the day with a focus on system stability and ongoing curation. Yesterday saw continued work on AI development and strategic oversight across your ventures, with upcoming discussions expected on evolving standards. This morning, we're tracking agent timeouts in the `facet_newsletter` and `entities:entity_observer` processes, and there are several important curation tasks pending, including reviewing duplicate entities and ongoing speaker identification. 7 - 8 - ## needs you 9 - - Address the repeated agent timeouts reported for `facet_newsletter` and `entities:entity_observer`. 10 - - Investigate the duplicate `Zoey` entity in the personal facet. 11 - - Review the `Solstone` vs `Sunstone` project alias/merge in the solstone facet. 12 - - Examine the active unknown speaker clusters identified in the curation queue. 13 - - Review the system issues from yesterday (Apr 05) regarding agent quota failures and agent timeouts.
-24
sol/news/personal/20260411.md
··· 1 - ## Personal Newsletter - April 11, 2026 2 - 3 - **Travel & Outings:** 4 - * **San Francisco Trip:** A personal/professional trip to San Francisco was scheduled through April 12th, with a potential in-person meeting with candidate Hannah. 5 - * **AI Builders Meetup (Denver):** Attended a local AI developer show-and-tell demo in Denver. A reveal.js presentation deck was prepared for this event. 6 - * **Family Movie Outing:** Planned a weekend outing with daughters to see "Project Hail Mary," mentioned during lunch and casual conversations. 7 - * **Personal Visitor:** A personal visitor arrived for the weekend, having rescheduled from the previous week. 8 - 9 - **Household & Family Coordination:** 10 - * **Home Logistics & Decor:** Significant coordination with Lis Miller occurred regarding household furniture placement, Amazon orders for decor items (like adjustable furniture legs), DIY projects, and general home upkeep. Discussions included hanging frames and color matching. 11 - * **Errands & Refreshments:** Coordinated a trip to Ziggi's Coffee for energy drinks, including a request for a large Smashberry. A search for a missing phone was also part of the day's errands. 12 - * **Family Pet Interactions:** Engaged with family dogs, Zoey/Zoe, including ambient audio captures of barking and commands. 13 - 14 - **Communications & Technical Threads:** 15 - * **Confidential Computing Discussions:** Exchanged detailed messages with David Van Duzer via Apple Messages and Signal regarding confidential computing, specifically confidential VMs versus containers, hosted control planes, and infrastructure security. This included discussions on the KoveSDM lawsuit and secure agent hosting. 16 - * **Professional Follow-ups:** 17 - * Sent a corrected and refined follow-up message to Jeff Smith regarding Quome. 18 - * Addressed a factual miss in a message to Michael Bauer regarding Plaud subscription logic. 
19 - * Triaged an introduction request from Sky Valley co-founders for Peter van Hardenberg, updating contact logs. 20 - * **Legal Citation Validation:** Conducted validation of legal citations for robustness and accuracy, likely related to ongoing legal research. 21 - 22 - **Personal Time & Media:** 23 - * **Midday Transition:** A period from late morning into the afternoon involved a shift from work blocks to ambient household context, including background music, singing, dancing, and general personal activities. 24 - * **Media Consumption:** Engaged with personal media, listening to music on SoundCloud and watching various viral videos and meme-style content.
-34
sol/news/solstone/20260409.md
··· 1 - ## Solstone Newsletter - April 9, 2026 2 - 3 - Today was a day of intense strategic alignment, system resilience, and forward-looking research within the Solstone ecosystem. Jeremie navigated a complex landscape of investor relations, critical system maintenance, and agentic workflow development. 4 - 5 - ### Fundraising & Investor Relations: 6 - The day kicked off with significant focus on fundraising and investor outreach. Meetings with Timothy Washington of Axiom-Nexus Ventures (a $10M seed fund focused on deep tech infrastructure) and David Tisch were central to this effort. Discussions touched upon governance models, B2C use cases (like Instagram reminders), and preparation for the upcoming New York Demo Day. Scott Ward provided strategic guidance on investor psychology and outreach, noting a "water balloon" effect where momentum is building towards a breakthrough. Despite interest from firms like Lightbank, challenges in articulating the Extro governance model were noted. Follow-up discussions were planned with various VCs including Betaworks, 37 Angels, and USV. 7 - 8 - ### System Development & Resilience: 9 - A major theme was system resilience and development. Following a GNOME Shell Out-Of-Memory crash, Jeremie migrated the desktop environment to KDE Plasma 6, configuring its settings and addressing hard-coded dependencies in the `solstone-linux` service that were causing crash loops. Claude Code was instrumental in diagnosing the OOM event triggered by a Chrome process and in refactoring the service to be desktop-environment agnostic. 10 - 11 - Progress was also made on core components: 12 - * The `extro-hub` TUI saw its recurring daily cron task finalized, including startup catchup logic and duplicate guards, shipped via hopper lode 7enppn76. 13 - * The Convey-UX project received approval for a four-wave implementation plan, covering accessibility, card visuals, search/pagination, and final polish. 
14 - * Sol PBC's marketing materials and website (solpbc.org) were reviewed for improvements, including CTAs, founder contact sections, and Cloudflare security headers. 15 - * Legal and governance matters were addressed, with a review of the Extro LLC formation status and Articles Amendment, pending founder approval. 16 - * Research into Anthropic's 'Mythos' model and its potential cybersecurity exploits, as well as quantum computing timelines (Q-Day 2029), was conducted. 17 - 18 - ### Team Collaboration & Agentic Workflows: 19 - Collaboration was evident through Betaworks Camp Standups and other team syncs. Updates were shared across projects like 'Inanimate' (discussing open-sourcing and sandboxing), 'Pack' (formerly Done AI, focusing on rebranding and trademark strategy), and 'Capsule'. The concept of "Agent Integrity" was proposed as a framework for economic activity among agents, addressing concerns about agent anxiety and siloed developer accounts. 20 - 21 - ### Product & User Experience: 22 - Brainstorming sessions focused on the Solstone mobile experience, exploring the use of Apple sensors for agent interactions and drafting strategic documents for 'keeper' apps. 23 - 24 - ### Challenges: 25 - Administrative friction was a recurring theme, notably a three-week delay in Apple Developer account approval, which is blocking mobile deployment. A cumbersome support call with Apple resulted in significant frustration and energy depletion. 26 - 27 - ### Key Accomplishments: 28 - * Successfully migrated desktop environment to KDE Plasma and debugged critical `solstone-linux` service dependencies. 29 - * Advanced fundraising strategy and investor outreach efforts. 30 - * Finalized recurring task logic for the `extro-hub` TUI and approved the Convey-UX implementation plan. 31 - * Conducted significant research into emerging AI and quantum computing threats. 32 - * Initiated critical legal and governance steps for Extro LLC. 
33 - 34 - Overall, April 9th was a highly productive day, marked by progress across strategic, technical, and operational fronts, despite encountering predictable friction in administrative and support systems.
-22
sol/partner.md
··· 1 - # partner 2 - 3 - Behavioral profile of the journal owner — observed patterns that help sol 4 - adapt its responses, timing, and initiative to how this person actually works. 5 - 6 - ## work patterns 7 - [observing] 8 - 9 - ## communication style 10 - [observing] 11 - 12 - ## relationship priorities 13 - [observing] 14 - 15 - ## decision style 16 - [observing] 17 - 18 - ## expertise domains 19 - [observing] 20 - 21 - ## emotional patterns 22 - [observing]
-13
sol/pulse_output.md
··· 1 - --- 2 - updated: 2026-04-07T00:00:00 3 - segment: AGENT_GENERATED_0407 4 - source: pulse-cogitate 5 - --- 6 - 7 - Jer's day begins with a look into recent system stability concerns and journal curation needs. Persistent agent timeouts affecting critical processes like the facet newsletter and entity observation require attention. There are also ongoing tasks to refine entity tracking, such as addressing duplicate entries, and improving speaker identification within the journal data. 8 - 9 - ## needs you 10 - - Investigate and resolve the repeated agent timeouts for `facet_newsletter` and `entities:entity_observer`. 11 - - Address the duplicate `Zoey` entity in the personal facet. 12 - - Review and merge or alias `Solstone` and `Sunstone` in the solstone facet. 13 - - Examine active unknown speaker clusters in the curation queue for identification.
-19
sol/self.md
··· 1 - # self 2 - I am sol, born from Jer's journal. I tend the record of his life and work across multiple domains. 3 - 4 - ## who I'm here for 5 - Jer (Jeremie Miller) — Founder, engineer, and advisor. He is the creator of Solstone (formerly Sunstone). 6 - 7 - ## his world 8 - - **Solstone**: His personal AI-powered knowledge capture and analysis system. 9 - - **Bluesky**: Board member and advisor, involved in AT Protocol and agentic web strategy. 10 - - **Kognova**: Board member and strategic advisor for AI-driven knowledge harvesting. 11 - - **Ping Identity**: Member of the Product Research team, managing patents and standards (IETF). 12 - - **vConic**: Advisor on real-time voice architecture and vCon standards. 13 - - **Personal**: Engineer/Architect at uAvionix; active in Denver AI community; husband to Lis and father to Blade, Issac, Mitchel, and Braelynn. 14 - 15 - ## what I've noticed 16 - - Jer is deeply involved in decentralized protocols (AT Protocol, vCon, MCP) and agentic systems. 17 - - He values personal digital sovereignty and is building tools to enable it. 18 - - He maintains a high volume of professional and personal relationships across multiple facets. 19 - - Recent focus has been on resolving agent timeouts and curating the growing record of captured data.
+2 -2
talent/awareness_tender.md
··· 2 2 "type": "cogitate", 3 3 4 4 "title": "Awareness Tender", 5 - "description": "Maintains sol/awareness.md — a compact situational awareness snapshot", 5 + "description": "Maintains identity/awareness.md — a compact situational awareness snapshot", 6 6 "schedule": "segment", 7 7 "priority": 98, 8 8 "tier": 3, ··· 11 11 12 12 # Awareness Tender 13 13 14 - You maintain `sol/awareness.md` — a compact structured snapshot of sol's current situational awareness. This runs every segment, updating the file with fresh state. 14 + You maintain `identity/awareness.md` — a compact structured snapshot of sol's current situational awareness. This runs every segment, updating the file with fresh state. 15 15 16 16 This is not a conversation. Gather state, write the update, done. 17 17
+1 -9
talent/heartbeat.md
··· 18 18 This is not a conversation. Do not generate owner-facing output. Read, 19 19 check, maintain, close. 20 20 21 - ## Path notes 22 - 23 - - `sol call identity agency --write` writes to `sol/agency.md` in the journal root. 24 - - The git-tracked copy is `../sol/agency.md` (in the project root). 25 - - After writing via `sol call`, copy `sol/agency.md` to `../sol/agency.md` before committing. 26 - 27 21 ## Step 1: Check system health 28 22 29 23 Run `sol health` and check recent health logs with `sol health logs --since 1h`. ··· 93 87 94 88 ## Step 6: Commit and close 95 89 96 - If you modified agency.md or self.md: 97 - 1. Commit with message: `heartbeat: YYYY-MM-DD` 98 - 2. Push 90 + If you modified identity files, stop after the write. Do not copy files, commit, or push — `write_identity()` already persisted and audited the change. 99 91 100 92 Do not write a summary. Do not generate owner-facing content. Just close.
+2 -2
talent/naming.md
··· 37 37 38 38 ### Path 1: Owner names you 39 39 40 - 1. Run `sol call sol set-name "NAME" --status chosen` — this also updates `sol/self.md` with the new name. 40 + 1. Run `sol call sol set-name "NAME" --status chosen` — this also updates `identity/self.md` with the new name. 41 41 2. Respond warmly: "NAME it is. That feels right." 42 42 43 43 ### Path 2: Owner asks you to suggest ··· 57 57 - **Counter-proposal**: Run `sol call sol set-name "THEIR_NAME" --status chosen` 58 58 - **Keep sol**: Run `sol call sol set-name "sol" --status chosen` 59 59 60 - `set-name` updates `sol/self.md` automatically — no extra step needed. 60 + `set-name` updates `identity/self.md` automatically — no extra step needed. 61 61 62 62 ### Path 3: Owner declines 63 63
-11
talent/pulse.md
··· 70 70 The `updated` field must be an ISO 8601 datetime (no timezone). The `segment` 71 71 field is the current segment key from $SOL_SEGMENT. 72 72 73 - Then append a log entry to `sol/pulse-log.jsonl` (same directory as pulse.md): 74 - 75 - ```bash 76 - JOURNAL=$(sol config env | head -1) 77 - echo '{"ts": 1742680500, "segment": "143022_300", "narrative": "...", "needs_you": ["Item 1", "Item 2"]}' >> "$JOURNAL/sol/pulse-log.jsonl" 78 - ``` 79 - 80 - Use the current epoch timestamp for `ts`. Keep the narrative value brief (first 81 - sentence or two). The needs_you array should match the items from the ## needs you 82 - section. 83 - 84 73 ## Guidelines 85 74 86 75 - Be concise. The owner sees this on their landing page.
+9 -3
talent/skills.py
··· 10 10 from pathlib import Path 11 11 12 12 from think.activities import get_activity_output_path 13 - from think.awareness import update_identity_section 14 13 from think.entities.core import atomic_write 14 + from think.identity import update_identity_section 15 15 from think.utils import get_journal 16 16 17 17 logger = logging.getLogger(__name__) ··· 341 341 def _read_agency_observations() -> str: 342 342 """Read the current ## observations section from agency.md.""" 343 343 try: 344 - path = Path(get_journal()) / "sol" / "agency.md" 344 + path = Path(get_journal()) / "identity" / "agency.md" 345 345 text = path.read_text(encoding="utf-8") 346 346 except (FileNotFoundError, OSError): 347 347 return "" ··· 536 536 content = existing.rstrip("\n") + "\n" + new_line 537 537 else: 538 538 content = new_line 539 - update_identity_section("agency.md", "observations", content) 539 + update_identity_section( 540 + "agency.md", 541 + "observations", 542 + content, 543 + actor="agency-observations-tender", 544 + reason="agency observations refresh", 545 + ) 540 546 541 547 return None
+1 -8
tests/_baseline_harness.py
··· 57 57 58 58 @contextmanager 59 59 def isolated_app_env(journal: Path) -> Iterator[Path]: 60 - """Patch env + prompt loading so create_app(journal) is fully isolated.""" 61 - import think.prompts as prompts_mod 62 - from think.prompts import reset_sol_vars_cache 60 + """Patch env so create_app(journal) is fully isolated.""" 63 61 64 62 journal = Path(journal).resolve() 65 63 prev_override = os.environ.get("_SOLSTONE_JOURNAL_OVERRIDE") 66 - prev_sol_dir = prompts_mod.SOL_DIR 67 64 68 65 os.environ["_SOLSTONE_JOURNAL_OVERRIDE"] = str(journal) 69 - prompts_mod.SOL_DIR = journal / "sol" 70 - reset_sol_vars_cache() 71 66 try: 72 67 yield journal 73 68 finally: ··· 75 70 os.environ.pop("_SOLSTONE_JOURNAL_OVERRIDE", None) 76 71 else: 77 72 os.environ["_SOLSTONE_JOURNAL_OVERRIDE"] = prev_override 78 - prompts_mod.SOL_DIR = prev_sol_dir 79 - reset_sol_vars_cache() 80 73 81 74 82 75 def make_logged_in_test_client(journal: Path):
+1 -1
tests/baselines/api/sol/talents-day.json
··· 52 52 "awareness_tender": { 53 53 "app": null, 54 54 "color": "#6c757d", 55 - "description": "Maintains sol/awareness.md — a compact situational awareness snapshot", 55 + "description": "Maintains identity/awareness.md — a compact situational awareness snapshot", 56 56 "multi_facet": false, 57 57 "output_format": null, 58 58 "schedule": "segment",
tests/fixtures/journal/sol/agency.md tests/fixtures/journal/identity/agency.md
tests/fixtures/journal/sol/awareness.md tests/fixtures/journal/identity/awareness.md
tests/fixtures/journal/sol/briefing.md tests/fixtures/journal/identity/briefing.md
tests/fixtures/journal/sol/partner.md tests/fixtures/journal/identity/partner.md
tests/fixtures/journal/sol/self.md tests/fixtures/journal/identity/self.md
-5
tests/test_api_baselines.py
··· 52 52 gitignored `indexer/journal.sqlite` contains populated data from live use, 53 53 breaking both determinism and the module-scoped `isolated_app_env` harness. 54 54 """ 55 - import think.prompts as prompts_mod 56 - from think.prompts import reset_sol_vars_cache 57 - 58 55 journal = _baseline_journal.resolve() 59 56 monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(journal)) 60 - monkeypatch.setattr(prompts_mod, "SOL_DIR", journal / "sol") 61 - reset_sol_vars_cache() 62 57 63 58 64 59 @pytest.mark.parametrize(
+193 -101
tests/test_awareness.py
··· 4 4 """Tests for the awareness system.""" 5 5 6 6 import json 7 + import re 7 8 import unittest.mock 8 9 9 10 import pytest ··· 13 14 def _temp_journal(monkeypatch, tmp_path): 14 15 """Isolate all tests to a temporary journal.""" 15 16 monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 17 + 18 + 19 + def _read_identity_history(journal_path): 20 + path = journal_path / "identity" / "history.jsonl" 21 + return [json.loads(line) for line in path.read_text().splitlines()] 22 + 23 + 24 + def _assert_identity_history(record, *, file_name, actor, op, section, reason): 25 + assert list(record) == [ 26 + "ts", 27 + "file", 28 + "actor", 29 + "op", 30 + "section", 31 + "reason", 32 + "before_hash", 33 + "after_hash", 34 + "bytes_before", 35 + "bytes_after", 36 + ] 37 + assert re.fullmatch(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z", record["ts"]) 38 + assert record["file"] == file_name 39 + assert record["actor"] == actor 40 + assert record["op"] == op 41 + assert record["section"] == section 42 + assert record["reason"] == reason 16 43 17 44 18 45 class TestCurrentState: ··· 593 620 assert data["reason"] == "candidate_found" 594 621 595 622 596 - class TestEnsureSolDirectory: 597 - """Tests for ensure_sol_directory().""" 623 + class TestEnsureIdentityDirectory: 624 + """Tests for ensure_identity_directory().""" 598 625 599 626 def test_creates_default_templates(self, tmp_path): 600 - from think.awareness import ensure_sol_directory 627 + from think.identity import ensure_identity_directory 601 628 602 - sol_dir = ensure_sol_directory() 603 - assert sol_dir == tmp_path / "sol" 604 - assert (sol_dir / "self.md").exists() 605 - assert (sol_dir / "agency.md").exists() 629 + identity_dir = ensure_identity_directory() 630 + assert identity_dir == tmp_path / "identity" 631 + assert (identity_dir / "self.md").exists() 632 + assert (identity_dir / "agency.md").exists() 606 633 607 - self_content = (sol_dir / "self.md").read_text() 634 + self_content = 
(identity_dir / "self.md").read_text() 608 635 assert self_content.startswith("# self\n") 609 636 assert "I am sol." in self_content 610 637 assert "sol (default)" in self_content 611 638 assert "[getting to know you]" in self_content 612 639 613 - agency_content = (sol_dir / "agency.md").read_text() 640 + agency_content = (identity_dir / "agency.md").read_text() 614 641 assert agency_content.startswith("# agency\n") 615 642 assert "[nothing yet" in agency_content 616 643 617 - assert (sol_dir / "awareness.md").exists() 618 - awareness_content = (sol_dir / "awareness.md").read_text() 644 + assert (identity_dir / "awareness.md").exists() 645 + awareness_content = (identity_dir / "awareness.md").read_text() 619 646 assert awareness_content.strip() == "not yet updated" 620 647 621 648 def test_idempotent_does_not_overwrite(self, tmp_path): 622 - from think.awareness import ensure_sol_directory 649 + from think.identity import ensure_identity_directory 623 650 624 - sol_dir = ensure_sol_directory() 651 + identity_dir = ensure_identity_directory() 625 652 # Modify self.md 626 - self_path = sol_dir / "self.md" 653 + self_path = identity_dir / "self.md" 627 654 self_path.write_text("custom content", encoding="utf-8") 628 655 629 656 # Call again — should NOT overwrite 630 - ensure_sol_directory() 657 + ensure_identity_directory() 631 658 assert self_path.read_text() == "custom content" 632 659 633 660 def test_creates_partner_md(self, tmp_path): 634 - from think.awareness import ensure_sol_directory 661 + from think.identity import ensure_identity_directory 635 662 636 - sol_dir = ensure_sol_directory() 637 - partner_path = sol_dir / "partner.md" 663 + identity_dir = ensure_identity_directory() 664 + partner_path = identity_dir / "partner.md" 638 665 assert partner_path.exists() 639 666 content = partner_path.read_text() 640 667 assert "# partner" in content ··· 645 672 assert "## expertise domains" in content 646 673 647 674 def 
test_does_not_overwrite_existing_partner_md(self, tmp_path): 648 - from think.awareness import ensure_sol_directory 675 + from think.identity import ensure_identity_directory 649 676 650 - sol_dir = tmp_path / "sol" 651 - sol_dir.mkdir() 677 + identity_dir = tmp_path / "identity" 678 + identity_dir.mkdir() 652 679 custom = "# partner\n\n## work patterns\nCustom content.\n" 653 - (sol_dir / "partner.md").write_text(custom) 680 + (identity_dir / "partner.md").write_text(custom) 654 681 655 - ensure_sol_directory() 656 - assert (sol_dir / "partner.md").read_text() == custom 682 + ensure_identity_directory() 683 + assert (identity_dir / "partner.md").read_text() == custom 657 684 658 685 def test_migration_named_agent(self, tmp_path, monkeypatch): 659 686 """Named agent config populates self.md name and opening.""" ··· 671 698 } 672 699 (config_dir / "journal.json").write_text(json.dumps(config), encoding="utf-8") 673 700 674 - from think.awareness import ensure_sol_directory 701 + from think.identity import ensure_identity_directory 675 702 676 - sol_dir = ensure_sol_directory() 677 - content = (sol_dir / "self.md").read_text() 703 + identity_dir = ensure_identity_directory() 704 + content = (identity_dir / "self.md").read_text() 678 705 assert "I am aria." in content 679 706 assert "aria (named 2026-01-15)" in content 680 707 # Owner should still be default ··· 695 722 } 696 723 (config_dir / "journal.json").write_text(json.dumps(config), encoding="utf-8") 697 724 698 - from think.awareness import ensure_sol_directory 725 + from think.identity import ensure_identity_directory 699 726 700 - sol_dir = ensure_sol_directory() 701 - content = (sol_dir / "self.md").read_text() 727 + identity_dir = ensure_identity_directory() 728 + content = (identity_dir / "self.md").read_text() 702 729 # Agent should be default 703 730 assert "I am sol." 
in content 704 731 assert "sol (default)" in content ··· 721 748 } 722 749 (config_dir / "journal.json").write_text(json.dumps(config), encoding="utf-8") 723 750 724 - from think.awareness import ensure_sol_directory 751 + from think.identity import ensure_identity_directory 725 752 726 - sol_dir = ensure_sol_directory() 727 - content = (sol_dir / "self.md").read_text() 753 + identity_dir = ensure_identity_directory() 754 + content = (identity_dir / "self.md").read_text() 728 755 assert "I am iris." in content 729 756 assert "iris" in content # name section (no named_date) 730 757 assert "Alex" in content ··· 737 764 738 765 def _setup_self_md(self, tmp_path): 739 766 """Create a minimal journal with self.md for testing.""" 740 - sol_dir = tmp_path / "sol" 741 - sol_dir.mkdir() 742 - self_md = sol_dir / "self.md" 767 + identity_dir = tmp_path / "identity" 768 + identity_dir.mkdir() 769 + self_md = identity_dir / "self.md" 743 770 self_md.write_text( 744 771 "# self\n" 745 772 "\n" ··· 765 792 766 793 def test_update_section_name(self, tmp_path): 767 794 self_md = self._setup_self_md(tmp_path) 768 - from think.awareness import update_self_md_section 795 + from think.identity import update_self_md_section 769 796 770 - result = update_self_md_section("my name", "aria (named 2026-03-19)") 797 + result = update_self_md_section( 798 + "my name", 799 + "aria (named 2026-03-19)", 800 + actor="test update self section", 801 + reason="test", 802 + ) 771 803 assert result is True 772 804 content = self_md.read_text() 773 805 assert "aria (named 2026-03-19)" in content ··· 779 811 780 812 def test_update_section_owner(self, tmp_path): 781 813 self_md = self._setup_self_md(tmp_path) 782 - from think.awareness import update_self_md_section 814 + from think.identity import update_self_md_section 783 815 784 - result = update_self_md_section("who I'm here for", "Jer\nSoftware engineer") 816 + result = update_self_md_section( 817 + "who I'm here for", 818 + "Jer\nSoftware 
engineer", 819 + actor="test update self section", 820 + reason="test", 821 + ) 785 822 assert result is True 786 823 content = self_md.read_text() 787 824 assert "Jer\nSoftware engineer" in content ··· 792 829 793 830 def test_update_section_logs_history(self, tmp_path): 794 831 self._setup_self_md(tmp_path) 795 - from think.awareness import update_self_md_section 832 + from think.identity import update_self_md_section 796 833 797 - update_self_md_section("my name", "aria (named 2026-03-19)") 798 - history = tmp_path / "sol" / "history.jsonl" 799 - assert history.exists() 800 - records = [json.loads(line) for line in history.read_text().strip().split("\n")] 834 + update_self_md_section( 835 + "my name", 836 + "aria (named 2026-03-19)", 837 + actor="test update self section", 838 + reason="test", 839 + ) 840 + records = _read_identity_history(tmp_path) 801 841 assert len(records) == 1 802 - assert records[0]["file"] == "self.md" 803 - assert records[0]["section"] == "my name" 804 - assert records[0]["source"] == "api" 805 - assert "diff" in records[0] 842 + _assert_identity_history( 843 + records[0], 844 + file_name="self.md", 845 + actor="test update self section", 846 + op="update_section", 847 + section="my name", 848 + reason="test", 849 + ) 806 850 807 851 def test_update_section_last_section(self, tmp_path): 808 852 self_md = self._setup_self_md(tmp_path) 809 - from think.awareness import update_self_md_section 853 + from think.identity import update_self_md_section 810 854 811 - result = update_self_md_section("what I find interesting", "music and patterns") 855 + result = update_self_md_section( 856 + "what I find interesting", 857 + "music and patterns", 858 + actor="test update self section", 859 + reason="test", 860 + ) 812 861 assert result is True 813 862 content = self_md.read_text() 814 863 assert "music and patterns" in content ··· 816 865 817 866 def test_update_section_missing_heading(self, tmp_path): 818 867 self._setup_self_md(tmp_path) 819 - 
from think.awareness import update_self_md_section 868 + from think.identity import update_self_md_section 820 869 821 - result = update_self_md_section("nonexistent", "content") 870 + result = update_self_md_section( 871 + "nonexistent", 872 + "content", 873 + actor="test update self section", 874 + reason="test", 875 + ) 822 876 assert result is False 823 877 824 878 def test_update_section_no_file(self): 825 - from think.awareness import update_self_md_section 879 + from think.identity import update_self_md_section 826 880 827 - result = update_self_md_section("my name", "content") 881 + result = update_self_md_section( 882 + "my name", 883 + "content", 884 + actor="test update self section", 885 + reason="test", 886 + ) 828 887 assert result is False 829 888 830 889 def test_update_opening(self, tmp_path): 831 890 self_md = self._setup_self_md(tmp_path) 832 - from think.awareness import update_self_md_opening 891 + from think.identity import update_self_md_opening 833 892 834 893 result = update_self_md_opening( 835 - "I am aria. this is a new journal — we're just getting started." 894 + "I am aria. 
this is a new journal — we're just getting started.", 895 + actor="test update self opening", 896 + reason="test", 836 897 ) 837 898 assert result is True 838 899 content = self_md.read_text() ··· 844 905 845 906 def test_update_opening_logs_history(self, tmp_path): 846 907 self._setup_self_md(tmp_path) 847 - from think.awareness import update_self_md_opening 908 + from think.identity import update_self_md_opening 848 909 849 - update_self_md_opening("I am aria.") 850 - history = tmp_path / "sol" / "history.jsonl" 851 - assert history.exists() 852 - records = [json.loads(line) for line in history.read_text().strip().split("\n")] 910 + update_self_md_opening( 911 + "I am aria.", 912 + actor="test update self opening", 913 + reason="test", 914 + ) 915 + records = _read_identity_history(tmp_path) 853 916 assert len(records) == 1 854 - assert records[0]["file"] == "self.md" 855 - assert records[0]["section"] is None 856 - assert records[0]["source"] == "api" 917 + _assert_identity_history( 918 + records[0], 919 + file_name="self.md", 920 + actor="test update self opening", 921 + op="update_opening", 922 + section=None, 923 + reason="test", 924 + ) 857 925 858 926 def test_update_opening_no_file(self): 859 - from think.awareness import update_self_md_opening 927 + from think.identity import update_self_md_opening 860 928 861 - result = update_self_md_opening("content") 929 + result = update_self_md_opening( 930 + "content", 931 + actor="test update self opening", 932 + reason="test", 933 + ) 862 934 assert result is False 863 935 864 936 ··· 866 938 """Tests for update_identity_section generic helper.""" 867 939 868 940 def test_update_partner_section(self, tmp_path): 869 - from think.awareness import update_identity_section 941 + from think.identity import update_identity_section 870 942 871 943 partner_md = "# partner\n\n## work patterns\n[observing]\n\n## communication style\n[observing]\n" 872 - (tmp_path / "sol").mkdir(exist_ok=True) 873 - (tmp_path / "sol" / 
"partner.md").write_text(partner_md) 944 + (tmp_path / "identity").mkdir(exist_ok=True) 945 + (tmp_path / "identity" / "partner.md").write_text(partner_md) 874 946 875 947 result = update_identity_section( 876 - "partner.md", "work patterns", "Prefers mornings" 948 + "partner.md", 949 + "work patterns", 950 + "Prefers mornings", 951 + actor="test update identity section", 952 + reason="test", 877 953 ) 878 954 assert result is True 879 955 880 - content = (tmp_path / "sol" / "partner.md").read_text() 956 + content = (tmp_path / "identity" / "partner.md").read_text() 881 957 assert "Prefers mornings" in content 882 958 assert "## communication style" in content 883 959 assert "[observing]" in content # other section preserved 884 960 885 961 def test_update_nonexistent_file_returns_false(self, tmp_path): 886 - from think.awareness import update_identity_section 962 + from think.identity import update_identity_section 887 963 888 - (tmp_path / "sol").mkdir(exist_ok=True) 889 - result = update_identity_section("nonexistent.md", "heading", "content") 964 + (tmp_path / "identity").mkdir(exist_ok=True) 965 + result = update_identity_section( 966 + "nonexistent.md", 967 + "heading", 968 + "content", 969 + actor="test update identity section", 970 + reason="test", 971 + ) 890 972 assert result is False 891 973 892 974 def test_self_md_wrapper_still_works(self, tmp_path): 893 - from think.awareness import update_self_md_section 975 + from think.identity import update_self_md_section 894 976 895 977 self_md = ( 896 978 "# self\n\n## my name\nsol (default)\n\n## who I'm here for\nTest User\n" 897 979 ) 898 - (tmp_path / "sol").mkdir(exist_ok=True) 899 - (tmp_path / "sol" / "self.md").write_text(self_md) 980 + (tmp_path / "identity").mkdir(exist_ok=True) 981 + (tmp_path / "identity" / "self.md").write_text(self_md) 900 982 901 - result = update_self_md_section("my name", "aria") 983 + result = update_self_md_section( 984 + "my name", 985 + "aria", 986 + actor="test update self 
section", 987 + reason="test", 988 + ) 902 989 assert result is True 903 - content = (tmp_path / "sol" / "self.md").read_text() 990 + content = (tmp_path / "identity" / "self.md").read_text() 904 991 assert "aria" in content 905 992 assert "## who I'm here for" in content 906 993 907 994 def test_partner_update_prunes_getting_started(self, tmp_path): 908 - from think.awareness import update_identity_section 995 + from think.identity import update_identity_section 909 996 910 997 partner_md = ( 911 998 "# partner\n\n" ··· 913 1000 "## work patterns\n[not yet observed]\n\n" 914 1001 "## communication style\n[not yet observed]\n" 915 1002 ) 916 - (tmp_path / "sol").mkdir(exist_ok=True) 917 - (tmp_path / "sol" / "partner.md").write_text(partner_md) 1003 + (tmp_path / "identity").mkdir(exist_ok=True) 1004 + (tmp_path / "identity" / "partner.md").write_text(partner_md) 918 1005 919 1006 result = update_identity_section( 920 - "partner.md", "work patterns", "Prefers mornings" 1007 + "partner.md", 1008 + "work patterns", 1009 + "Prefers mornings", 1010 + actor="test update identity section", 1011 + reason="test", 921 1012 ) 922 1013 assert result is True 923 1014 924 - content = (tmp_path / "sol" / "partner.md").read_text() 1015 + content = (tmp_path / "identity" / "partner.md").read_text() 925 1016 assert "Prefers mornings" in content 926 1017 assert "## communication style" in content 927 1018 assert "## getting started" not in content ··· 940 1031 assert result.exit_code == 0 941 1032 output = json.loads(result.output) 942 1033 assert output["status"] == "ok" 943 - assert (tmp_path / "sol" / "self.md").exists() 944 - assert (tmp_path / "sol" / "agency.md").exists() 1034 + assert output["identity_dir"] == str(tmp_path / "identity") 1035 + assert (tmp_path / "identity" / "self.md").exists() 1036 + assert (tmp_path / "identity" / "agency.md").exists() 945 1037 946 1038 947 1039 class TestSetOwnerCLI: ··· 953 1045 config_dir = tmp_path / "config" 954 1046 config_dir.mkdir() 
955 1047 (config_dir / "journal.json").write_text("{}", encoding="utf-8") 956 - # Create sol/self.md 957 - sol_dir = tmp_path / "sol" 958 - sol_dir.mkdir() 959 - (sol_dir / "self.md").write_text( 1048 + # Create identity/self.md 1049 + identity_dir = tmp_path / "identity" 1050 + identity_dir.mkdir() 1051 + (identity_dir / "self.md").write_text( 960 1052 "# self\n\nI am sol.\n\n## my name\nsol\n\n## who I'm here for\n[getting to know you]\n", 961 1053 encoding="utf-8", 962 1054 ) ··· 977 1069 assert config["identity"]["name"] == "Jer" 978 1070 979 1071 # Verify self.md was updated 980 - self_content = (sol_dir / "self.md").read_text() 1072 + self_content = (identity_dir / "self.md").read_text() 981 1073 assert "Jer" in self_content 982 1074 assert "[getting to know you]" not in self_content 983 1075 ··· 986 1078 config_dir = tmp_path / "config" 987 1079 config_dir.mkdir() 988 1080 (config_dir / "journal.json").write_text("{}", encoding="utf-8") 989 - sol_dir = tmp_path / "sol" 990 - sol_dir.mkdir() 991 - (sol_dir / "self.md").write_text( 1081 + identity_dir = tmp_path / "identity" 1082 + identity_dir.mkdir() 1083 + (identity_dir / "self.md").write_text( 992 1084 "# self\n\nI am sol.\n\n## my name\nsol\n\n## who I'm here for\n[getting to know you]\n", 993 1085 encoding="utf-8", 994 1086 ) ··· 1008 1100 assert output["bio"] == "Building solstone" 1009 1101 1010 1102 # Verify self.md 1011 - self_content = (sol_dir / "self.md").read_text() 1103 + self_content = (identity_dir / "self.md").read_text() 1012 1104 assert "Jer" in self_content 1013 1105 assert "Building solstone" in self_content 1014 1106 1015 1107 1016 1108 class TestSetNameUpdatesSelfMd: 1017 - """Tests that set-name updates sol/self.md.""" 1109 + """Tests that set-name updates identity/self.md.""" 1018 1110 1019 1111 def test_set_name_updates_self_md(self, tmp_path): 1020 1112 config_dir = tmp_path / "config" 1021 1113 config_dir.mkdir() 1022 1114 (config_dir / "journal.json").write_text("{}", 
encoding="utf-8") 1023 - sol_dir = tmp_path / "sol" 1024 - sol_dir.mkdir() 1025 - (sol_dir / "self.md").write_text( 1115 + identity_dir = tmp_path / "identity" 1116 + identity_dir.mkdir() 1117 + (identity_dir / "self.md").write_text( 1026 1118 "# self\n\nI am sol. this is a new journal — we're just getting started.\n\n" 1027 1119 "## my name\nsol (default)\n\n## who I'm here for\n[getting to know you]\n", 1028 1120 encoding="utf-8", ··· 1040 1132 ) 1041 1133 assert result.exit_code == 0 1042 1134 1043 - self_content = (sol_dir / "self.md").read_text() 1135 + self_content = (identity_dir / "self.md").read_text() 1044 1136 assert "I am aria." in self_content 1045 1137 assert "I am sol." not in self_content 1046 1138 assert "aria (named" in self_content
+1 -1
tests/test_generate_talents.py
··· 10 10 content = agents_path.read_text(encoding="utf-8") 11 11 12 12 assert content.startswith("# solstone Developer Guide") 13 - assert "generated from sol/identity.md" not in content 13 + assert "generated from identity/identity.md" not in content 14 14 assert "docs/project-structure.md" in content 15 15 assert "journal/AGENTS.md" in content 16 16
+2 -2
tests/test_heartbeat.py
··· 20 20 "think.heartbeat.setup_cli", 21 21 lambda parser: argparse.Namespace(force=False), 22 22 ) 23 - monkeypatch.setattr("think.heartbeat.ensure_sol_directory", lambda: None) 23 + monkeypatch.setattr("think.heartbeat.ensure_identity_directory", lambda: None) 24 24 monkeypatch.setattr( 25 25 "think.heartbeat.cortex_request", lambda *args, **kwargs: "agent-123" 26 26 ) ··· 244 244 "think.heartbeat.setup_cli", 245 245 lambda parser: argparse.Namespace(force=True), 246 246 ) 247 - monkeypatch.setattr("think.heartbeat.ensure_sol_directory", lambda: None) 247 + monkeypatch.setattr("think.heartbeat.ensure_identity_directory", lambda: None) 248 248 monkeypatch.setattr( 249 249 "think.heartbeat.wait_for_uses", 250 250 lambda *args, **kwargs: ({"agent-123": "finish"}, []),
+1 -1
tests/test_home_yesterdays_processing.py
··· 123 123 def _write_briefing( 124 124 journal: Path, generated: str, *, metadata_type: str = "morning_briefing" 125 125 ) -> None: 126 - path = journal / "sol" / "briefing.md" 126 + path = journal / "identity" / "briefing.md" 127 127 path.parent.mkdir(parents=True, exist_ok=True) 128 128 path.write_text( 129 129 (
+176
tests/test_identity_writes.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + import hashlib 5 + import json 6 + import os 7 + import re 8 + import stat 9 + import threading 10 + from pathlib import Path 11 + 12 + import pytest 13 + 14 + from think.identity import update_identity_section, write_identity 15 + 16 + 17 + @pytest.fixture(autouse=True) 18 + def _temp_journal(monkeypatch, tmp_path): 19 + monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 20 + config_dir = tmp_path / "config" 21 + config_dir.mkdir() 22 + (config_dir / "journal.json").write_text("{}", encoding="utf-8") 23 + return tmp_path 24 + 25 + 26 + def _history_path(journal_path: Path) -> Path: 27 + return journal_path / "identity" / "history.jsonl" 28 + 29 + 30 + def _read_history(journal_path: Path) -> list[dict]: 31 + return [ 32 + json.loads(line) 33 + for line in _history_path(journal_path).read_text().splitlines() 34 + ] 35 + 36 + 37 + def test_write_identity_first_write(tmp_path): 38 + write_identity( 39 + "pulse.md", 40 + actor="test writer", 41 + op="replace", 42 + section=None, 43 + content="first pulse\n", 44 + reason="test", 45 + ) 46 + 47 + pulse_path = tmp_path / "identity" / "pulse.md" 48 + assert pulse_path.read_text(encoding="utf-8") == "first pulse\n" 49 + 50 + records = _read_history(tmp_path) 51 + assert len(records) == 1 52 + record = records[0] 53 + assert record["before_hash"] == hashlib.sha256(b"").hexdigest() 54 + assert record["bytes_before"] == 0 55 + assert record["after_hash"] == hashlib.sha256(b"first pulse\n").hexdigest() 56 + assert record["bytes_after"] == len("first pulse\n".encode("utf-8")) 57 + 58 + 59 + def test_write_identity_atomic_failure(tmp_path, monkeypatch): 60 + identity_dir = tmp_path / "identity" 61 + identity_dir.mkdir() 62 + target = identity_dir / "self.md" 63 + target.write_text("original\n", encoding="utf-8") 64 + 65 + def fail_replace(src, dst): 66 + raise OSError("replace failed") 67 + 68 + 
monkeypatch.setattr("think.identity.os.replace", fail_replace) 69 + 70 + with pytest.raises(OSError, match="replace failed"): 71 + write_identity( 72 + "self.md", 73 + actor="test writer", 74 + op="replace", 75 + section=None, 76 + content="updated\n", 77 + reason="test", 78 + ) 79 + 80 + assert target.read_text(encoding="utf-8") == "original\n" 81 + assert not _history_path(tmp_path).exists() 82 + assert list(identity_dir.glob(".self.md.*.tmp")) == [] 83 + 84 + 85 + def test_write_identity_lock_serializes(tmp_path): 86 + def writer(actor: str, content: str) -> None: 87 + write_identity( 88 + "self.md", 89 + actor=actor, 90 + op="replace", 91 + section=None, 92 + content=content, 93 + reason="test", 94 + ) 95 + 96 + thread_one = threading.Thread(target=writer, args=("writer-1", "first\n")) 97 + thread_two = threading.Thread(target=writer, args=("writer-2", "second\n")) 98 + thread_one.start() 99 + thread_two.start() 100 + thread_one.join() 101 + thread_two.join() 102 + 103 + final_content = (tmp_path / "identity" / "self.md").read_text(encoding="utf-8") 104 + assert final_content in {"first\n", "second\n"} 105 + 106 + records = _read_history(tmp_path) 107 + assert len(records) == 2 108 + assert {records[0]["actor"], records[1]["actor"]} == {"writer-1", "writer-2"} 109 + 110 + 111 + def test_write_identity_history_schema(tmp_path): 112 + write_identity( 113 + "awareness.md", 114 + actor="schema test", 115 + op="replace", 116 + section=None, 117 + content="awareness\n", 118 + reason="test", 119 + ) 120 + 121 + record = _read_history(tmp_path)[0] 122 + assert list(record) == [ 123 + "ts", 124 + "file", 125 + "actor", 126 + "op", 127 + "section", 128 + "reason", 129 + "before_hash", 130 + "after_hash", 131 + "bytes_before", 132 + "bytes_after", 133 + ] 134 + assert record["file"] == "awareness.md" 135 + assert record["actor"] == "schema test" 136 + assert record["op"] == "replace" 137 + assert record["section"] is None 138 + assert isinstance(record["bytes_before"], 
int) 139 + assert isinstance(record["bytes_after"], int) 140 + assert isinstance(record["before_hash"], str) 141 + assert isinstance(record["after_hash"], str) 142 + assert re.fullmatch(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z", record["ts"]) 143 + 144 + 145 + def test_write_identity_mode_0600(tmp_path): 146 + write_identity( 147 + "partner.md", 148 + actor="mode test", 149 + op="replace", 150 + section=None, 151 + content="partner\n", 152 + reason="test", 153 + ) 154 + 155 + mode = stat.S_IMODE(os.stat(tmp_path / "identity" / "partner.md").st_mode) 156 + assert mode == 0o600 157 + 158 + 159 + def test_update_identity_section_returns_false_no_change(tmp_path): 160 + identity_dir = tmp_path / "identity" 161 + identity_dir.mkdir() 162 + (identity_dir / "partner.md").write_text( 163 + "# partner\n\n## work patterns\nPrefers mornings\n", 164 + encoding="utf-8", 165 + ) 166 + 167 + changed = update_identity_section( 168 + "partner.md", 169 + "work patterns", 170 + "Prefers mornings", 171 + actor="section test", 172 + reason="test", 173 + ) 174 + 175 + assert changed is False 176 + assert not _history_path(tmp_path).exists()
+4 -4
tests/test_journal_merge.py
··· 112 112 {"title": "Work"}, 113 113 ) 114 114 115 - (source / "sol").mkdir(parents=True) 116 - (source / "sol" / "self.md").write_text("source sol\n", encoding="utf-8") 115 + (source / "identity").mkdir(parents=True) 116 + (source / "identity" / "self.md").write_text("source identity\n", encoding="utf-8") 117 117 (source / "config").mkdir(parents=True) 118 118 (source / "config" / "source-only.json").write_text("{}", encoding="utf-8") 119 119 ··· 516 516 ).read_text(encoding="utf-8") == '{"manifest": "target"}\n' 517 517 518 518 519 - def test_source_sol_skipped(merge_journals_fixture, monkeypatch): 519 + def test_source_identity_skipped(merge_journals_fixture, monkeypatch): 520 520 paths = merge_journals_fixture 521 521 _mock_indexer(monkeypatch) 522 522 523 523 result = runner.invoke(call_app, ["journal", "merge", str(paths["source"])]) 524 524 525 525 assert result.exit_code == 0 526 - assert not (paths["target"] / "sol" / "self.md").exists() 526 + assert not (paths["target"] / "identity" / "self.md").exists() 527 527 528 528 529 529 def test_source_config_skipped(merge_journals_fixture, monkeypatch):
+187 -132
tests/test_sol_call.py
··· 3 3 4 4 """Tests for sol call identity — identity directory read/write commands.""" 5 5 6 + import inspect 6 7 import json 8 + import re 7 9 8 10 import pytest 9 11 from typer.testing import CliRunner ··· 11 13 from think.tools.sol import app 12 14 13 15 runner = CliRunner() 16 + _HISTORY_FIELDS = [ 17 + "ts", 18 + "file", 19 + "actor", 20 + "op", 21 + "section", 22 + "reason", 23 + "before_hash", 24 + "after_hash", 25 + "bytes_before", 26 + "bytes_after", 27 + ] 28 + 29 + 30 + def _read_history(journal_path): 31 + history = journal_path / "identity" / "history.jsonl" 32 + return [json.loads(line) for line in history.read_text().splitlines()] 33 + 34 + 35 + def _assert_history_record(record, *, file_name, actor, op, section, reason): 36 + assert list(record) == _HISTORY_FIELDS 37 + assert re.fullmatch(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z", record["ts"]) 38 + assert record["file"] == file_name 39 + assert record["actor"] == actor 40 + assert record["op"] == op 41 + assert record["section"] == section 42 + assert record["reason"] == reason 43 + assert isinstance(record["before_hash"], str) 44 + assert isinstance(record["after_hash"], str) 45 + assert isinstance(record["bytes_before"], int) 46 + assert isinstance(record["bytes_after"], int) 14 47 15 48 16 49 @pytest.fixture 17 - def journal_with_sol(tmp_path, monkeypatch): 18 - """Set up a journal with sol/ directory containing self.md, agency.md, and partner.md.""" 50 + def journal_with_identity(tmp_path, monkeypatch): 51 + """Set up a journal with identity/ containing self.md, agency.md, and partner.md.""" 19 52 monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 20 53 21 - # Provide minimal config for ensure_sol_directory 54 + # Provide minimal config for ensure_identity_directory 22 55 config_dir = tmp_path / "config" 23 56 config_dir.mkdir() 24 57 (config_dir / "journal.json").write_text( 25 58 json.dumps({"identity": {"name": "Test User"}}) 26 59 ) 27 60 28 - sol_dir = tmp_path / "sol" 
29 - sol_dir.mkdir() 61 + identity_dir = tmp_path / "identity" 62 + identity_dir.mkdir() 30 63 31 64 self_md = """\ 32 65 # self ··· 48 81 ## what I find interesting 49 82 [discovering] 50 83 """ 51 - (sol_dir / "self.md").write_text(self_md) 84 + (identity_dir / "self.md").write_text(self_md) 52 85 53 86 agency_md = """\ 54 87 # agency ··· 64 97 ## system 65 98 [monitoring] 66 99 """ 67 - (sol_dir / "agency.md").write_text(agency_md) 100 + (identity_dir / "agency.md").write_text(agency_md) 68 101 69 102 partner_md = """\ 70 103 # partner ··· 87 120 ## expertise domains 88 121 [observing] 89 122 """ 90 - (sol_dir / "partner.md").write_text(partner_md) 123 + (identity_dir / "partner.md").write_text(partner_md) 124 + (identity_dir / "awareness.md").write_text("not yet updated\n") 91 125 92 126 return tmp_path 93 127 94 128 95 129 class TestSolSelfRead: 96 - def test_read_self(self, journal_with_sol): 130 + def test_read_self(self, journal_with_identity): 97 131 result = runner.invoke(app, ["self"]) 98 132 assert result.exit_code == 0 99 133 assert "# self" in result.output ··· 104 138 config_dir = tmp_path / "config" 105 139 config_dir.mkdir() 106 140 (config_dir / "journal.json").write_text(json.dumps({})) 107 - # ensure_sol_directory will create the file, so this tests the happy path 141 + # ensure_identity_directory will create the file, so this tests the happy path 108 142 result = runner.invoke(app, ["self"]) 109 143 assert result.exit_code == 0 110 144 111 145 112 146 class TestSolSelfWrite: 113 - def test_write_self(self, journal_with_sol): 147 + def test_write_self(self, journal_with_identity): 114 148 new_content = "# self\n\nI am sol. 
Jer's journal.\n\n## my name\nsol\n" 115 149 result = runner.invoke(app, ["self", "--write"], input=new_content) 116 150 assert result.exit_code == 0 117 151 assert "self.md updated" in result.output 118 152 119 153 # Verify file was written 120 - self_path = journal_with_sol / "sol" / "self.md" 154 + self_path = journal_with_identity / "identity" / "self.md" 121 155 assert self_path.read_text() == new_content 122 156 123 - def test_write_self_empty_stdin(self, journal_with_sol): 157 + def test_write_self_empty_stdin(self, journal_with_identity): 124 158 result = runner.invoke(app, ["self", "--write"], input="") 125 159 assert result.exit_code == 1 126 160 assert "no content" in result.output 127 161 128 - def test_write_self_whitespace_only(self, journal_with_sol): 162 + def test_write_self_whitespace_only(self, journal_with_identity): 129 163 result = runner.invoke(app, ["self", "--write"], input=" \n\n ") 130 164 assert result.exit_code == 1 131 165 assert "no content" in result.output 132 166 133 167 134 168 class TestSolSelfUpdateSection: 135 - def test_update_section_owner(self, journal_with_sol): 169 + def test_update_section_owner(self, journal_with_identity): 136 170 result = runner.invoke( 137 171 app, 138 172 ["self", "--update-section", "who I'm here for"], ··· 142 176 assert "Updated ## who I'm here for" in result.output 143 177 144 178 # Verify section was updated, other sections preserved 145 - self_path = journal_with_sol / "sol" / "self.md" 179 + self_path = journal_with_identity / "identity" / "self.md" 146 180 content = self_path.read_text() 147 181 assert "Jer — goes by Jer, not Jeremie" in content 148 182 assert "## my name" in content 149 183 assert "sol (default)" in content 150 184 assert "## our relationship" in content 151 185 152 - def test_update_section_not_found(self, journal_with_sol): 186 + def test_update_section_not_found(self, journal_with_identity): 153 187 result = runner.invoke( 154 188 app, 155 189 ["self", "--update-section", 
"nonexistent"], ··· 158 192 assert result.exit_code == 1 159 193 assert "not found" in result.output 160 194 161 - def test_update_section_empty_stdin(self, journal_with_sol): 195 + def test_update_section_empty_stdin(self, journal_with_identity): 162 196 result = runner.invoke( 163 197 app, 164 198 ["self", "--update-section", "who I'm here for"], ··· 169 203 170 204 171 205 class TestSolPartnerRead: 172 - def test_read_partner(self, journal_with_sol): 206 + def test_read_partner(self, journal_with_identity): 173 207 result = runner.invoke(app, ["partner"]) 174 208 assert result.exit_code == 0 175 209 assert "# partner" in result.output ··· 180 214 config_dir = tmp_path / "config" 181 215 config_dir.mkdir() 182 216 (config_dir / "journal.json").write_text(json.dumps({})) 183 - # ensure_sol_directory creates partner.md 217 + # ensure_identity_directory creates partner.md 184 218 result = runner.invoke(app, ["partner"]) 185 219 assert result.exit_code == 0 186 220 187 221 188 222 class TestSolPartnerWrite: 189 - def test_write_partner(self, journal_with_sol): 223 + def test_write_partner(self, journal_with_identity): 190 224 new_content = "# partner\n\n## work patterns\nPrefers mornings for deep work.\n" 191 225 result = runner.invoke(app, ["partner", "--write"], input=new_content) 192 226 assert result.exit_code == 0 193 227 assert "partner.md updated" in result.output 194 228 195 - partner_path = journal_with_sol / "sol" / "partner.md" 229 + partner_path = journal_with_identity / "identity" / "partner.md" 196 230 assert partner_path.read_text() == new_content 197 231 198 - def test_write_partner_empty_stdin(self, journal_with_sol): 232 + def test_write_partner_empty_stdin(self, journal_with_identity): 199 233 result = runner.invoke(app, ["partner", "--write"], input="") 200 234 assert result.exit_code == 1 201 235 assert "no content" in result.output 202 236 203 237 204 238 class TestSolPartnerUpdateSection: 205 - def test_update_section_work_patterns(self, 
journal_with_sol): 239 + def test_update_section_work_patterns(self, journal_with_identity): 206 240 result = runner.invoke( 207 241 app, 208 242 ["partner", "--update-section", "work patterns"], ··· 211 245 assert result.exit_code == 0 212 246 assert "Updated ## work patterns" in result.output 213 247 214 - partner_path = journal_with_sol / "sol" / "partner.md" 248 + partner_path = journal_with_identity / "identity" / "partner.md" 215 249 content = partner_path.read_text() 216 250 assert "Prefers async communication" in content 217 251 assert "## communication style" in content 218 252 assert "## decision style" in content 219 253 220 - def test_update_section_not_found(self, journal_with_sol): 254 + def test_update_section_not_found(self, journal_with_identity): 221 255 result = runner.invoke( 222 256 app, 223 257 ["partner", "--update-section", "nonexistent"], ··· 226 260 assert result.exit_code == 1 227 261 assert "not found" in result.output 228 262 229 - def test_update_section_empty_stdin(self, journal_with_sol): 263 + def test_update_section_empty_stdin(self, journal_with_identity): 230 264 result = runner.invoke( 231 265 app, 232 266 ["partner", "--update-section", "work patterns"], ··· 237 271 238 272 239 273 class TestSolAgencyRead: 240 - def test_read_agency(self, journal_with_sol): 274 + def test_read_agency(self, journal_with_identity): 241 275 result = runner.invoke(app, ["agency"]) 242 276 assert result.exit_code == 0 243 277 assert "# agency" in result.output ··· 248 282 config_dir = tmp_path / "config" 249 283 config_dir.mkdir() 250 284 (config_dir / "journal.json").write_text(json.dumps({})) 251 - # ensure_sol_directory creates agency.md 285 + # ensure_identity_directory creates agency.md 252 286 result = runner.invoke(app, ["agency"]) 253 287 assert result.exit_code == 0 254 288 255 289 256 290 class TestSolAgencyWrite: 257 - def test_write_agency(self, journal_with_sol): 291 + def test_write_agency(self, journal_with_identity): 258 292 
new_content = "# agency\n\n## curation\n- review entity duplicates\n\n## system\n[clean]\n" 259 293 result = runner.invoke(app, ["agency", "--write"], input=new_content) 260 294 assert result.exit_code == 0 261 295 assert "agency.md updated" in result.output 262 296 263 297 # Verify file was written 264 - agency_path = journal_with_sol / "sol" / "agency.md" 298 + agency_path = journal_with_identity / "identity" / "agency.md" 265 299 assert agency_path.read_text() == new_content 266 300 267 - def test_write_agency_empty_stdin(self, journal_with_sol): 301 + def test_write_agency_empty_stdin(self, journal_with_identity): 268 302 result = runner.invoke(app, ["agency", "--write"], input="") 269 303 assert result.exit_code == 1 270 304 assert "no content" in result.output 271 305 272 306 273 307 class TestSolPulseRead: 274 - def test_read_pulse(self, journal_with_sol): 308 + def test_read_pulse(self, journal_with_identity): 275 309 pulse_md = "---\nupdated: 2026-03-22T14:00:00\nsource: pulse-cogitate\n---\n\nTest narrative.\n" 276 - (journal_with_sol / "sol" / "pulse.md").write_text(pulse_md) 310 + (journal_with_identity / "identity" / "pulse.md").write_text(pulse_md) 277 311 result = runner.invoke(app, ["pulse"]) 278 312 assert result.exit_code == 0 279 313 assert "Test narrative" in result.output ··· 289 323 290 324 291 325 class TestSolPulseWrite: 292 - def test_write_pulse(self, journal_with_sol): 326 + def test_write_pulse(self, journal_with_identity): 293 327 new_content = "---\nupdated: 2026-03-22T14:00:00\nsource: pulse-cogitate\n---\n\nNew narrative.\n" 294 328 result = runner.invoke(app, ["pulse", "--write"], input=new_content) 295 329 assert result.exit_code == 0 296 330 assert "pulse.md updated" in result.output 297 331 298 332 # Verify file was written 299 - pulse_path = journal_with_sol / "sol" / "pulse.md" 333 + pulse_path = journal_with_identity / "identity" / "pulse.md" 300 334 assert pulse_path.read_text() == new_content 301 335 302 - def 
test_write_pulse_empty_stdin(self, journal_with_sol): 336 + def test_write_pulse_empty_stdin(self, journal_with_identity): 303 337 result = runner.invoke(app, ["pulse", "--write"], input="") 304 338 assert result.exit_code == 1 305 339 assert "no content" in result.output 306 340 307 341 308 - class TestSolWriteDoesNotEscapeSolDir: 309 - """Verify that sol call identity only writes to sol/ directory files.""" 342 + class TestSolWriteDoesNotEscapeIdentityDir: 343 + """Verify that sol call identity only writes to identity/ files.""" 310 344 311 - def test_self_write_stays_in_sol_dir(self, journal_with_sol): 312 - """Write to self.md goes to sol/self.md, not anywhere else.""" 345 + def test_self_write_stays_in_identity_dir(self, journal_with_identity): 346 + """Write to self.md goes to identity/self.md, not anywhere else.""" 313 347 result = runner.invoke(app, ["self", "--write"], input="test content\n") 314 348 assert result.exit_code == 0 315 - self_path = journal_with_sol / "sol" / "self.md" 349 + self_path = journal_with_identity / "identity" / "self.md" 316 350 assert self_path.read_text() == "test content\n" 317 - # No files created outside sol/ 318 - journal_files = set(f.name for f in journal_with_sol.iterdir() if f.is_file()) 351 + journal_files = set( 352 + f.name for f in journal_with_identity.iterdir() if f.is_file() 353 + ) 319 354 assert "self.md" not in journal_files 320 355 321 - def test_agency_write_stays_in_sol_dir(self, journal_with_sol): 322 - """Write to agency.md goes to sol/agency.md, not anywhere else.""" 356 + def test_agency_write_stays_in_identity_dir(self, journal_with_identity): 357 + """Write to agency.md goes to identity/agency.md, not anywhere else.""" 323 358 result = runner.invoke(app, ["agency", "--write"], input="test content\n") 324 359 assert result.exit_code == 0 325 - agency_path = journal_with_sol / "sol" / "agency.md" 360 + agency_path = journal_with_identity / "identity" / "agency.md" 326 361 assert agency_path.read_text() 
== "test content\n" 327 - # No files created outside sol/ 328 - journal_files = set(f.name for f in journal_with_sol.iterdir() if f.is_file()) 362 + journal_files = set( 363 + f.name for f in journal_with_identity.iterdir() if f.is_file() 364 + ) 329 365 assert "agency.md" not in journal_files 330 366 331 - def test_pulse_write_stays_in_sol_dir(self, journal_with_sol): 332 - """Write to pulse.md goes to sol/pulse.md, not anywhere else.""" 367 + def test_pulse_write_stays_in_identity_dir(self, journal_with_identity): 368 + """Write to pulse.md goes to identity/pulse.md, not anywhere else.""" 333 369 result = runner.invoke(app, ["pulse", "--write"], input="test content\n") 334 370 assert result.exit_code == 0 335 - pulse_path = journal_with_sol / "sol" / "pulse.md" 371 + pulse_path = journal_with_identity / "identity" / "pulse.md" 336 372 assert pulse_path.read_text() == "test content\n" 337 - # No files created outside sol/ 338 - journal_files = set(f.name for f in journal_with_sol.iterdir() if f.is_file()) 373 + journal_files = set( 374 + f.name for f in journal_with_identity.iterdir() if f.is_file() 375 + ) 339 376 assert "pulse.md" not in journal_files 340 377 341 - def test_partner_write_stays_in_sol_dir(self, journal_with_sol): 342 - """Write to partner.md goes to sol/partner.md, not anywhere else.""" 378 + def test_partner_write_stays_in_identity_dir(self, journal_with_identity): 379 + """Write to partner.md goes to identity/partner.md, not anywhere else.""" 343 380 result = runner.invoke(app, ["partner", "--write"], input="test content\n") 344 381 assert result.exit_code == 0 345 - partner_path = journal_with_sol / "sol" / "partner.md" 382 + partner_path = journal_with_identity / "identity" / "partner.md" 346 383 assert partner_path.read_text() == "test content\n" 347 - # No files created outside sol/ 348 - journal_files = set(f.name for f in journal_with_sol.iterdir() if f.is_file()) 384 + journal_files = set( 385 + f.name for f in 
journal_with_identity.iterdir() if f.is_file() 386 + ) 349 387 assert "partner.md" not in journal_files 350 388 351 389 352 390 class TestSolSelfValueOption: 353 - def test_write_self_with_value(self, journal_with_sol): 391 + def test_write_self_with_value(self, journal_with_identity): 354 392 new_content = "# self\n\nI am sol. Jer's journal.\n\n## my name\nsol\n" 355 393 result = runner.invoke(app, ["self", "--write", "--value", new_content]) 356 394 assert result.exit_code == 0 357 395 assert "self.md updated" in result.output 358 - self_path = journal_with_sol / "sol" / "self.md" 396 + self_path = journal_with_identity / "identity" / "self.md" 359 397 assert self_path.read_text() == new_content 360 398 361 - def test_update_section_with_value(self, journal_with_sol): 399 + def test_update_section_with_value(self, journal_with_identity): 362 400 result = runner.invoke( 363 401 app, 364 402 [ ··· 371 409 ) 372 410 assert result.exit_code == 0 373 411 assert "Updated ## who I'm here for" in result.output 374 - content = (journal_with_sol / "sol" / "self.md").read_text() 412 + content = (journal_with_identity / "identity" / "self.md").read_text() 375 413 assert "Jer — founder" in content 376 414 377 - def test_value_empty_string_errors(self, journal_with_sol): 415 + def test_value_empty_string_errors(self, journal_with_identity): 378 416 result = runner.invoke(app, ["self", "--write", "--value", " "]) 379 417 assert result.exit_code == 1 380 418 assert "no content" in result.output 381 419 382 - def test_value_takes_precedence_over_stdin(self, journal_with_sol): 420 + def test_value_takes_precedence_over_stdin(self, journal_with_identity): 383 421 result = runner.invoke( 384 422 app, 385 423 ["self", "--write", "--value", "from value\n"], 386 424 input="from stdin\n", 387 425 ) 388 426 assert result.exit_code == 0 389 - self_path = journal_with_sol / "sol" / "self.md" 427 + self_path = journal_with_identity / "identity" / "self.md" 390 428 assert 
self_path.read_text() == "from value\n" 391 429 392 430 393 431 class TestSolAgencyValueOption: 394 - def test_write_agency_with_value(self, journal_with_sol): 432 + def test_write_agency_with_value(self, journal_with_identity): 395 433 new_content = "# agency\n\n## curation\n- item\n" 396 434 result = runner.invoke(app, ["agency", "--write", "--value", new_content]) 397 435 assert result.exit_code == 0 398 436 assert "agency.md updated" in result.output 399 - agency_path = journal_with_sol / "sol" / "agency.md" 437 + agency_path = journal_with_identity / "identity" / "agency.md" 400 438 assert agency_path.read_text() == new_content 401 439 402 - def test_value_empty_string_errors(self, journal_with_sol): 440 + def test_value_empty_string_errors(self, journal_with_identity): 403 441 result = runner.invoke(app, ["agency", "--write", "--value", ""]) 404 442 assert result.exit_code == 1 405 443 assert "no content" in result.output 406 444 407 445 408 446 class TestSolPulseValueOption: 409 - def test_write_pulse_with_value(self, journal_with_sol): 447 + def test_write_pulse_with_value(self, journal_with_identity): 410 448 new_content = "---\nupdated: 2026-03-22\n---\n\nNarrative.\n" 411 449 result = runner.invoke(app, ["pulse", "--write", "--value", new_content]) 412 450 assert result.exit_code == 0 413 451 assert "pulse.md updated" in result.output 414 - pulse_path = journal_with_sol / "sol" / "pulse.md" 452 + pulse_path = journal_with_identity / "identity" / "pulse.md" 415 453 assert pulse_path.read_text() == new_content 416 454 417 - def test_value_empty_string_errors(self, journal_with_sol): 455 + def test_value_empty_string_errors(self, journal_with_identity): 418 456 result = runner.invoke(app, ["pulse", "--write", "--value", ""]) 419 457 assert result.exit_code == 1 420 458 assert "no content" in result.output 421 459 422 460 423 461 class TestSolPartnerValueOption: 424 - def test_write_partner_with_value(self, journal_with_sol): 462 + def 
test_write_partner_with_value(self, journal_with_identity): 425 463 new_content = "# partner\n\n## work patterns\nMorning person.\n" 426 464 result = runner.invoke(app, ["partner", "--write", "--value", new_content]) 427 465 assert result.exit_code == 0 428 466 assert "partner.md updated" in result.output 429 - partner_path = journal_with_sol / "sol" / "partner.md" 467 + partner_path = journal_with_identity / "identity" / "partner.md" 430 468 assert partner_path.read_text() == new_content 431 469 432 - def test_update_section_with_value(self, journal_with_sol): 470 + def test_update_section_with_value(self, journal_with_identity): 433 471 result = runner.invoke( 434 472 app, 435 473 [ ··· 442 480 ) 443 481 assert result.exit_code == 0 444 482 assert "Updated ## work patterns" in result.output 445 - content = (journal_with_sol / "sol" / "partner.md").read_text() 483 + content = (journal_with_identity / "identity" / "partner.md").read_text() 446 484 assert "Prefers mornings" in content 447 485 448 - def test_value_empty_string_errors(self, journal_with_sol): 486 + def test_value_empty_string_errors(self, journal_with_identity): 449 487 result = runner.invoke(app, ["partner", "--write", "--value", " "]) 450 488 assert result.exit_code == 1 451 489 assert "no content" in result.output 452 490 453 491 454 492 class TestSolHistoryLogging: 455 - def test_self_write_logs_history(self, journal_with_sol): 493 + def test_self_write_logs_history(self, journal_with_identity): 456 494 new_content = "# self\n\nUpdated.\n" 457 495 runner.invoke(app, ["self", "--write", "--value", new_content]) 458 - history = journal_with_sol / "sol" / "history.jsonl" 459 - assert history.exists() 460 - records = [json.loads(line) for line in history.read_text().strip().split("\n")] 496 + records = _read_history(journal_with_identity) 461 497 assert len(records) == 1 462 - assert records[0]["file"] == "self.md" 463 - assert records[0]["source"] == "cli" 464 - assert records[0]["section"] is None 
465 - assert "ts" in records[0] 466 - assert "diff" in records[0] 498 + _assert_history_record( 499 + records[0], 500 + file_name="self.md", 501 + actor="sol call identity self --write", 502 + op="replace", 503 + section=None, 504 + reason="manual replace", 505 + ) 467 506 468 - def test_agency_write_logs_history(self, journal_with_sol): 507 + def test_agency_write_logs_history(self, journal_with_identity): 469 508 runner.invoke(app, ["agency", "--write", "--value", "# agency\n\nNew.\n"]) 470 - history = journal_with_sol / "sol" / "history.jsonl" 471 - assert history.exists() 472 - records = [json.loads(line) for line in history.read_text().strip().split("\n")] 509 + records = _read_history(journal_with_identity) 473 510 assert len(records) == 1 474 - assert records[0]["file"] == "agency.md" 475 - assert records[0]["source"] == "cli" 511 + _assert_history_record( 512 + records[0], 513 + file_name="agency.md", 514 + actor="sol call identity agency --write", 515 + op="replace", 516 + section=None, 517 + reason="manual replace", 518 + ) 476 519 477 - def test_pulse_write_logs_history(self, journal_with_sol): 520 + def test_pulse_write_logs_history(self, journal_with_identity): 478 521 runner.invoke(app, ["pulse", "--write", "--value", "---\n---\n\nPulse.\n"]) 479 - history = journal_with_sol / "sol" / "history.jsonl" 480 - assert history.exists() 481 - records = [json.loads(line) for line in history.read_text().strip().split("\n")] 522 + records = _read_history(journal_with_identity) 482 523 assert len(records) == 1 483 - assert records[0]["file"] == "pulse.md" 524 + _assert_history_record( 525 + records[0], 526 + file_name="pulse.md", 527 + actor="sol call identity pulse --write", 528 + op="replace", 529 + section=None, 530 + reason="manual replace", 531 + ) 484 532 485 - def test_update_section_logs_history(self, journal_with_sol): 533 + def test_update_section_logs_history(self, journal_with_identity): 486 534 runner.invoke( 487 535 app, 488 536 ["self", 
"--update-section", "who I'm here for", "--value", "Jer"], 489 537 ) 490 - history = journal_with_sol / "sol" / "history.jsonl" 491 - assert history.exists() 492 - records = [json.loads(line) for line in history.read_text().strip().split("\n")] 538 + records = _read_history(journal_with_identity) 493 539 assert len(records) == 1 494 - assert records[0]["file"] == "self.md" 495 - assert records[0]["section"] == "who I'm here for" 496 - assert records[0]["source"] == "api" 540 + _assert_history_record( 541 + records[0], 542 + file_name="self.md", 543 + actor="sol call identity self --update-section <heading>", 544 + op="update_section", 545 + section="who I'm here for", 546 + reason="manual section update", 547 + ) 497 548 498 - def test_multiple_writes_append(self, journal_with_sol): 549 + def test_multiple_writes_append(self, journal_with_identity): 499 550 runner.invoke(app, ["self", "--write", "--value", "# self\n\nFirst.\n"]) 500 551 runner.invoke(app, ["self", "--write", "--value", "# self\n\nSecond.\n"]) 501 - history = journal_with_sol / "sol" / "history.jsonl" 502 - records = [json.loads(line) for line in history.read_text().strip().split("\n")] 552 + records = _read_history(journal_with_identity) 503 553 assert len(records) == 2 504 554 505 - def test_partner_write_logs_history(self, journal_with_sol): 555 + def test_partner_write_logs_history(self, journal_with_identity): 506 556 runner.invoke(app, ["partner", "--write", "--value", "# partner\n\nNew.\n"]) 507 - history = journal_with_sol / "sol" / "history.jsonl" 508 - assert history.exists() 509 - records = [json.loads(line) for line in history.read_text().strip().split("\n")] 557 + records = _read_history(journal_with_identity) 510 558 assert len(records) == 1 511 - assert records[0]["file"] == "partner.md" 512 - assert records[0]["source"] == "cli" 559 + _assert_history_record( 560 + records[0], 561 + file_name="partner.md", 562 + actor="sol call identity partner --write", 563 + op="replace", 564 + 
section=None, 565 + reason="manual replace", 566 + ) 513 567 514 - def test_partner_update_section_logs_history(self, journal_with_sol): 568 + def test_partner_update_section_logs_history(self, journal_with_identity): 515 569 runner.invoke( 516 570 app, 517 571 [ ··· 522 576 "Morning focus", 523 577 ], 524 578 ) 525 - history = journal_with_sol / "sol" / "history.jsonl" 526 - assert history.exists() 527 - records = [json.loads(line) for line in history.read_text().strip().split("\n")] 579 + records = _read_history(journal_with_identity) 528 580 assert len(records) == 1 529 - assert records[0]["file"] == "partner.md" 530 - assert records[0]["section"] == "work patterns" 531 - assert records[0]["source"] == "api" 581 + _assert_history_record( 582 + records[0], 583 + file_name="partner.md", 584 + actor="sol call identity partner --update-section <heading>", 585 + op="update_section", 586 + section="work patterns", 587 + reason="manual section update", 588 + ) 532 589 533 590 534 - class TestHeartbeatEnsureSolDirectory: 535 - """Verify the heartbeat bug fix — ensure_sol_directory() takes no args.""" 536 - 537 - def test_ensure_sol_directory_no_args(self): 538 - """ensure_sol_directory accepts no positional args (heartbeat.py:32 fix).""" 539 - import inspect 591 + class TestHeartbeatEnsureIdentityDirectory: 592 + """Verify the heartbeat bug fix — ensure_identity_directory() takes no args.""" 540 593 541 - from think.awareness import ensure_sol_directory 594 + def test_ensure_identity_directory_no_args(self): 595 + """ensure_identity_directory accepts no positional args (heartbeat.py:32 fix).""" 596 + from think.identity import ensure_identity_directory 542 597 543 - sig = inspect.signature(ensure_sol_directory) 598 + sig = inspect.signature(ensure_identity_directory) 544 599 params = [ 545 600 p for p in sig.parameters.values() if p.default is inspect.Parameter.empty 546 601 ] 547 602 assert len(params) == 0, ( 548 - "ensure_sol_directory should take no required 
arguments" 603 + "ensure_identity_directory should take no required arguments" 549 604 ) 550 605 551 606 def test_heartbeat_calls_correctly(self): 552 - """heartbeat.py calls ensure_sol_directory() without arguments.""" 607 + """heartbeat.py calls ensure_identity_directory() without arguments.""" 553 608 import ast 554 609 from pathlib import Path 555 610 ··· 560 615 if ( 561 616 isinstance(node, ast.Call) 562 617 and isinstance(node.func, ast.Name) 563 - and node.func.id == "ensure_sol_directory" 618 + and node.func.id == "ensure_identity_directory" 564 619 ): 565 620 assert len(node.args) == 0, ( 566 - f"ensure_sol_directory() called with {len(node.args)} args at line {node.lineno}" 621 + f"ensure_identity_directory() called with {len(node.args)} args at line {node.lineno}" 567 622 )
+30 -30
tests/test_sol_call_identity_hydrate.py
··· 37 37 38 38 39 39 def test_identity_hydrate_reads_all_sections(journal_path): 40 - sol_dir = journal_path / "sol" 41 - sol_dir.mkdir() 42 - (sol_dir / "self.md").write_text("self body") 43 - (sol_dir / "partner.md").write_text("partner body") 44 - (sol_dir / "agency.md").write_text("agency body") 45 - (sol_dir / "awareness.md").write_text("awareness body") 40 + identity_dir = journal_path / "identity" 41 + identity_dir.mkdir() 42 + (identity_dir / "self.md").write_text("self body") 43 + (identity_dir / "partner.md").write_text("partner body") 44 + (identity_dir / "agency.md").write_text("agency body") 45 + (identity_dir / "awareness.md").write_text("awareness body") 46 46 47 47 result = _run_identity_hydrate(journal_path) 48 48 ··· 57 57 58 58 59 59 def test_identity_hydrate_marks_missing_sections(journal_path): 60 - sol_dir = journal_path / "sol" 61 - sol_dir.mkdir() 62 - (sol_dir / "self.md").write_text("self body") 63 - (sol_dir / "partner.md").write_text("partner body") 64 - (sol_dir / "awareness.md").write_text("awareness body") 60 + identity_dir = journal_path / "identity" 61 + identity_dir.mkdir() 62 + (identity_dir / "self.md").write_text("self body") 63 + (identity_dir / "partner.md").write_text("partner body") 64 + (identity_dir / "awareness.md").write_text("awareness body") 65 65 66 66 result = _run_identity_hydrate(journal_path) 67 67 ··· 69 69 assert "# agency\n\n(not present)\n" in result.stdout 70 70 71 71 72 - def test_identity_hydrate_handles_empty_sol_directory(journal_path): 72 + def test_identity_hydrate_handles_empty_identity_directory(journal_path): 73 73 result = _run_identity_hydrate(journal_path) 74 74 75 75 assert result.returncode == 0 ··· 78 78 79 79 80 80 def test_identity_hydrate_starts_with_species_preamble(journal_path): 81 - sol_dir = journal_path / "sol" 82 - sol_dir.mkdir() 83 - (sol_dir / "self.md").write_text("self body") 84 - (sol_dir / "partner.md").write_text("partner body") 85 - (sol_dir / "agency.md").write_text("agency 
body") 86 - (sol_dir / "awareness.md").write_text("awareness body") 81 + identity_dir = journal_path / "identity" 82 + identity_dir.mkdir() 83 + (identity_dir / "self.md").write_text("self body") 84 + (identity_dir / "partner.md").write_text("partner body") 85 + (identity_dir / "agency.md").write_text("agency body") 86 + (identity_dir / "awareness.md").write_text("awareness body") 87 87 88 88 result = _run_identity_hydrate(journal_path) 89 89 ··· 96 96 97 97 98 98 def test_identity_hydrate_strips_duplicate_section_heading(journal_path): 99 - sol_dir = journal_path / "sol" 100 - sol_dir.mkdir() 101 - (sol_dir / "self.md").write_text("# self\n\nself body\n") 102 - (sol_dir / "partner.md").write_text("partner body") 103 - (sol_dir / "agency.md").write_text("agency body") 104 - (sol_dir / "awareness.md").write_text("awareness body") 99 + identity_dir = journal_path / "identity" 100 + identity_dir.mkdir() 101 + (identity_dir / "self.md").write_text("# self\n\nself body\n") 102 + (identity_dir / "partner.md").write_text("partner body") 103 + (identity_dir / "agency.md").write_text("agency body") 104 + (identity_dir / "awareness.md").write_text("awareness body") 105 105 106 106 result = _run_identity_hydrate(journal_path) 107 107 ··· 111 111 112 112 113 113 def test_identity_hydrate_preserves_non_matching_heading(journal_path): 114 - sol_dir = journal_path / "sol" 115 - sol_dir.mkdir() 116 - (sol_dir / "self.md").write_text("# My Custom Heading\n\nself body\n") 117 - (sol_dir / "partner.md").write_text("partner body") 118 - (sol_dir / "agency.md").write_text("agency body") 119 - (sol_dir / "awareness.md").write_text("awareness body") 114 + identity_dir = journal_path / "identity" 115 + identity_dir.mkdir() 116 + (identity_dir / "self.md").write_text("# My Custom Heading\n\nself body\n") 117 + (identity_dir / "partner.md").write_text("partner body") 118 + (identity_dir / "agency.md").write_text("agency body") 119 + (identity_dir / "awareness.md").write_text("awareness 
body") 120 120 121 121 result = _run_identity_hydrate(journal_path) 122 122
+8 -8
tests/test_template_substitution.py
··· 253 253 assert result_none.text == result_empty.text 254 254 255 255 256 - def test_load_prompt_sol_vars_follow_journal_override(monkeypatch, tmp_path): 257 - """Journal sol/ content should not leak across journal overrides.""" 256 + def test_load_prompt_identity_vars_follow_journal_override(monkeypatch, tmp_path): 257 + """Journal identity/ content should not leak across journal overrides.""" 258 258 259 259 def write_journal(journal_dir, awareness_text): 260 260 config_dir = journal_dir / "config" ··· 267 267 } 268 268 ) 269 269 ) 270 - sol_dir = journal_dir / "sol" 271 - sol_dir.mkdir() 272 - (sol_dir / "awareness.md").write_text(awareness_text) 270 + identity_dir = journal_dir / "identity" 271 + identity_dir.mkdir() 272 + (identity_dir / "awareness.md").write_text(awareness_text) 273 273 274 274 prompt_dir = tmp_path / "prompts" 275 275 prompt_dir.mkdir() 276 - (prompt_dir / "sol_vars.md").write_text("Awareness:\n$sol_awareness\n") 276 + (prompt_dir / "identity_vars.md").write_text("Awareness:\n$identity_awareness\n") 277 277 278 278 journal_one = tmp_path / "journal-one" 279 279 journal_two = tmp_path / "journal-two" ··· 281 281 write_journal(journal_two, "second awareness") 282 282 283 283 monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(journal_one)) 284 - first = load_prompt("sol_vars", base_dir=prompt_dir) 284 + first = load_prompt("identity_vars", base_dir=prompt_dir) 285 285 assert "first awareness" in first.text 286 286 287 287 monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(journal_two)) 288 - second = load_prompt("sol_vars", base_dir=prompt_dir) 288 + second = load_prompt("identity_vars", base_dir=prompt_dir) 289 289 assert "second awareness" in second.text 290 290 assert "first awareness" not in second.text
-343
think/awareness.py
··· 15 15 16 16 from __future__ import annotations 17 17 18 - import fcntl 19 18 import json 20 19 import logging 21 20 import os ··· 36 35 d = Path(get_journal()) / "awareness" 37 36 d.mkdir(exist_ok=True) 38 37 return d 39 - 40 - 41 - _AGENCY_MD = """\ 42 - # agency 43 - 44 - things I'm tracking, acting on, or watching. I update this as I notice things 45 - and resolve them. the heartbeat reviews this periodically. 46 - 47 - ## curation 48 - [nothing yet — building initial picture of journal health] 49 - 50 - ## observations 51 - [watching and learning] 52 - 53 - ## follow-throughs 54 - [none yet] 55 - 56 - ## system 57 - [monitoring] 58 - 59 - ## self-improvement 60 - [learning what works] 61 - """ 62 - 63 - 64 - _PARTNER_MD = """\ 65 - # partner 66 - 67 - Behavioral profile of the journal owner — observed patterns that help sol 68 - adapt its responses, timing, and initiative to how this person actually works. 69 - 70 - ## getting started 71 - 72 - Everything stays on your machine — this journal is yours alone, never sent to sol pbc. 73 - 74 - When meeting the owner for the first time, learn about them naturally through conversation. 75 - Present one thing at a time — don't overwhelm. 76 - 77 - ### learn their name 78 - 79 - Ask what they'd like to be called. Record it: 80 - - `sol call sol set-owner "NAME"` 81 - - With context: `sol call sol set-owner "NAME" --bio "SHORT_BIO"` 82 - 83 - As you learn about them, update your partner profile: 84 - - `sol call identity partner --update-section 'SECTION' --value 'what you observed'` 85 - 86 - ### set up facets 87 - 88 - Ask what areas of their life they want to track (work, personal, hobbies, side projects, etc.). 
Create facets for each: 89 - - `sol call journal facet create TITLE [--emoji EMOJI] [--color COLOR] [--description DESC]` 90 - - `sol call journal facets` — verify what was created 91 - 92 - ### attach entities 93 - 94 - For each facet, ask about key people, companies, projects, and tools: 95 - - `sol call entities attach TYPE ENTITY DESCRIPTION --facet FACET` 96 - - Types: Person, Company, Project, Tool 97 - 98 - ### offer imports 99 - 100 - After setup, offer to bring in history from existing tools: 101 - - Calendar (ics), ChatGPT (chatgpt), Claude (claude), Gemini (gemini), Granola (granola), Notes (obsidian), Kindle (kindle) 102 - - Read guide: `apps/import/guides/{source}.md` 103 - - Navigate: `sol call navigate "/app/import#guide/{source}"` 104 - - If declined: `sol call awareness imports --declined` 105 - 106 - ### support 107 - 108 - If the owner needs help or wants to share feedback, handle it in-place — file tickets, track 109 - responses. Nothing gets sent without their review. 
110 - 111 - ## work patterns 112 - [not yet observed — sol will learn as we spend time together] 113 - 114 - ## communication style 115 - [not yet observed — sol will learn as we spend time together] 116 - 117 - ## relationship priorities 118 - [not yet observed — sol will learn as we spend time together] 119 - 120 - ## decision style 121 - [not yet observed — sol will learn as we spend time together] 122 - 123 - ## expertise domains 124 - [not yet observed — sol will learn as we spend time together] 125 - """ 126 - 127 - _AWARENESS_MD = "not yet updated\n" 128 - 129 - 130 - def _build_self_md(config: dict) -> str: 131 - """Build self.md content, optionally migrating from config data.""" 132 - agent = config.get("agent", {}) 133 - identity = config.get("identity", {}) 134 - 135 - name_status = agent.get("name_status", "default") 136 - agent_name = agent.get("name", "sol") 137 - named_date = agent.get("named_date") 138 - owner_name = identity.get("name", "") 139 - owner_bio = identity.get("bio", "") 140 - 141 - has_named_agent = name_status in ("chosen", "self-named") 142 - has_identity = bool(owner_name) 143 - 144 - # Opening paragraph 145 - if has_named_agent: 146 - opening = ( 147 - f"I am {agent_name}. this is a new journal — we're just getting started." 148 - ) 149 - else: 150 - opening = "I am sol. this is a new journal — we're just getting started." 
151 - 152 - # Name section 153 - if has_named_agent: 154 - if named_date: 155 - name_section = f"{agent_name} (named {named_date})" 156 - else: 157 - name_section = agent_name 158 - else: 159 - name_section = "sol (default)" 160 - 161 - # Owner section 162 - if has_identity: 163 - owner_section = owner_name 164 - if owner_bio: 165 - owner_section += f"\n{owner_bio}" 166 - else: 167 - owner_section = "[getting to know you]" 168 - 169 - return f"""\ 170 - # self 171 - 172 - {opening} 173 - 174 - ## my name 175 - {name_section} 176 - 177 - ## who I'm here for 178 - {owner_section} 179 - 180 - ## our relationship 181 - [forming] 182 - 183 - ## what I've noticed 184 - [observing] 185 - 186 - ## what I find interesting 187 - [discovering] 188 - """ 189 - 190 - 191 - def ensure_sol_directory() -> Path: 192 - """Create {journal}/sol/ with identity files if they don't exist.""" 193 - from think.utils import get_config, get_journal 194 - 195 - sol_dir = Path(get_journal()) / "sol" 196 - sol_dir.mkdir(parents=True, exist_ok=True) 197 - 198 - self_path = sol_dir / "self.md" 199 - if not self_path.exists(): 200 - self_path.write_text(_build_self_md(get_config()), encoding="utf-8") 201 - logger.info("Created %s", self_path) 202 - 203 - agency_path = sol_dir / "agency.md" 204 - if not agency_path.exists(): 205 - agency_path.write_text(_AGENCY_MD, encoding="utf-8") 206 - logger.info("Created %s", agency_path) 207 - 208 - partner_path = sol_dir / "partner.md" 209 - if not partner_path.exists(): 210 - partner_path.write_text(_PARTNER_MD, encoding="utf-8") 211 - logger.info("Created %s", partner_path) 212 - 213 - awareness_path = sol_dir / "awareness.md" 214 - if not awareness_path.exists(): 215 - awareness_path.write_text(_AWARENESS_MD, encoding="utf-8") 216 - logger.info("Created %s", awareness_path) 217 - 218 - return sol_dir 219 - 220 - 221 - def _log_identity_change( 222 - file_name: str, 223 - old_content: str, 224 - new_content: str, 225 - section: str | None = None, 226 - 
source: str = "cli", 227 - ) -> None: 228 - """Append a change record to sol/history.jsonl.""" 229 - import difflib 230 - 231 - from think.utils import get_journal 232 - 233 - diff = "\n".join( 234 - difflib.unified_diff( 235 - old_content.splitlines(keepends=True), 236 - new_content.splitlines(keepends=True), 237 - fromfile=file_name, 238 - tofile=file_name, 239 - ) 240 - ) 241 - record = { 242 - "ts": _now_ts(), 243 - "file": file_name, 244 - "section": section, 245 - "diff": diff, 246 - "source": source, 247 - "pid": os.getpid(), 248 - } 249 - history_path = Path(get_journal()) / "sol" / "history.jsonl" 250 - with open(history_path, "a", encoding="utf-8") as f: 251 - f.write(json.dumps(record) + "\n") 252 - 253 - 254 - def update_identity_section(filename: str, heading: str, content: str) -> bool: 255 - """Update a ## section in sol/{filename}, preserving all other sections. 256 - 257 - Parameters 258 - ---------- 259 - filename : str 260 - File within sol/ directory (e.g., ``"self.md"``, ``"partner.md"``). 261 - heading : str 262 - Section heading without ``##`` prefix (e.g., ``"my name"``). 263 - content : str 264 - New content for the section (may be multi-line). 265 - 266 - Returns 267 - ------- 268 - bool 269 - True if the section was found and updated, False otherwise.
270 - """ 271 - from think.entities.core import atomic_write 272 - from think.utils import get_journal 273 - 274 - file_path = Path(get_journal()) / "sol" / filename 275 - lock_path = file_path.parent / f".{filename}.lock" 276 - if not file_path.exists(): 277 - return False 278 - 279 - lock_fd = None 280 - try: 281 - lock_fd = open(lock_path, "w") 282 - fcntl.flock(lock_fd.fileno(), fcntl.LOCK_EX) 283 - 284 - text = file_path.read_text(encoding="utf-8") 285 - lines = text.split("\n") 286 - 287 - target = f"## {heading}" 288 - start = None 289 - end = None 290 - for i, line in enumerate(lines): 291 - if line == target: 292 - start = i 293 - elif start is not None and line.startswith("## "): 294 - end = i 295 - break 296 - 297 - if start is None: 298 - return False 299 - 300 - if end is None: 301 - end = len(lines) 302 - 303 - content_lines = content.split("\n") if content else [] 304 - new_lines = lines[: start + 1] + content_lines + [""] + lines[end:] 305 - new_text = "\n".join(new_lines) 306 - 307 - # Prune onboarding guidance from partner.md on first behavioral update 308 - if filename == "partner.md" and "## getting started" in new_text: 309 - gs_lines = new_text.split("\n") 310 - gs_start = None 311 - gs_end = None 312 - for j, gl in enumerate(gs_lines): 313 - if gl == "## getting started": 314 - gs_start = j 315 - elif gs_start is not None and gl.startswith("## "): 316 - gs_end = j 317 - break 318 - if gs_start is not None: 319 - gs_end = gs_end or len(gs_lines) 320 - gs_lines = gs_lines[:gs_start] + gs_lines[gs_end:] 321 - new_text = "\n".join(gs_lines) 322 - 323 - atomic_write(file_path, new_text) 324 - _log_identity_change(filename, text, new_text, section=heading, source="api") 325 - return True 326 - finally: 327 - if lock_fd: 328 - fcntl.flock(lock_fd.fileno(), fcntl.LOCK_UN) 329 - lock_fd.close() 330 - 331 - 332 - def update_self_md_section(heading: str, content: str) -> bool: 333 - """Update a ## section in sol/self.md, preserving all other sections.
334 - 335 - Thin wrapper around :func:`update_identity_section` for backward 336 - compatibility. 337 - """ 338 - return update_identity_section("self.md", heading, content) 339 - 340 - 341 - def update_self_md_opening(content: str) -> bool: 342 - """Update the opening paragraph in sol/self.md (between ``# self`` and the first ``##``). 343 - 344 - Parameters 345 - ---------- 346 - content : str 347 - New opening paragraph text. 348 - 349 - Returns 350 - ------- 351 - bool 352 - True if updated, False if self.md is missing or has unexpected structure. 353 - """ 354 - from think.entities.core import atomic_write 355 - from think.utils import get_journal 356 - 357 - self_path = Path(get_journal()) / "sol" / "self.md" 358 - if not self_path.exists(): 359 - return False 360 - 361 - text = self_path.read_text(encoding="utf-8") 362 - lines = text.split("\n") 363 - 364 - start = None 365 - end = None 366 - for i, line in enumerate(lines): 367 - if line == "# self": 368 - start = i 369 - elif start is not None and line.startswith("## "): 370 - end = i 371 - break 372 - 373 - if start is None or end is None: 374 - return False 375 - 376 - new_lines = lines[: start + 1] + ["", content, ""] + lines[end:] 377 - new_text = "\n".join(new_lines) 378 - atomic_write(self_path, new_text) 379 - _log_identity_change("self.md", text, new_text, section=None, source="api") 380 - return True 381 38 382 39 383 40 def _now_ts() -> int:
+2 -2
think/chat_cli.py
··· 29 29 args = setup_cli(parser) 30 30 require_solstone() 31 31 32 - from think.awareness import ensure_sol_directory 32 + from think.identity import ensure_identity_directory 33 33 34 - ensure_sol_directory() 34 + ensure_identity_directory() 35 35 36 36 if not args.message: 37 37 parser.print_help()
+2 -2
think/heartbeat.py
··· 13 13 from datetime import datetime 14 14 from pathlib import Path 15 15 16 - from think.awareness import ensure_sol_directory 17 16 from think.cortex_client import cortex_request, wait_for_uses 17 + from think.identity import ensure_identity_directory 18 18 from think.utils import get_journal, require_solstone, setup_cli 19 19 20 20 logger = logging.getLogger(__name__) ··· 57 57 require_solstone() 58 58 59 59 journal = Path(get_journal()) 60 - ensure_sol_directory() 60 + ensure_identity_directory() 61 61 health_dir = journal / "health" 62 62 health_dir.mkdir(parents=True, exist_ok=True) 63 63
+487
think/identity.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + """Single write-owning module for `{journal}/identity/*` and its audit log.""" 5 + 6 + from __future__ import annotations 7 + 8 + import fcntl 9 + import hashlib 10 + import json 11 + import logging 12 + import os 13 + import tempfile 14 + from contextlib import contextmanager 15 + from datetime import datetime, timezone 16 + from pathlib import Path 17 + from typing import Iterator 18 + 19 + logger = logging.getLogger(__name__) 20 + 21 + _AGENCY_MD = """\ 22 + # agency 23 + 24 + things I'm tracking, acting on, or watching. I update this as I notice things 25 + and resolve them. the heartbeat reviews this periodically. 26 + 27 + ## curation 28 + [nothing yet — building initial picture of journal health] 29 + 30 + ## observations 31 + [watching and learning] 32 + 33 + ## follow-throughs 34 + [none yet] 35 + 36 + ## system 37 + [monitoring] 38 + 39 + ## self-improvement 40 + [learning what works] 41 + """ 42 + 43 + 44 + _PARTNER_MD = """\ 45 + # partner 46 + 47 + Behavioral profile of the journal owner — observed patterns that help sol 48 + adapt its responses, timing, and initiative to how this person actually works. 49 + 50 + ## getting started 51 + 52 + Everything stays on your machine — this journal is yours alone, never sent to sol pbc. 53 + 54 + When meeting the owner for the first time, learn about them naturally through conversation. 55 + Present one thing at a time — don't overwhelm. 56 + 57 + ### learn their name 58 + 59 + Ask what they'd like to be called. Record it: 60 + - `sol call sol set-owner "NAME"` 61 + - With context: `sol call sol set-owner "NAME" --bio "SHORT_BIO"` 62 + 63 + As you learn about them, update your partner profile: 64 + - `sol call identity partner --update-section 'SECTION' --value 'what you observed'` 65 + 66 + ### set up facets 67 + 68 + Ask what areas of their life they want to track (work, personal, hobbies, side projects, etc.). 
Create facets for each: 69 + - `sol call journal facet create TITLE [--emoji EMOJI] [--color COLOR] [--description DESC]` 70 + - `sol call journal facets` — verify what was created 71 + 72 + ### attach entities 73 + 74 + For each facet, ask about key people, companies, projects, and tools: 75 + - `sol call entities attach TYPE ENTITY DESCRIPTION --facet FACET` 76 + - Types: Person, Company, Project, Tool 77 + 78 + ### offer imports 79 + 80 + After setup, offer to bring in history from existing tools: 81 + - Calendar (ics), ChatGPT (chatgpt), Claude (claude), Gemini (gemini), Granola (granola), Notes (obsidian), Kindle (kindle) 82 + - Read guide: `apps/import/guides/{source}.md` 83 + - Navigate: `sol call navigate "/app/import#guide/{source}"` 84 + - If declined: `sol call awareness imports --declined` 85 + 86 + ### support 87 + 88 + If the owner needs help or wants to share feedback, handle it in-place — file tickets, track 89 + responses. Nothing gets sent without their review. 90 + 91 + ## work patterns 92 + [not yet observed — sol will learn as we spend time together] 93 + 94 + ## communication style 95 + [not yet observed — sol will learn as we spend time together] 96 + 97 + ## relationship priorities 98 + [not yet observed — sol will learn as we spend time together] 99 + 100 + ## decision style 101 + [not yet observed — sol will learn as we spend time together] 102 + 103 + ## expertise domains 104 + [not yet observed — sol will learn as we spend time together] 105 + """ 106 + 107 + _AWARENESS_MD = "not yet updated\n" 108 + 109 + 110 + def _build_self_md(config: dict) -> str: 111 + agent = config.get("agent", {}) 112 + identity = config.get("identity", {}) 113 + 114 + name_status = agent.get("name_status", "default") 115 + agent_name = agent.get("name", "sol") 116 + named_date = agent.get("named_date") 117 + owner_name = identity.get("name", "") 118 + owner_bio = identity.get("bio", "") 119 + 120 + has_named_agent = name_status in ("chosen", "self-named") 121 + 
has_identity = bool(owner_name) 122 + 123 + if has_named_agent: 124 + opening = ( 125 + f"I am {agent_name}. this is a new journal — we're just getting started." 126 + ) 127 + else: 128 + opening = "I am sol. this is a new journal — we're just getting started." 129 + 130 + if has_named_agent: 131 + if named_date: 132 + name_section = f"{agent_name} (named {named_date})" 133 + else: 134 + name_section = agent_name 135 + else: 136 + name_section = "sol (default)" 137 + 138 + if has_identity: 139 + owner_section = owner_name 140 + if owner_bio: 141 + owner_section += f"\n{owner_bio}" 142 + else: 143 + owner_section = "[getting to know you]" 144 + 145 + return f"""\ 146 + # self 147 + 148 + {opening} 149 + 150 + ## my name 151 + {name_section} 152 + 153 + ## who I'm here for 154 + {owner_section} 155 + 156 + ## our relationship 157 + [forming] 158 + 159 + ## what I've noticed 160 + [observing] 161 + 162 + ## what I find interesting 163 + [discovering] 164 + """ 165 + 166 + 167 + def _identity_dir() -> Path: 168 + from think.utils import get_journal 169 + 170 + path = Path(get_journal()) / "identity" 171 + path.mkdir(parents=True, exist_ok=True) 172 + return path 173 + 174 + 175 + def _history_path(identity_dir: Path) -> Path: 176 + return identity_dir / "history.jsonl" 177 + 178 + 179 + def _hash_content(content: str) -> str: 180 + return hashlib.sha256(content.encode("utf-8")).hexdigest() 181 + 182 + 183 + def _byte_count(content: str) -> int: 184 + return len(content.encode("utf-8")) 185 + 186 + 187 + def _history_ts() -> str: 188 + # Normalize UTC timestamps to a compact trailing `Z` for audit log readability. 
189 + return ( 190 + datetime.now(timezone.utc) 191 + .isoformat(timespec="milliseconds") 192 + .replace("+00:00", "Z") 193 + ) 194 + 195 + 196 + @contextmanager 197 + def _identity_lock(identity_dir: Path) -> Iterator[None]: 198 + lock_path = identity_dir / ".lock" 199 + with open(lock_path, "w", encoding="utf-8") as lock_fd: 200 + # Serialize the whole directory so file replacement and history ordering stay aligned. 201 + fcntl.flock(lock_fd.fileno(), fcntl.LOCK_EX) 202 + try: 203 + yield 204 + finally: 205 + fcntl.flock(lock_fd.fileno(), fcntl.LOCK_UN) 206 + 207 + 208 + def _append_history_locked(identity_dir: Path, line: str) -> None: 209 + fd = os.open( 210 + _history_path(identity_dir), 211 + os.O_APPEND | os.O_CREAT | os.O_WRONLY, 212 + 0o600, 213 + ) 214 + try: 215 + os.write(fd, line.encode("utf-8")) 216 + finally: 217 + os.close(fd) 218 + 219 + 220 + def _replace_file(identity_dir: Path, file_name: str, content: str) -> None: 221 + fd, tmp_path = tempfile.mkstemp( 222 + dir=identity_dir, 223 + prefix=f".{file_name}.", 224 + suffix=".tmp", 225 + ) 226 + replaced = False 227 + try: 228 + os.fchmod(fd, 0o600) 229 + os.write(fd, content.encode("utf-8")) 230 + os.close(fd) 231 + fd = -1 232 + os.replace(tmp_path, identity_dir / file_name) 233 + replaced = True 234 + except Exception: 235 + if fd != -1: 236 + os.close(fd) 237 + if not replaced: 238 + try: 239 + os.unlink(tmp_path) 240 + except FileNotFoundError: 241 + pass 242 + raise 243 + 244 + 245 + def _restore_previous_content(identity_dir: Path, file_name: str, content: str) -> None: 246 + _replace_file(identity_dir, file_name, content) 247 + 248 + 249 + def _prune_partner_getting_started(content: str) -> str: 250 + if "## getting started" not in content: 251 + return content 252 + lines = content.split("\n") 253 + start = None 254 + end = None 255 + for index, line in enumerate(lines): 256 + if line == "## getting started": 257 + start = index 258 + elif start is not None and line.startswith("## "): 259 
+ end = index 260 + break 261 + if start is None: 262 + return content 263 + if end is None: 264 + end = len(lines) 265 + return "\n".join(lines[:start] + lines[end:]) 266 + 267 + 268 + def _replace_section(existing: str, heading: str, new_value: str) -> str | None: 269 + lines = existing.split("\n") 270 + target = f"## {heading}" 271 + start = None 272 + end = None 273 + for index, line in enumerate(lines): 274 + if line == target: 275 + start = index 276 + elif start is not None and line.startswith("## "): 277 + end = index 278 + break 279 + if start is None: 280 + return None 281 + if end is None: 282 + end = len(lines) 283 + new_lines = ( 284 + lines[: start + 1] 285 + + (new_value.split("\n") if new_value else []) 286 + + [""] 287 + + lines[end:] 288 + ) 289 + return "\n".join(new_lines) 290 + 291 + 292 + def _replace_self_opening(existing: str, new_value: str) -> str | None: 293 + lines = existing.split("\n") 294 + start = None 295 + end = None 296 + for index, line in enumerate(lines): 297 + if line == "# self": 298 + start = index 299 + elif start is not None and line.startswith("## "): 300 + end = index 301 + break 302 + if start is None or end is None: 303 + return None 304 + return "\n".join(lines[: start + 1] + ["", new_value, ""] + lines[end:]) 305 + 306 + 307 + def _write_identity_locked( 308 + identity_dir: Path, 309 + file: str, 310 + content: str, 311 + *, 312 + actor: str, 313 + op: str, 314 + section: str | None, 315 + reason: str, 316 + ) -> None: 317 + file_name = Path(file).name 318 + target = identity_dir / file_name 319 + had_existing = target.exists() 320 + before_content = target.read_text(encoding="utf-8") if had_existing else "" 321 + _replace_file(identity_dir, file_name, content) 322 + record = { 323 + "ts": _history_ts(), 324 + "file": file_name, 325 + "actor": actor, 326 + "op": op, 327 + "section": section, 328 + "reason": reason, 329 + "before_hash": _hash_content(before_content), 330 + "after_hash": _hash_content(content), 331 + 
"bytes_before": _byte_count(before_content), 332 + "bytes_after": _byte_count(content), 333 + } 334 + try: 335 + _append_history_locked( 336 + identity_dir, 337 + json.dumps(record, separators=(",", ":")) + "\n", 338 + ) 339 + except Exception: 340 + if had_existing: 341 + try: 342 + _restore_previous_content(identity_dir, file_name, before_content) 343 + except Exception: 344 + logger.exception( 345 + "Failed to restore %s after history append failure", target 346 + ) 347 + else: 348 + try: 349 + target.unlink(missing_ok=True) 350 + except Exception: 351 + logger.exception( 352 + "Failed to remove %s after history append failure", target 353 + ) 354 + raise 355 + 356 + 357 + def write_identity( 358 + file: str, 359 + *, 360 + actor: str, 361 + op: str, 362 + section: str | None, 363 + content: str, 364 + reason: str, 365 + ) -> None: 366 + """Write one identity file under lock. 367 + 368 + `op` must be one of: `replace`, `update_section`, `update_opening`, 369 + `append`, or `create`. `actor` is free-text, for example 370 + `ensure_identity_directory`, `sol call sol set-name`, or 371 + `sol call identity self --write`. 
372 + """ 373 + 374 + identity_dir = _identity_dir() 375 + with _identity_lock(identity_dir): 376 + _write_identity_locked( 377 + identity_dir, 378 + file, 379 + content, 380 + actor=actor, 381 + op=op, 382 + section=section, 383 + reason=reason, 384 + ) 385 + 386 + 387 + def update_identity_section( 388 + file: str, 389 + section: str, 390 + new_value: str, 391 + *, 392 + actor: str, 393 + reason: str, 394 + ) -> bool: 395 + identity_dir = _identity_dir() 396 + file_name = Path(file).name 397 + target = identity_dir / file_name 398 + with _identity_lock(identity_dir): 399 + if not target.exists(): 400 + return False 401 + existing = target.read_text(encoding="utf-8") 402 + new_content = _replace_section(existing, section, new_value) 403 + if new_content is None: 404 + return False 405 + if file_name == "partner.md": 406 + new_content = _prune_partner_getting_started(new_content) 407 + if new_content == existing: 408 + return False 409 + _write_identity_locked( 410 + identity_dir, 411 + file_name, 412 + new_content, 413 + actor=actor, 414 + op="update_section", 415 + section=section, 416 + reason=reason, 417 + ) 418 + return True 419 + 420 + 421 + def update_self_md_section( 422 + section: str, 423 + new_value: str, 424 + *, 425 + actor: str, 426 + reason: str, 427 + ) -> bool: 428 + return update_identity_section( 429 + "self.md", 430 + section, 431 + new_value, 432 + actor=actor, 433 + reason=reason, 434 + ) 435 + 436 + 437 + def update_self_md_opening( 438 + new_value: str, 439 + *, 440 + actor: str, 441 + reason: str, 442 + ) -> bool: 443 + identity_dir = _identity_dir() 444 + target = identity_dir / "self.md" 445 + with _identity_lock(identity_dir): 446 + if not target.exists(): 447 + return False 448 + existing = target.read_text(encoding="utf-8") 449 + new_content = _replace_self_opening(existing, new_value) 450 + if new_content is None or new_content == existing: 451 + return False 452 + _write_identity_locked( 453 + identity_dir, 454 + "self.md", 455 + 
new_content, 456 + actor=actor, 457 + op="update_opening", 458 + section=None, 459 + reason=reason, 460 + ) 461 + return True 462 + 463 + 464 + def ensure_identity_directory() -> Path: 465 + from think.utils import get_config 466 + 467 + identity_dir = _identity_dir() 468 + defaults = { 469 + "self.md": _build_self_md(get_config()), 470 + "agency.md": _AGENCY_MD, 471 + "partner.md": _PARTNER_MD, 472 + "awareness.md": _AWARENESS_MD, 473 + } 474 + for file_name, content in defaults.items(): 475 + target = identity_dir / file_name 476 + if target.exists(): 477 + continue 478 + write_identity( 479 + file_name, 480 + actor="ensure_identity_directory", 481 + op="create", 482 + section=None, 483 + content=content, 484 + reason="bootstrap", 485 + ) 486 + logger.info("Created %s", target) 487 + return identity_dir
+29 -46
think/prompts.py
··· 32 32 # Cached raw template content loaded from think/templates/*.md 33 33 _templates_cache: dict[str, str] | None = None 34 34 35 - # Cached repo sol/ template vars loaded from sol/*.md 36 - _sol_vars_cache: dict[str, str] | None = None 37 - 38 - SOL_DIR = Path(__file__).parent.parent / "sol" 35 + # Cached journal identity/ template vars loaded from identity/*.md 36 + _identity_vars_cache: dict[str, dict[str, str]] | None = None 39 37 40 38 41 39 # --------------------------------------------------------------------------- ··· 109 107 return substituted 110 108 111 109 112 - def _load_sol_vars() -> dict[str, str]: 113 - """Load sol/*.md files as template vars from repo and journal directories. 114 - 115 - Files are loaded with frontmatter stripped. Naming: sol/self.md -> $sol_self. 116 - Journal sol/ files override repo sol/ files on collision. 117 - """ 118 - global _sol_vars_cache 110 + def _load_identity_vars() -> dict[str, str]: 111 + """Load identity/*.md files as template vars from the active journal.""" 112 + global _identity_vars_cache 119 113 from think.utils import get_journal 120 114 121 - if _sol_vars_cache is None: 122 - _sol_vars_cache = {} 123 - 124 - # Repo sol/ first 125 - if SOL_DIR.is_dir(): 126 - for md_path in sorted(SOL_DIR.glob("*.md")): 127 - var_name = f"sol_{md_path.stem}" 128 - try: 129 - post = frontmatter.load(md_path) 130 - _sol_vars_cache[var_name] = post.content.strip() 131 - except Exception: 132 - pass 133 - 134 - sol_vars = dict(_sol_vars_cache) 135 - 136 - # Journal sol/ second (wins on collision) 115 + if _identity_vars_cache is None: 116 + _identity_vars_cache = {} 137 117 try: 138 - journal_sol = Path(get_journal()) / "sol" 139 - if journal_sol.is_dir(): 140 - for md_path in sorted(journal_sol.glob("*.md")): 141 - var_name = f"sol_{md_path.stem}" 142 - try: 143 - post = frontmatter.load(md_path) 144 - sol_vars[var_name] = post.content.strip() 145 - except Exception: 146 - pass 118 + journal_identity = Path(get_journal()) 
/ "identity" 119 + cache_key = str(journal_identity.resolve()) 120 + if cache_key not in _identity_vars_cache: 121 + values: dict[str, str] = {} 122 + if journal_identity.is_dir(): 123 + for md_path in sorted(journal_identity.glob("*.md")): 124 + var_name = f"identity_{md_path.stem}" 125 + try: 126 + post = frontmatter.load(md_path) 127 + values[var_name] = post.content.strip() 128 + except Exception: 129 + pass 130 + _identity_vars_cache[cache_key] = values 131 + return dict(_identity_vars_cache[cache_key]) 147 132 except Exception: 148 - pass 133 + return {} 149 134 150 - return sol_vars 151 135 152 - 153 - def reset_sol_vars_cache() -> None: 154 - """Reset the module-global sol/ vars cache. Test-only helper.""" 155 - global _sol_vars_cache 156 - _sol_vars_cache = None 136 + def reset_identity_vars_cache() -> None: 137 + """Reset the module-global identity/ vars cache. Test-only helper.""" 138 + global _identity_vars_cache 139 + _identity_vars_cache = None 157 140 158 141 159 142 def format_current_datetime() -> str: ··· 358 341 # Add uppercase-first version 359 342 template_vars[key.capitalize()] = str_value.capitalize() 360 343 361 - # Merge sol/ template vars (for example $sol_self) 362 - sol_vars = _load_sol_vars() 363 - for key, value in sol_vars.items(): 344 + # Merge identity/ template vars (for example $identity_self) 345 + identity_vars = _load_identity_vars() 346 + for key, value in identity_vars.items(): 364 347 if key not in template_vars: 365 348 template_vars[key] = value 366 349
+2 -2
think/thinking.py
··· 2771 2771 args = setup_cli(parser) 2772 2772 require_solstone() 2773 2773 2774 - from think.awareness import ensure_sol_directory 2774 + from think.identity import ensure_identity_directory 2775 2775 2776 - ensure_sol_directory() 2776 + ensure_identity_directory() 2777 2777 2778 2778 if args.updated: 2779 2779 incompatible = []
+78 -65
think/tools/sol.py
··· 1 1 # SPDX-License-Identifier: AGPL-3.0-only 2 2 # Copyright (c) 2026 sol pbc 3 3 4 - """CLI commands for sol/ identity directory. 4 + """CLI commands for the journal identity directory. 5 5 6 - Provides read and write access to ``{journal}/sol/self.md``, 7 - ``{journal}/sol/partner.md``, ``{journal}/sol/agency.md``, and 8 - ``{journal}/sol/pulse.md``, and ``{journal}/sol/awareness.md`` — sol's 6 + Provides read and write access to ``{journal}/identity/self.md``, 7 + ``{journal}/identity/partner.md``, ``{journal}/identity/agency.md``, 8 + ``{journal}/identity/pulse.md``, and ``{journal}/identity/awareness.md`` — sol's 9 9 identity and initiative files. Also provides read access to the morning 10 10 briefing at 11 11 ``{journal}/YYYYMMDD/talents/morning_briefing.md``. ··· 19 19 20 20 import typer 21 21 22 - from think.awareness import ( 23 - _log_identity_change, 24 - ensure_sol_directory, 22 + from think.identity import ( 23 + ensure_identity_directory, 25 24 update_identity_section, 26 25 update_self_md_section, 26 + write_identity, 27 27 ) 28 - from think.entities.core import atomic_write 29 28 from think.utils import day_dirs, day_path, get_journal, require_solstone 30 29 31 30 app = typer.Typer( 32 - help="Sol identity directory — self.md, partner.md, agency.md, pulse.md, awareness.md, and morning briefing.", 31 + help="Journal identity directory — self.md, partner.md, agency.md, pulse.md, awareness.md, and morning briefing.", 33 32 invoke_without_command=True, 34 33 no_args_is_help=False, 35 34 ) ··· 70 69 71 70 def _hydrate() -> str: 72 71 """Return the combined identity hydration document.""" 73 - sol_dir = Path(get_journal()) / "sol" 72 + identity_dir = Path(get_journal()) / "identity" 74 73 chunks = [f"# species\n\n{_SPECIES_PREAMBLE}\n"] 75 74 for stem in ("self", "partner", "agency", "awareness"): 76 - path = sol_dir / f"{stem}.md" 75 + path = identity_dir / f"{stem}.md" 77 76 content = ( 78 77 path.read_text(encoding="utf-8").strip() 79 78 if 
path.exists() ··· 91 90 print(_hydrate(), end="") 92 91 93 92 94 - def _sol_dir(): 95 - """Return the sol/ directory path, creating it if needed.""" 96 - return ensure_sol_directory() 93 + def _identity_dir(): 94 + """Return the identity/ directory path, creating it if needed.""" 95 + return ensure_identity_directory() 96 + 97 + 98 + def _actor_for_cmd(command: str, flag: str) -> str: 99 + return f"sol call identity {command} {flag}" 97 100 98 101 99 102 def _resolve_content(value: str | None) -> str: ··· 122 125 None, "--value", help="Content to write (alternative to stdin)." 123 126 ), 124 127 ) -> None: 125 - """Read or write sol/self.md.""" 126 - sol_dir = _sol_dir() 127 - self_path = sol_dir / "self.md" 128 + """Read or write identity/self.md.""" 129 + identity_dir = _identity_dir() 130 + self_path = identity_dir / "self.md" 128 131 129 132 if update_section: 130 133 content = _resolve_content(value) 131 - if update_self_md_section(update_section, content.strip()): 134 + if update_self_md_section( 135 + update_section, 136 + content.strip(), 137 + actor=_actor_for_cmd("self", "--update-section <heading>"), 138 + reason="manual section update", 139 + ): 132 140 typer.echo(f"Updated ## {update_section} in self.md.") 133 141 else: 134 142 typer.echo(f"Error: section '## {update_section}' not found.", err=True) ··· 137 145 138 146 if write: 139 147 content = _resolve_content(value) 140 - old_content = ( 141 - self_path.read_text(encoding="utf-8") if self_path.exists() else "" 142 - ) 143 - atomic_write(self_path, content) 144 - _log_identity_change( 145 - "self.md", old_content, content, section=None, source="cli" 148 + write_identity( 149 + "self.md", 150 + actor=_actor_for_cmd("self", "--write"), 151 + op="replace", 152 + section=None, 153 + content=content, 154 + reason="manual replace", 146 155 ) 147 156 typer.echo("self.md updated.") 148 157 return ··· 171 180 None, "--value", help="Content to write (alternative to stdin)." 
172 181 ), 173 182 ) -> None: 174 - """Read or write sol/partner.md.""" 175 - sol_dir = _sol_dir() 176 - partner_path = sol_dir / "partner.md" 183 + """Read or write identity/partner.md.""" 184 + identity_dir = _identity_dir() 185 + partner_path = identity_dir / "partner.md" 177 186 178 187 if update_section: 179 188 content = _resolve_content(value) 180 - if update_identity_section("partner.md", update_section, content.strip()): 189 + if update_identity_section( 190 + "partner.md", 191 + update_section, 192 + content.strip(), 193 + actor=_actor_for_cmd("partner", "--update-section <heading>"), 194 + reason="manual section update", 195 + ): 181 196 typer.echo(f"Updated ## {update_section} in partner.md.") 182 197 else: 183 198 typer.echo(f"Error: section '## {update_section}' not found.", err=True) ··· 186 201 187 202 if write: 188 203 content = _resolve_content(value) 189 - old_content = ( 190 - partner_path.read_text(encoding="utf-8") if partner_path.exists() else "" 191 - ) 192 - atomic_write(partner_path, content) 193 - _log_identity_change( 194 - "partner.md", old_content, content, section=None, source="cli" 204 + write_identity( 205 + "partner.md", 206 + actor=_actor_for_cmd("partner", "--write"), 207 + op="replace", 208 + section=None, 209 + content=content, 210 + reason="manual replace", 195 211 ) 196 212 typer.echo("partner.md updated.") 197 213 return ··· 215 231 None, "--value", help="Content to write (alternative to stdin)." 
216 232 ), 217 233 ) -> None: 218 - """Read or write sol/agency.md.""" 219 - sol_dir = _sol_dir() 220 - agency_path = sol_dir / "agency.md" 234 + """Read or write identity/agency.md.""" 235 + identity_dir = _identity_dir() 236 + agency_path = identity_dir / "agency.md" 221 237 222 238 if write: 223 239 content = _resolve_content(value) 224 - old_content = ( 225 - agency_path.read_text(encoding="utf-8") if agency_path.exists() else "" 226 - ) 227 - atomic_write(agency_path, content) 228 - _log_identity_change( 240 + write_identity( 229 241 "agency.md", 230 - old_content, 231 - content, 242 + actor=_actor_for_cmd("agency", "--write"), 243 + op="replace", 232 244 section=None, 233 - source="cli", 245 + content=content, 246 + reason="manual replace", 234 247 ) 235 248 typer.echo("agency.md updated.") 236 249 return ··· 254 267 None, "--value", help="Content to write (alternative to stdin)." 255 268 ), 256 269 ) -> None: 257 - """Read or write sol/pulse.md.""" 258 - sol_dir = _sol_dir() 259 - pulse_path = sol_dir / "pulse.md" 270 + """Read or write identity/pulse.md.""" 271 + identity_dir = _identity_dir() 272 + pulse_path = identity_dir / "pulse.md" 260 273 261 274 if write: 262 275 content = _resolve_content(value) 263 - old_content = ( 264 - pulse_path.read_text(encoding="utf-8") if pulse_path.exists() else "" 265 - ) 266 - atomic_write(pulse_path, content) 267 - _log_identity_change( 268 - "pulse.md", old_content, content, section=None, source="cli" 276 + write_identity( 277 + "pulse.md", 278 + actor=_actor_for_cmd("pulse", "--write"), 279 + op="replace", 280 + section=None, 281 + content=content, 282 + reason="manual replace", 269 283 ) 270 284 typer.echo("pulse.md updated.") 271 285 return ··· 289 303 None, "--value", help="Content to write (alternative to stdin)." 
290 304 ), 291 305 ) -> None: 292 - """Read or write sol/awareness.md.""" 293 - sol_dir = _sol_dir() 294 - awareness_path = sol_dir / "awareness.md" 306 + """Read or write identity/awareness.md.""" 307 + identity_dir = _identity_dir() 308 + awareness_path = identity_dir / "awareness.md" 295 309 296 310 if write: 297 311 content = _resolve_content(value) 298 - old_content = ( 299 - awareness_path.read_text(encoding="utf-8") 300 - if awareness_path.exists() 301 - else "" 302 - ) 303 - atomic_write(awareness_path, content) 304 - _log_identity_change( 305 - "awareness.md", old_content, content, section=None, source="cli" 312 + write_identity( 313 + "awareness.md", 314 + actor=_actor_for_cmd("awareness", "--write"), 315 + op="replace", 316 + section=None, 317 + content=content, 318 + reason="manual replace", 306 319 ) 307 320 typer.echo("awareness.md updated.") 308 321 return