push: ship Wave 3 push server (root /api/push/*, APNs dispatch)

Root Flask blueprint at /api/push/* with register/unregister/status/test
endpoints, backed by a dedicated push runtime that subscribes to cortex
finish events for Daily Briefing and runs a 60s periodic check for
Pre-Meeting Prep. Three categories ship: SOLSTONE_DAILY_BRIEFING,
SOLSTONE_PRE_MEETING_PREP, SOLSTONE_AGENT_ALERT. Commitment Nudge is
deferred to Wave 3.1 (LedgerItem has no machine-readable due date); the
constant + payload shape are defined server-side for iOS forward-compat.

APNs transport is httpx + PyJWT with ES256 bearer JWTs cached for 55
minutes. BadDeviceToken / Unregistered responses auto-prune the device.
Device tokens are never logged beyond the last 4 chars; JWTs and .p8
contents never logged at all. PII fallback is hardcoded in payload
builders — lock-screen bodies are generic; detail lives in `data`.

Write ownership: think/push/devices.py owns push_devices.json,
think/push/triggers.py owns nudge_log.jsonl. Layer hygiene stays clean
without allowlist entries.

Live APNs validation is deferred pending Apple Developer enrollment; the
post-enrollment checklist lives in docs/design/push.md §10.

Design: docs/design/push.md
Spec sections: cpo/specs/in-flight/mobile-ux-native-ios-android.md §§3, 3.2, 3.3, 3.4, 8.3

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>

+2987
+6
convey/__init__.py
```diff
···
 def create_app(journal: str = "") -> Flask:
     """Create and configure the Convey Flask application."""
+    from think.push.runtime import start_push_runtime
     from think.voice.runtime import start_voice_runtime

+    from .push import push_bp
     from .voice import voice_bp

     app = Flask(
···
     # Register voice API blueprint
     app.register_blueprint(voice_bp)

+    # Register push API blueprint
+    app.register_blueprint(push_bp)
+
     # Initialize and register app system
     registry = AppRegistry()
     registry.discover()
···
     sock = Sock(app)
     register_websocket(sock)
     start_voice_runtime(app)
+    start_push_runtime(app)

     if journal:
         state.journal_root = journal
```
+127
convey/push.py
```python
# SPDX-License-Identifier: AGPL-3.0-only
# Copyright (c) 2026 sol pbc

"""Root-level push API."""

from __future__ import annotations

import uuid
from typing import Any

from flask import Blueprint, jsonify, request
from werkzeug.exceptions import BadRequest

from think.push import triggers
from think.push.config import is_configured
from think.push.devices import (
    load_devices,
    register_device,
    remove_device,
    status_view,
)
from think.push.dispatch import CATEGORIES, CATEGORY_AGENT_ALERT

push_bp = Blueprint("push", __name__, url_prefix="/api/push")


def _error(message: str, status: int):
    return jsonify({"error": message}), status


def _optional_json_object() -> tuple[dict[str, Any], Any | None]:
    if not request.get_data(cache=True):
        return {}, None
    try:
        data = request.get_json(silent=False)
    except BadRequest:
        return {}, _error("request body must be valid JSON", 400)
    if not isinstance(data, dict):
        return {}, _error("request body must be a JSON object", 400)
    return data, None


def _required_json_object() -> tuple[dict[str, Any], Any | None]:
    try:
        data = request.get_json(silent=False)
    except BadRequest:
        return {}, _error("request body must be valid JSON", 400)
    if not isinstance(data, dict):
        return {}, _error("request body must be a JSON object", 400)
    return data, None


@push_bp.post("/register")
def register_push_device():
    body, error = _required_json_object()
    if error is not None:
        return error
    token = str(body.get("device_token") or "").strip()
    bundle_id = str(body.get("bundle_id") or "").strip()
    environment = str(body.get("environment") or "").strip()
    platform = str(body.get("platform") or "").strip()
    if not token:
        return _error("device_token is required", 400)
    if not bundle_id:
        return _error("bundle_id is required", 400)
    if environment not in {"development", "production"}:
        return _error("environment must be development or production", 400)
    if platform != "ios":
        return _error("platform must be ios", 400)
    count = register_device(
        token="".join(token.split()).lower(),
        bundle_id=bundle_id,
        environment=environment,
        platform=platform,
    )
    return jsonify({"registered": True, "device_count": count})


@push_bp.delete("/register")
def unregister_push_device():
    body, error = _required_json_object()
    if error is not None:
        return error
    token = str(body.get("device_token") or "").strip()
    if not token:
        return _error("device_token is required", 400)
    removed = remove_device("".join(token.split()).lower())
    return jsonify({"removed": removed, "device_count": len(load_devices())})


@push_bp.get("/status")
def push_status():
    devices = sorted(
        load_devices(),
        key=lambda device: int(device.get("registered_at", 0)),
        reverse=True,
    )
    return jsonify(
        {
            "configured": is_configured(),
            "device_count": len(devices),
            "devices": [status_view(device) for device in devices],
        }
    )


@push_bp.post("/test")
def send_push_test():
    body, error = _optional_json_object()
    if error is not None:
        return error
    if not is_configured():
        return _error("push not configured", 503)

    category = body.get("category", CATEGORY_AGENT_ALERT)
    if category not in CATEGORIES:
        return _error("category must be a known push category", 400)
    title = str(body.get("title") or "Push test")
    message = str(body.get("body") or "This is a test notification.")
    sent, failed = triggers.send_agent_alert(
        title=title,
        body=message,
        context_id=f"push-test-{uuid.uuid4().hex[:12]}",
    )
    return jsonify({"sent": sent, "failed": failed})


__all__ = ["push_bp"]
```
+686
docs/design/push.md
# Wave 3 push server

## 1. Summary

Wave 3 ships a root-level push API on the existing Convey server, backed by a dedicated push runtime and APNs dispatch layer. The server surface is a new `convey/push.py` blueprint mounted at `/api/push/*`, following the same root-blueprint pattern as the Wave 2 voice server rather than adding an `apps/push/` package (`convey/voice.py:26-184`, `convey/__init__.py:150-166`, `docs/design/voice-server.md:1-37`, `0a693381 voice: ship Wave 2 voice server (root /api/voice/*, 9-tool sideband)`). The runtime starts from `convey.create_app()`, owns both a `CallosumConnection` listener and an asyncio loop, dispatches Daily Briefing when `cortex.finish` reports `name=="morning_briefing"`, and runs a 60-second periodic check for Pre-Meeting Prep across enabled facets only (`think/callosum.py:245-346`, `think/cortex.py:433-441`, `think/activities.py:877-890`, `think/facets.py:255-261`). The shipped notification categories are `SOLSTONE_DAILY_BRIEFING`, `SOLSTONE_PRE_MEETING_PREP`, and `SOLSTONE_AGENT_ALERT`; the server also defines `SOLSTONE_COMMITMENT_NUDGE` for client forward-compatibility, but no Wave 3 trigger emits it.

Wave 3 explicitly defers commitment nudges to Wave 3.1 because the current ledger surface does not provide a machine-readable due date. `LedgerItem` has `when: str | None`, no `due` field, and `age_days` is derived from `opened_at`, so any age-threshold proxy would blur “old” and “overdue” in misleading ways (`think/surfaces/types.py:16-31`, `think/surfaces/ledger.py:395-410`). Wave 3 also defers live APNs validation pending Apple Developer enrollment, and it does not cover the native iOS client implementation, which ships in the companion lode.

Wave 3 is accepted when the mocked round-trip tests pass, the new push paths remain inside their declared write ownership, and the live-validation handoff is explicit enough that enrollment unblocks the final production exercise without design work. The gate stays the same as other repo work: keep layer hygiene clean, keep behavior testable from the Flask surface downward, and make runtime startup and shutdown deterministic (`scripts/check_layer_hygiene.py:38-72`, `scripts/check_layer_hygiene.py:183-240`, `tests/test_voice_runtime.py:19-103`, `tests/test_voice_integration.py:102-149`).

## 2. Module layout

Wave 3 adds a small `think/push/` package plus one new root blueprint. The layout deliberately mirrors the voice-server split where that split is sound, and deliberately does not reuse the voice runtime name because voice shutdown still hardcodes `brain.clear_brain_state()` and voice-specific app attachment (`think/voice/runtime.py:44-50`, `think/voice/runtime.py:88-105`).

| Path | Role |
|---|---|
| `convey/push.py` | Root Flask blueprint at `/api/push/*`. Defines local request validators mirroring `convey/voice.py` (`_error`, `_required_json_object`, `_optional_json_object`) and exposes `POST /register`, `DELETE /register`, `GET /status`, and `POST /test` (`convey/voice.py:26-55`, `convey/voice.py:58-184`). |
| `convey/__init__.py` | Registers `push_bp` beside the other root blueprints and calls `start_push_runtime(app)` during `create_app()`, mirroring the existing voice wire-in. The implementation must keep using `get_journal()` at call time because `state.journal_root` is assigned only after runtime startup (`convey/__init__.py:138-166`). |
| `think/push/__init__.py` | Package marker plus narrow re-export surface for runtime helpers and the public trigger entry point. This keeps callers out of module-private helpers and matches the small public surfaces used elsewhere in `think/voice/` (`think/voice/runtime.py:121-127`). |
| `think/push/config.py` | Journal-scoped config readers for `push.apns_key_path`, `push.apns_key_id`, `push.apns_team_id`, `push.bundle_id`, and `push.environment`. It mirrors the small-reader style of `think/voice/config.py`, but intentionally does not add env-var fallback because push credentials are journal-scoped operational config, not process-scoped ambient state (`think/voice/config.py:17-42`, `think/journal_default.json:35-39`). |
| `think/push/devices.py` | Device store owner. Exposes `load_devices()`, `register_device(token, bundle_id, environment, platform)`, and `remove_device(token)`. It is the sole writer for `journal/config/push_devices.json`, normalizes token input, recovers from malformed stores by treating them as empty with a warning, and rewrites the store atomically on each mutation. |
| `think/push/dispatch.py` | APNs transport owner. Defines the four category constants, the APNs HTTP/2 client using `httpx`, the ES256 JWT signer using `PyJWT`, the 55-minute bearer-token cache, payload builders, and `send(device, payload)` / `send_many(devices, payload)` helpers. `httpx` is already in the repo dependency set, so the commit adds only `PyJWT` plus the `h2` package that `httpx` needs for HTTP/2 (`pyproject.toml:53`). |
| `think/push/triggers.py` | Trigger owner. Defines `handle_briefing_finish(message)`, `check_pre_meeting_prep(now)`, and `send_agent_alert(title, body, context_id)`. This module is the sole writer for `journal/push/nudge_log.jsonl`; there is no separate `log.py`, so dedupe stays adjacent to trigger decisions. Trigger logic reads only enabled facets, reuses `_load_briefing_md(today)` for Daily Briefing, and reuses `load_activity_records(facet, day)` plus `record["source"] == "anticipated"` filtering for Pre-Meeting Prep (`apps/home/routes.py:149-198`, `apps/home/routes.py:305-337`, `think/activities.py:877-890`, `think/activities.py:937-939`, `think/facets.py:255-261`). |
| `think/push/runtime.py` | Dedicated runtime singleton. Exposes `start_push_runtime(app)`, `stop_push_runtime(app)`, and `stop_all_push_runtime()`. It mirrors the voice runtime’s daemon-thread + asyncio-loop + `atexit` pattern, but keeps push lifecycle independent and owns both the callosum listener and the 60-second periodic task (`think/voice/runtime.py:21-109`, `think/callosum.py:254-346`). |

Deliberate non-changes:

- No `sol push` top-level CLI. Wave 3 is a root API plus in-process runtime, matching the Wave 2 voice-server precedent rather than adding a separate command surface (`docs/design/voice-server.md:7-37`, `0a693381 voice: ship Wave 2 voice server (root /api/voice/*, 9-tool sideband)`).
- No `schedules.json` entry. The scheduler only understands `hourly`, `daily`, and `weekly`, which is too coarse for a 15-minute pre-meeting reminder (`think/scheduler.py:29-30`, `think/scheduler.py:375-438`).
- No supervisor hook. `think/supervisor.py::supervise()` is the one-second orchestration loop, but Wave 3 keeps push-domain logic out of supervisor and self-contains it in the push runtime (`think/supervisor.py:1311-1371`).

`push_devices.json` uses this storage model (a sketch of the upsert path follows the list):

- Top-level JSON object: `{"devices": [...]}`.
- Each device row stores `token`, `bundle_id`, `environment`, `platform`, and `registered_at`.
- Token identity is unique per row. Re-registering the same token updates the row in place and refreshes `registered_at`.
- Dispatch reads all rows, then filters to rows whose `bundle_id`, `environment`, and `platform` match the current push configuration before sending.
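A minimal sketch of that upsert contract. The import of `get_journal` from `think.utils` is an assumption made for the example; the shipped `think/push/devices.py` may differ in naming and temp-file details:

```python
# Sketch only: the shipped think/push/devices.py owns this file for real.
import json
import time
from pathlib import Path

from think.utils import get_journal  # assumed location of the journal-root helper


def _store_path() -> Path:
    return Path(get_journal()) / "config" / "push_devices.json"


def register_device(token: str, bundle_id: str, environment: str, platform: str) -> int:
    """Upsert one device row keyed by token; return the stored row count."""
    path = _store_path()
    try:
        rows = json.loads(path.read_text(encoding="utf-8")).get("devices", [])
    except (FileNotFoundError, json.JSONDecodeError):
        rows = []  # missing or malformed store reads as empty
    if not isinstance(rows, list):
        rows = []
    # Token identity is unique per row: re-registering replaces in place
    # and refreshes registered_at.
    rows = [row for row in rows if row.get("token") != token]
    rows.append(
        {
            "token": token,
            "bundle_id": bundle_id,
            "environment": environment,
            "platform": platform,
            "registered_at": int(time.time()),
        }
    )
    # Atomic rewrite: write a sibling temp file, then rename over the store.
    path.parent.mkdir(parents=True, exist_ok=True)
    tmp = path.with_suffix(".tmp")
    tmp.write_text(json.dumps({"devices": rows}, indent=2), encoding="utf-8")
    tmp.replace(path)
    return len(rows)
```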
`nudge_log.jsonl` uses this append-only model:

- One JSON object per successful trigger fire.
- Common fields: `ts`, `category`, `dedupe_key`, `sent`, `failed`.
- Category-specific context: `day` for Daily Briefing, `activity_id` and `facet` for Pre-Meeting Prep, `context_id` for Agent Alert.
- A line is appended only when at least one device send succeeds. Zero-success attempts stay retryable inside the same trigger window.

## 3. Flow diagrams

### 3.1 Device register

```text
iOS client
  -> POST /api/push/register
  -> convey/push.py validates JSON body
  -> think.push.devices.register_device(...)
  -> journal/config/push_devices.json rewrite
  -> 200 {"registered": true, "device_count": N}
```

This mirrors the voice blueprint’s “validate locally, then hand off to the feature module” pattern, but the handoff is synchronous because device registration is just a journal write and does not need the runtime loop (`convey/voice.py:29-55`, `convey/voice.py:58-123`).

### 3.2 Daily Briefing dispatch

```text
cortex subprocess
  -> think.cortex emits finish event to Callosum
  -> push runtime listener callback receives message
  -> triggers.handle_briefing_finish(message)
  -> schedule coroutine on push runtime loop
  -> poll _load_briefing_md(today) up to 10 x 1s
  -> dispatch.send_many(eligible_devices, briefing_payload)
  -> append journal/push/nudge_log.jsonl
```

The polling step is mandatory because `cortex.finish` is broadcast before `_write_output(...)` runs and before the `_active.jsonl` file is renamed to its completed name. `_load_briefing_md(today)` already enforces the `type=="morning_briefing"` and `metadata.date==today` gates, so the trigger reuses it instead of inventing a second briefing reader (`think/cortex.py:433-441`, `think/cortex.py:461-510`, `think/cortex.py:621-626`, `apps/home/routes.py:149-198`).
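The poll itself is small. A sketch, with the briefing loader passed in as a parameter rather than imported, since `_load_briefing_md` is private to `apps/home/routes.py`:

```python
# Sketch only; the shipped trigger schedules this on the push runtime loop.
import asyncio
from typing import Any, Callable


async def await_briefing(
    load_briefing_md: Callable[[str], Any], today: str, polls: int = 10
) -> Any | None:
    """Poll for the briefing file, since cortex.finish lands before the write."""
    for _ in range(polls):
        briefing = load_briefing_md(today)
        if briefing is not None:
            return briefing
        await asyncio.sleep(1)  # 10 x 1s, then give up with a warning upstream
    return None
```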
### 3.3 Pre-Meeting Prep dispatch

```text
push runtime periodic task (every 60s)
  -> triggers.check_pre_meeting_prep(now)
  -> get_enabled_facets().keys()
  -> load_activity_records(facet, YYYYMMDD)
  -> keep rows where source == "anticipated"
  -> keep rows where start-now is within [14m, 16m]
  -> skip rows already present in nudge_log
  -> dispatch.send_many(eligible_devices, meeting_payload)
  -> append journal/push/nudge_log.jsonl
```

The trigger must use `get_enabled_facets().keys()` so muted facets never produce push. The activity scan follows the existing repo convention: load all rows for `(facet, day)`, then filter `record["source"] == "anticipated"` in-process (`think/facets.py:255-261`, `think/activities.py:877-890`, `think/activities.py:937-939`, `apps/home/routes.py:305-337`).
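A sketch of the window test under those rules, assuming naive local datetimes on both sides:

```python
# Sketch of the [14m, 16m] prep-window check; tolerant of both stored formats.
from datetime import datetime, timedelta


def in_prep_window(day: str, start: str, now: datetime) -> bool:
    """True when the meeting starts 14-16 minutes from now.

    `day` is YYYYMMDD; `start` may be stored as HH:MM or HH:MM:SS.
    """
    fmt = "%Y%m%d %H:%M:%S" if start.count(":") == 2 else "%Y%m%d %H:%M"
    try:
        begins = datetime.strptime(f"{day} {start}", fmt)
    except ValueError:
        return False  # tolerate malformed stored starts
    delta = begins - now
    return timedelta(minutes=14) <= delta <= timedelta(minutes=16)
```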
### 3.4 Agent Alert dispatch

```text
in-process caller
  -> triggers.send_agent_alert(title, body, context_id)
  -> dispatch.send_many(eligible_devices, alert_payload)
  -> append journal/push/nudge_log.jsonl
```

Agent Alert is intentionally the simplest path: no callosum subscription, no scheduler, no extra persistence besides the dedupe log. It is a public in-process API for future callers that want to fire a push without adding new transport plumbing.

### 3.5 Shutdown

```text
process exit or test cleanup
  -> atexit / stop_push_runtime(app)
  -> stop CallosumConnection listener
  -> cancel periodic asyncio task
  -> stop runtime loop
  -> join daemon thread
  -> clear module-level runtime state
```

The shutdown contract matches the voice runtime: cancel tracked work, stop the loop from the owning thread, join with a bounded timeout, and leave the singleton reusable for the next app instance (`think/voice/runtime.py:53-109`, `tests/test_voice_runtime.py:19-103`).
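A compressed sketch of the singleton pattern this describes; the shipped `think/push/runtime.py` additionally owns the `CallosumConnection` listener, the periodic task, and the `atexit` hook:

```python
# Sketch only: daemon thread owning a private asyncio loop, reusable singleton.
import asyncio
import threading

_loop: asyncio.AbstractEventLoop | None = None
_thread: threading.Thread | None = None


def start_push_runtime(app) -> None:
    """Start one daemon thread that owns the push asyncio loop."""
    global _loop, _thread
    if _thread is not None:
        return  # singleton: one runtime per process
    _loop = asyncio.new_event_loop()

    def _run() -> None:
        asyncio.set_event_loop(_loop)
        _loop.run_forever()

    _thread = threading.Thread(target=_run, name="push-runtime", daemon=True)
    _thread.start()


def stop_all_push_runtime() -> None:
    """Stop the loop from its own thread, join, and clear module state."""
    global _loop, _thread
    if _loop is not None:
        _loop.call_soon_threadsafe(_loop.stop)
    if _thread is not None:
        _thread.join(timeout=5)  # bounded join keeps test teardown deterministic
    _loop = None
    _thread = None
```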
## 4. Endpoint specs

All four endpoints inherit the default Convey auth gate because `convey/root.py` wraps every request unless the endpoint is on the explicit bypass allowlist, and new `/api/push/*` routes are not on that list (`convey/root.py:81-139`). Request validation mirrors the voice helpers in `convey/voice.py`, so malformed JSON and non-object bodies fail before feature code runs (`convey/voice.py:29-55`).

### 4.1 `POST /api/push/register`

- URL: `/api/push/register`
- Method: `POST`
- Auth: default gate only. Accepts a logged-in session, Basic Auth, or the existing `trust_localhost` bypass when setup is complete and proxy headers are absent (`convey/root.py:111-139`).
- Request schema:
  - Required JSON object.
  - `device_token: string` — trimmed, non-empty. The server strips embedded spaces and lowercases before storage.
  - `bundle_id: string` — trimmed, non-empty.
  - `environment: string` — must be `"development"` or `"production"`.
  - `platform: string` — must be `"ios"` in Wave 3.
- Success response:
  - HTTP 200
  - Body: `{"registered": true, "device_count": <int>}`
- Error cases:
  - HTTP 400 `{"error": "request body must be valid JSON"}`
  - HTTP 400 `{"error": "request body must be a JSON object"}`
  - HTTP 400 `{"error": "device_token is required"}`
  - HTTP 400 `{"error": "bundle_id is required"}`
  - HTTP 400 `{"error": "environment must be development or production"}`
  - HTTP 400 `{"error": "platform must be ios"}`
  - HTTP 500 `{"error": "device registration failed"}`
- Notes:
  - Duplicate tokens upsert in place instead of adding a second row.
  - `device_count` reports stored rows after the upsert.
  - Registration does not require APNs credentials to be configured. Clients can register before the operator finishes `journal.json`.

### 4.2 `DELETE /api/push/register`

- URL: `/api/push/register`
- Method: `DELETE`
- Auth: default gate only (`convey/root.py:111-139`).
- Request schema:
  - Required JSON object.
  - `device_token: string` — trimmed, non-empty; normalized with the same rules as register.
- Success response:
  - HTTP 200
  - Body: `{"removed": true, "device_count": <int>}` when the token existed.
  - Body: `{"removed": false, "device_count": <int>}` when the token was not present.
- Error cases:
  - HTTP 400 `{"error": "request body must be valid JSON"}`
  - HTTP 400 `{"error": "request body must be a JSON object"}`
  - HTTP 400 `{"error": "device_token is required"}`
  - HTTP 500 `{"error": "device removal failed"}`
- Notes:
  - Removing an unknown token is not an error because uninstall and token churn are expected.
  - The mutation rewrites `push_devices.json` only when the stored set actually changes.

### 4.3 `GET /api/push/status`

- URL: `/api/push/status`
- Method: `GET`
- Auth: default gate only (`convey/root.py:111-139`).
- Request schema:
  - No request body.
- Success response:
  - HTTP 200
  - Body:
    - `configured: bool`
    - `device_count: int`
    - `devices: [{token_suffix, bundle_id, platform, environment, registered_at}]`
- `configured` semantics:
  - `true` only when `push.apns_key_path`, `push.apns_key_id`, `push.apns_team_id`, and `push.bundle_id` are non-empty, `push.environment` resolves to `"development"` or `"production"`, and the configured `.p8` path is absolute, exists, and is readable.
  - `false` for any missing or invalid APNs config, including a relative or missing key file.
- `devices` semantics:
  - `token_suffix` is the last four characters of the stored token.
  - `registered_at` is an ISO-8601 timestamp string.
  - The list is sorted newest-first by `registered_at`.
- Error cases:
  - No endpoint-specific 4xx errors.
  - Malformed `push_devices.json` is treated as an empty store with a warning from `think/push/devices.py`; the route still returns HTTP 200.
- Notes:
  - This route never returns full device tokens.
  - This route reports all stored devices, not just devices matching the active bundle/environment filter.

### 4.4 `POST /api/push/test`

- URL: `/api/push/test`
- Method: `POST`
- Auth: default gate only. There is no extra debug header or test-only bypass (`convey/root.py:111-139`).
- Request schema:
  - Empty body allowed.
  - If a body is present, it must decode to a JSON object.
  - Optional `title: string`
  - Optional `body: string`
  - Optional `category: string`
- Category rules:
  - Default: `SOLSTONE_AGENT_ALERT`
  - Allowed explicit values: `SOLSTONE_DAILY_BRIEFING`, `SOLSTONE_PRE_MEETING_PREP`, `SOLSTONE_AGENT_ALERT`, `SOLSTONE_COMMITMENT_NUDGE`
  - The route validates the category value but still dispatches through `send_agent_alert(...)`, so Wave 3 test sends always use the Agent Alert payload shape.
- Success response:
  - HTTP 200
  - Body: `{"sent": <int>, "failed": <int>}`
- Error cases:
  - HTTP 400 `{"error": "request body must be valid JSON"}`
  - HTTP 400 `{"error": "request body must be a JSON object"}`
  - HTTP 400 `{"error": "category must be a known push category"}`
  - HTTP 503 `{"error": "push not configured"}`
- Notes:
  - The route dispatches through `send_agent_alert(...)`, so successful test sends append `SOLSTONE_AGENT_ALERT` lines to `nudge_log.jsonl` with unique `push-test-<uuid>` context ids.
  - The route sends only to stored devices whose `bundle_id`, `environment`, and `platform` match the current push configuration.
  - Empty device store is a normal case and returns `{"sent": 0, "failed": 0}`.

## 5. Config keys

Push configuration lives only in `journal/config/journal.json`. Unlike voice, it does not fall back to environment variables, because these values describe the current journal’s push environment rather than a process-global secret cache (`think/voice/config.py:30-42`). Every read uses `get_config()` and every path lookup uses `get_journal()` at call time; the runtime does not cache the journal root during startup because `create_app()` does not assign `state.journal_root` until after `start_voice_runtime(app)` today, and Wave 3 preserves that ordering when it adds `start_push_runtime(app)` (`convey/__init__.py:163-166`, `think/voice/runtime.py:44-46`).

| Key | Type | Default | Meaning |
|---|---|---|---|
| `push.apns_key_path` | `string \| null` | `null` | Absolute path to the APNs `.p8` signing key file. |
| `push.apns_key_id` | `string \| null` | `null` | Apple-issued APNs Key ID. |
| `push.apns_team_id` | `string \| null` | `null` | Apple Developer Team ID. |
| `push.bundle_id` | `string \| null` | `null` | App bundle identifier, for example `org.solpbc.solstone-swift`. |
| `push.environment` | `"development" \| "production" \| null` | `"development"` | APNs environment. `null` resolves to `"development"`. |

Literal `think/journal_default.json` block:

```json
"voice": {
  "openai_api_key": null,
  "model": "gpt-realtime",
  "brain_model": "haiku"
},
"push": {
  "apns_key_path": null,
  "apns_key_id": null,
  "apns_team_id": null,
  "bundle_id": null,
  "environment": "development"
},
"retention": {
  "raw_media": "days",
  "raw_media_days": 7,
  "per_stream": {},
  "storage_warning_disk_percent": 80,
  "storage_warning_raw_media_gb": null
}
```

Validation rules (condensed into a predicate sketch after the list):

- Blank strings normalize to `null`.
- `push.environment == null` resolves to `"development"`.
- Any non-null environment outside `{"development", "production"}` is invalid and makes dispatch unavailable until corrected.
- `push.apns_key_path` must be an absolute path. Relative paths are rejected because the server already has one journal-root concept and should not invent a second config root.
- `push.apns_key_path` must point to a readable file before the server reports push as configured.
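The shipped `is_configured()` takes no arguments and reads via `get_config()`; this sketch factors the same checks over values that have already been read and blank-normalized:

```python
# Sketch of the validation rules above as one predicate over pre-read values.
import os
from pathlib import Path


def is_configured(
    key_path: Path | None,
    key_id: str | None,
    team_id: str | None,
    bundle_id: str | None,
    environment: str | None,
) -> bool:
    if key_path is None or not key_path.is_absolute():
        return False  # relative paths are rejected outright
    if not key_path.is_file() or not os.access(key_path, os.R_OK):
        return False  # missing or unreadable .p8 is unconfigured
    if not (key_id and team_id and bundle_id):
        return False
    env = environment or "development"  # null resolves to development
    return env in {"development", "production"}
```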
## 6. Payload shapes per category

Common transport rules:

- `dispatch.send(...)` uses `httpx.AsyncClient(http2=True)` against `https://api.sandbox.push.apple.com` for `"development"` and `https://api.push.apple.com` for `"production"`.
- Authorization is an APNs bearer JWT signed with ES256 over the configured `.p8` key, using header `{"alg":"ES256","kid":<apns_key_id>}` and claims `{"iss":<apns_team_id>,"iat":<now>}`.
- The JWT is cached in memory and regenerated when older than 55 minutes. Wave 3 does not mint a fresh token per request.
- `apns-topic` is always the configured `push.bundle_id`.
- `apns-priority` is `10` for every Wave 3 send because every shipped payload includes a visible alert. `5` remains reserved for future silent-only pushes and is not used in Wave 3.
- `BadDeviceToken` and `Unregistered` responses from APNs cause `dispatch.send(...)` to call `devices.remove_device(token)` before returning failure.
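A sketch of the mint-and-cache step under those rules, using PyJWT directly; the shipped `think/push/dispatch.py` keys its cache on the configured credentials as well as token age:

```python
# Sketch only: ES256 bearer JWT with a 55-minute in-memory cache.
import time

import jwt  # PyJWT

_cached_token: str | None = None
_cached_iat: float = 0.0


def mint_apns_jwt(
    signing_key: str, key_id: str, team_id: str, now: float | None = None
) -> str:
    """Return the cached bearer JWT, reminting once it is older than 55 minutes."""
    global _cached_token, _cached_iat
    now = time.time() if now is None else now
    if _cached_token is not None and now - _cached_iat <= 55 * 60:
        return _cached_token
    _cached_token = jwt.encode(
        {"iss": team_id, "iat": int(now)},  # claims per the rules above
        signing_key,                        # contents of the .p8 file
        algorithm="ES256",
        headers={"kid": key_id},
    )
    _cached_iat = now
    return _cached_token
```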
Category constants defined by the server:

- `SOLSTONE_DAILY_BRIEFING`
- `SOLSTONE_PRE_MEETING_PREP`
- `SOLSTONE_AGENT_ALERT`
- `SOLSTONE_COMMITMENT_NUDGE`

### 6.1 Daily Briefing

Headers:

- `apns-topic: <bundle_id>`
- `apns-collapse-id: briefing.<YYYYMMDD>`
- `apns-priority: 10`

Payload:

```json
{
  "aps": {
    "alert": {
      "title": "Daily Briefing",
      "body": "Your briefing is ready — tap to view"
    },
    "category": "SOLSTONE_DAILY_BRIEFING",
    "sound": "default",
    "mutable-content": 1,
    "content-available": 1
  },
  "data": {
    "action": "open_briefing",
    "day": "20260419",
    "generated": "2026-04-19T06:45:00",
    "needs_attention_count": 3
  }
}
```

Builder rules:

- `day` is the local journal day in `YYYYMMDD` format.
- `generated` comes from briefing frontmatter when present, because `_load_briefing_md(today)` already loads that metadata (`apps/home/routes.py:149-198`, `tests/fixtures/journal/identity/briefing.md:1-14`).
- `needs_attention_count` is the length of the bullets list returned by `_load_briefing_md(today)` (`apps/home/routes.py:191-198`).

### 6.2 Pre-Meeting Prep

Headers:

- `apns-topic: <bundle_id>`
- `apns-collapse-id: meeting.<activity_id>`
- `apns-priority: 10`

Payload:

```json
{
  "aps": {
    "alert": {
      "title": "Pre-Meeting Prep",
      "body": "Meeting in 15 minutes — tap to view"
    },
    "category": "SOLSTONE_PRE_MEETING_PREP",
    "sound": "default",
    "mutable-content": 1,
    "content-available": 1,
    "interruption-level": "time-sensitive"
  },
  "data": {
    "action": "open_pre_meeting",
    "activity_id": "anticipated_meeting_090000_0420",
    "facet": "work",
    "day": "20260420",
    "start": "09:00",
    "title": "Launch sync",
    "location": "Room A",
    "participants": [
      "Juliet Capulet"
    ],
    "prep_notes": "Bring launch notes"
  }
}
```

Builder rules (a builder sketch follows the list):

- `activity_id` comes from the activity record’s `id`, which already exists on anticipated rows and is the right natural key for collapse and dedupe (`tests/test_voice_tools.py:197-211`, `think/activities.py:893-915`).
- `start` accepts stored `HH:MM` or `HH:MM:SS`; the trigger parser is tolerant, but the payload preserves the stored string as-is.
- `participants` pulls attendee names from `participation` entries where `role=="attendee"`, following the existing Home surface pattern (`apps/home/routes.py:312-337`, `tests/test_voice_tools.py:197-214`).
- `interruption-level` is present only on this category.
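A sketch of the §6.2 builder, showing the attendee filter that the JSON example alone cannot; field names follow that example, and the shipped builder may differ in detail:

```python
# Sketch of build_pre_meeting_payload; generic lock-screen body, detail in data.
from typing import Any


def build_pre_meeting_payload(
    activity: dict[str, Any], facet: str, day: str
) -> dict[str, Any]:
    # Only attendee participation entries surface as participants.
    participants = [
        entry.get("name")
        for entry in activity.get("participation", [])
        if entry.get("role") == "attendee"
    ]
    return {
        "aps": {
            "alert": {
                "title": "Pre-Meeting Prep",
                "body": "Meeting in 15 minutes — tap to view",
            },
            "category": "SOLSTONE_PRE_MEETING_PREP",
            "sound": "default",
            "mutable-content": 1,
            "content-available": 1,
            "interruption-level": "time-sensitive",
        },
        "data": {
            "action": "open_pre_meeting",
            "activity_id": activity.get("id"),
            "facet": facet,
            "day": day,
            "start": activity.get("start"),  # stored string preserved as-is
            "title": activity.get("title"),
            "location": activity.get("location"),
            "participants": participants,
            "prep_notes": activity.get("prep_notes"),
        },
    }
```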
### 6.3 Agent Alert

Headers:

- `apns-topic: <bundle_id>`
- `apns-collapse-id: alert.<context_id|uuid>`
- `apns-priority: 10`

Payload:

```json
{
  "aps": {
    "alert": {
      "title": "Agent Alert",
      "body": "A workflow needs attention"
    },
    "category": "SOLSTONE_AGENT_ALERT",
    "sound": "default",
    "mutable-content": 1,
    "content-available": 1
  },
  "data": {
    "action": "open_alert",
    "context_id": "triage-20260419-001"
  }
}
```

Builder rules:

- `title` and `body` come from the caller.
- `context_id` is required for the public `send_agent_alert(...)` helper and is used directly in both `data.context_id` and the collapse id.
- `POST /api/push/test` always generates a fresh `push-test-<uuid>` context id; the route body does not accept a caller-supplied context.

### 6.4 Deferred Commitment Nudge

Headers:

- `apns-topic: <bundle_id>`
- `apns-collapse-id: commitment.<ledger_id>`
- `apns-priority: 10`

Forward-compatible payload:

```json
{
  "aps": {
    "alert": {
      "title": "Commitment Nudge",
      "body": "A commitment needs attention — tap to view"
    },
    "category": "SOLSTONE_COMMITMENT_NUDGE",
    "sound": "default",
    "mutable-content": 1,
    "content-available": 1
  },
  "data": {
    "action": "open_commitment",
    "ledger_id": "lg_123"
  }
}
```

Wave 3 defines this payload shape and constant only for client forward-compatibility. No trigger emits it until Wave 3.1 lands a real due-date primitive.

### 6.5 PII fallback rule

Wave 3 hardcodes the lock-screen-safe fallback into the payload builders. It is not a TODO for the iOS client.

- Daily Briefing body is always generic: “Your briefing is ready — tap to view.”
- Pre-Meeting Prep body is always generic: “Meeting in 15 minutes — tap to view.”
- Deferred Commitment Nudge body is always generic: “A commitment needs attention — tap to view.”
- Sensitive detail lives in `data`, where the Notification Service Extension can read it off-device and decide what to reveal.
- Agent Alert is the exception: `title` and `body` are caller-provided, so the caller is responsible for keeping them lock-screen safe.

## 7. Domain write-ownership (L1–L9 declarations)

Push owns its own journal domain. It is not an indexer, importer, scheduler, or search subsystem, so its writes belong in `think/push/*` and do not need to be routed through another domain owner (`scripts/check_layer_hygiene.py:38-57`, `scripts/check_layer_hygiene.py:199-209`).

| Path | Owner module | Write API | Read API |
|---|---|---|---|
| `journal/config/push_devices.json` | `think/push/devices.py` | `register_device`, `remove_device` | `load_devices` |
| `journal/push/nudge_log.jsonl` | `think/push/triggers.py` | `_append_nudge_log` | `_has_nudged` |

L1 declaration:

- `think/push/*` is a feature-owned runtime and transport layer, not infrastructure. Its journal writes are feature state, not accidental cross-domain mutation.

L2 declaration:

- No module outside `think/push/` writes `journal/config/push_devices.json`.
- No module outside `think/push/` writes `journal/push/nudge_log.jsonl`.
- `convey/push.py` calls feature APIs but never writes these paths directly.

L3 declaration:

- `load_devices()` never writes. Malformed-store recovery is explicit: the reader returns an empty list and leaves rewrite responsibility to the next write path.
- `_has_nudged(...)` never writes. Dedupe is read-first, then write on success.
- There is no create-on-miss hidden behind any `load_*` or `get_*` helper.

L6/L7 declaration:

- Indexers and importers do not touch push paths.
- The current hygiene script only scans infrastructure scopes `think/indexer`, `think/importers`, `think/search`, and `think/graph`, plus read-verb `apps/*/call.py` handlers; `think/push/*` is outside those scopes (`scripts/check_layer_hygiene.py:38-44`, `scripts/check_layer_hygiene.py:124-145`, `scripts/check_layer_hygiene.py:156-180`).
- The current hygiene script also only looks for writes near `journal/entities`, `journal/facets`, and `journal/observations`, not `journal/config/push_devices.json` or `journal/push/nudge_log.jsonl`, so no allowlist entry is required (`scripts/check_layer_hygiene.py:59-72`, `scripts/check_layer_hygiene.py:105-108`).

L8 declaration:

- Hooks do not apply. Push has no talent hook and does not write through `think/hooks.py` or `talent/*.py`.

L9 declaration (a sketch of the ordering follows the list):

- Daily Briefing dedupe key: `(SOLSTONE_DAILY_BRIEFING, YYYYMMDD)`
- Pre-Meeting Prep dedupe key: `(SOLSTONE_PRE_MEETING_PREP, activity_id, YYYYMMDD)`
- Agent Alert dedupe key: `(SOLSTONE_AGENT_ALERT, context_id)`
- Commitment Nudge reserved dedupe key: `(SOLSTONE_COMMITMENT_NUDGE, ledger_id)`
- Trigger order is always:
  - compute dedupe key
  - check `_has_nudged(...)`
  - send to eligible devices
  - append log only when `sent > 0`
- That write-after-success rule keeps Pre-Meeting Prep retryable if the first attempt produces zero successful sends, while still preventing duplicate delivery after one device has already received the notification.

`nudge_log.jsonl` line shape:

- Daily Briefing line: `ts`, `category`, `dedupe_key`, `day`, `sent`, `failed`
- Pre-Meeting Prep line: `ts`, `category`, `dedupe_key`, `day`, `facet`, `activity_id`, `sent`, `failed`
- Agent Alert line: `ts`, `category`, `dedupe_key`, `context_id`, `sent`, `failed`
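A sketch of that ordering against a simplified JSONL log; the path constant and both helpers here are illustrative stand-ins for the private owners named in the ownership table:

```python
# Sketch only: read-first dedupe, append only after one real send success.
import json
import time
from pathlib import Path
from typing import Callable

NUDGE_LOG = Path("journal/push/nudge_log.jsonl")  # assumed journal-relative path


def _has_nudged(dedupe_key: list[str]) -> bool:
    """Read-only dedupe check; never creates the log on miss."""
    if not NUDGE_LOG.exists():
        return False
    with NUDGE_LOG.open(encoding="utf-8") as handle:
        return any(
            json.loads(line).get("dedupe_key") == dedupe_key
            for line in handle
            if line.strip()
        )


def fire(dedupe_key: list[str], send: Callable[[], tuple[int, int]]) -> tuple[int, int]:
    """Send once per key; log the fire only when at least one device succeeded."""
    if _has_nudged(dedupe_key):
        return 0, 0
    sent, failed = send()
    if sent > 0:  # zero-success attempts stay retryable on the next tick
        line = {"ts": int(time.time()), "dedupe_key": dedupe_key,
                "sent": sent, "failed": failed}
        with NUDGE_LOG.open("a", encoding="utf-8") as handle:
            handle.write(json.dumps(line) + "\n")
    return sent, failed
```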
## 8. Tests

All push tests use `tests/fixtures/journal/` plus `monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", ...)` where necessary, following the existing voice integration and route test setup pattern (`tests/test_voice_routes.py:14-23`, `tests/test_voice_integration.py:102-149`).

### 8.1 `tests/test_push_config.py`

Purpose: mirror the small-reader coverage shape of `tests/test_voice_config.py` while locking down the journal-only push config contract (`tests/test_voice_config.py:9-43`).

- Defaults: all four required APNs fields resolve to `None`, and `push.environment` resolves to `"development"` when omitted.
- Whitespace cleanup: blank strings normalize to `None`.
- Journal precedence: populated `journal.json` values are used directly.
- No env fallback: setting unrelated env vars does not change push config reads.
- Invalid environment: non-`development` / non-`production` values raise a validation error or mark config unavailable, depending on the caller.
- Missing key path: status/configured becomes false when the configured `.p8` path does not exist.

### 8.2 `tests/test_push_devices.py`

Purpose: cover the device store as its own write-owning module.

- Register/load/remove round trip against a temporary journal.
- Duplicate token registration updates the existing row instead of creating a second device.
- `registered_at` refreshes on duplicate registration.
- Remove returns `False` for an unknown token and leaves the count unchanged.
- Empty store returns `[]` and does not create the file.
- Malformed `push_devices.json` is treated as empty with a warning, then repaired by the next successful write.
- Status-shaping helper masks tokens to last four only.

### 8.3 `tests/test_push_dispatch.py`

Purpose: unit-test APNs transport, JWT handling, payload builders, and token-redaction rules.

- JWT signing shape: header includes `alg=ES256` and `kid=<apns_key_id>`, claims include `iss=<apns_team_id>` and `iat=<now>`.
- JWT caching: second send inside 55 minutes reuses the cached token.
- JWT refresh: send at 60+ minutes mints a fresh token.
- Payload shape for Daily Briefing matches §6 exactly.
- Payload shape for Pre-Meeting Prep includes `interruption-level: time-sensitive`.
- Payload shape for Agent Alert matches §6 exactly.
- Commitment payload constant and builder exist but are not wired to a trigger.
- `apns-collapse-id` is `briefing.<day>`, `meeting.<activity_id>`, `alert.<context_id>`, and `commitment.<ledger_id>`.
- `BadDeviceToken` response calls `devices.remove_device(token)`.
- `Unregistered` response calls `devices.remove_device(token)`.
- Captured logs never include full tokens, JWTs, or raw `.p8` contents.

### 8.4 `tests/test_push_triggers.py`

Purpose: lock down idempotency, timing, and source filtering in the trigger layer.

- Daily Briefing finish polls until `briefing.md` exists, using a delayed write to simulate the `cortex.finish` before-write ordering.
- Daily Briefing gives up after 10 polls and logs a warning.
- Daily Briefing fires once per day even if the same finish message is delivered twice.
- Daily Briefing uses `_load_briefing_md(today)` and ignores stale or wrong-type briefing files.
- Pre-Meeting Prep scans only `get_enabled_facets().keys()` and skips muted facets.
- Pre-Meeting Prep filters `record["source"] == "anticipated"` and ignores all other activity rows.
- Pre-Meeting Prep detects the 15-minute window with both `HH:MM` and `HH:MM:SS` starts.
- Pre-Meeting Prep is idempotent across repeated periodic ticks within the same 2-minute window.
- `send_agent_alert(...)` builds the expected payload and appends the expected dedupe log line.
- Zero-device and zero-success sends do not append dedupe log lines.

### 8.5 `tests/test_push_routes.py`

Purpose: mirror the validation-heavy style of `tests/test_voice_routes.py` at the new `/api/push/*` surface (`tests/test_voice_routes.py:26-118`).

- `POST /api/push/register` happy path.
- `POST /api/push/register` rejects non-object JSON.
- `POST /api/push/register` rejects missing fields and invalid `environment` / `platform`.
- `DELETE /api/push/register` happy path for an existing token.
- `DELETE /api/push/register` returns `removed: false` for a missing token.
- `GET /api/push/status` returns `configured`, `device_count`, and masked device rows.
- `POST /api/push/test` returns `503` when APNs config is missing.
- `POST /api/push/test` aggregates sent/failed counts from the dispatch layer.
- `POST /api/push/test` rejects unknown category strings.
### 8.6 `tests/test_push_integration.py`

Purpose: prove the whole runtime path from Flask startup through callosum listener and periodic dispatch, following the same end-to-end philosophy as `tests/test_voice_integration.py` (`tests/test_voice_integration.py:115-149`).

- Boot `create_app()` against a fixture journal and verify `start_push_runtime(app)` runs.
- Patch `httpx.AsyncClient.post` so APNs sends are fully mocked.
- Patch JWT signing inputs so tests can assert stable Authorization headers.
- Fire a fake `cortex.finish` event through the push runtime’s `CallosumConnection` callback and assert Daily Briefing dispatch occurs.
- Confirm `nudge_log.jsonl` receives the expected Daily Briefing line.
- Seed anticipated activities and run one periodic `check_pre_meeting_prep(now)` pass through the runtime loop.
- Confirm muted facets do not dispatch.
- Stop the runtime cleanly and assert the loop and thread are cleared.

## 9. Security considerations

- Device-token redaction: full tokens are never returned by `GET /api/push/status` and never written to logs. Status exposes the last four characters only, and dispatch logs use the same redaction rule.
- APNs JWT secrecy: bearer JWT values are never logged. Refresh decisions may log age or cache-hit state, but not the token string itself.
- `.p8` key secrecy: `push.apns_key_path` may appear in operator config, but the file contents themselves are never logged or echoed back through routes.
- PII fallback: Daily Briefing, Pre-Meeting Prep, and deferred Commitment Nudge use generic lock-screen bodies, with detail moved into `data` for device-side handling. Agent Alert is caller-provided and inherits caller responsibility for lock-screen safety.
- Auth: all `/api/push/*` endpoints use the default root auth gate. There is no debug header and no push-specific bypass. Default auth is session cookie, then Basic Auth, then opt-in `trust_localhost` after setup when proxy headers are absent (`convey/root.py:49-57`, `convey/root.py:81-139`).
- `trust_localhost` stays narrow by design: it only applies after setup completion and only when `request.remote_addr` is local and proxy headers are absent (`convey/root.py:119-139`).
- Facet eligibility: all push triggers operate on `get_enabled_facets().keys()`, so muted facets are excluded from both dispatch and device-visible summaries (`think/facets.py:255-261`, `think/surfaces/ledger.py:454-456`).
- Terminology covenant: operator-visible strings and payload labels use the repo’s “observer/listen” vocabulary and avoid “capture”, “record”, “keeper”, or “assistant”.
- Hosted-MVP privacy stance: payloads are cleartext to APNs and the device; Wave 3 is explicitly non-E2E.
- No analytics: Wave 3 adds no tracking, analytics beacons, crash reporting, or delivery pixel equivalents.
## 10. Live validation

**Live APNs validation is deferred pending Apple Developer enrollment.** Wave 3 ships infrastructure and mocked tests only. When enrollment completes:

1. Configure `push.apns_key_path`, `push.apns_key_id`, `push.apns_team_id`, `push.bundle_id`, and `push.environment` in `journal.json`.
2. Register a real device from the iOS client.
3. Exercise `POST /api/push/test` against the development APNs environment and confirm the device receives the notification.
4. Manually trigger a `morning_briefing` cortex run and confirm the Daily Briefing push lands.
5. Wait for a scheduled meeting and confirm Pre-Meeting Prep lands within approximately ±30 seconds of T-15:00.
6. Only then flip `push.environment` to `production` and deploy.

Sandbox smoke-test commands:

```sh
BASE_URL=${BASE_URL:-http://127.0.0.1:5015}
AUTH=${AUTH:-":$SOL_PASSWORD"}
TOKEN=${TOKEN:-0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef}

curl -u "$AUTH" \
  -H 'Content-Type: application/json' \
  -X POST "$BASE_URL/api/push/register" \
  -d '{
    "device_token": "'"$TOKEN"'",
    "bundle_id": "org.solpbc.solstone-swift",
    "environment": "development",
    "platform": "ios"
  }'

curl -u "$AUTH" \
  "$BASE_URL/api/push/status"

curl -u "$AUTH" \
  -H 'Content-Type: application/json' \
  -X POST "$BASE_URL/api/push/test" \
  -d '{
    "title": "Push test",
    "body": "This is a sandbox test notification.",
    "category": "SOLSTONE_AGENT_ALERT"
  }'

curl -u "$AUTH" \
  -H 'Content-Type: application/json' \
  -X DELETE "$BASE_URL/api/push/register" \
  -d '{
    "device_token": "'"$TOKEN"'"
  }'
```

Basic Auth uses only the password component, so `-u ":$SOL_PASSWORD"` is the portable curl form for these routes (`convey/root.py:49-57`).

## 11. Open questions

- Agent Alert body limits: Wave 3 should probably enforce a soft cap before the native client ships, but the exact truncation policy can wait until the iOS notification UI settles.
- Multi-build device identity: Wave 3 keys stored devices by token alone. If one journal starts registering multiple app builds that share a token namespace, revisit whether identity should widen to `(token, bundle_id, environment)`.
- Retry telemetry: Wave 3 records dedupe state in `nudge_log.jsonl`, but it does not yet record APNs failure reasons in a separate operator-facing history file.
## 12. Sources

Voice-server analog:

- `docs/design/voice-server.md:1-465`, `convey/voice.py:26-184`, `convey/__init__.py:112-166`, `think/voice/runtime.py:21-109`, `think/voice/config.py:17-42`, `think/voice/sideband.py:20-61`, `tests/test_voice_config.py:9-43`, `tests/test_voice_runtime.py:19-103`, `tests/test_voice_routes.py:26-118`, `tests/test_voice_integration.py:102-149`

Callosum / cortex finish timing:

- `think/callosum.py:245-346`, `think/cortex.py:433-441`, `think/cortex.py:461-510`, `think/cortex.py:621-626`, `apps/home/routes.py:149-198`, `apps/home/workspace.html:1468-1470`, `apps/home/workspace.html:1732-1788`, `convey/bridge.py:45-86`, `apps/home/events.py:21-55`

Ledger / activities APIs:

- `think/surfaces/types.py:16-31`, `think/surfaces/ledger.py:395-487`, `think/activities.py:877-945`, `apps/home/routes.py:305-337`, `think/facets.py:255-261`, `tests/test_voice_tools.py:197-214`, `tests/fixtures/journal/identity/briefing.md:1-14`

Auth model:

- `convey/root.py:49-57`, `convey/root.py:81-139`

Scheduler / runtime choice:

- `think/scheduler.py:29-30`, `think/scheduler.py:375-438`, `think/supervisor.py:1311-1371`, `think/heartbeat.py:45-138`

Layer-hygiene script:

- `scripts/check_layer_hygiene.py:38-72`, `scripts/check_layer_hygiene.py:105-108`, `scripts/check_layer_hygiene.py:124-145`, `scripts/check_layer_hygiene.py:156-180`, `scripts/check_layer_hygiene.py:199-220`

Config and dependencies:

- `think/journal_default.json:35-39`, `pyproject.toml:53`

Wave 2 commit:

- `0a693381 voice: ship Wave 2 voice server (root /api/voice/*, 9-tool sideband)`
+2
pyproject.toml
```diff
···
     "openai-agents>=0.1.0",
     "anthropic",
     "httpx",
+    "h2",
+    "pyjwt>=2.8",
     "jsonschema>=4.26,<5",
     "genai-prices",
     # Link tunnel service (think/link/): TLS 1.3 in memory-BIO mode over
```
+7
tests/conftest.py
```diff
···
 from think.entities.loading import clear_entity_loading_cache
 from think.entities.observations import clear_observation_cache
 from think.entities.relationships import clear_relationship_caches
+from think.push.runtime import stop_all_push_runtime
 from think.utils import now_ms
 from think.voice import brain as voice_brain
 from think.voice.runtime import stop_all_voice_runtime
···
     yield
     stop_all_voice_runtime()
     voice_brain.clear_brain_state()
+
+
+@pytest.fixture(autouse=True)
+def _cleanup_push_runtime():
+    yield
+    stop_all_push_runtime()


 @pytest.fixture
```
+140
tests/test_push_config.py
```python
# SPDX-License-Identifier: AGPL-3.0-only
# Copyright (c) 2026 sol pbc

from __future__ import annotations

import json
from pathlib import Path

import pytest

from think.push import config


def _write_config(tmp_path: Path, payload: dict) -> None:
    config_path = tmp_path / "config" / "journal.json"
    config_path.parent.mkdir(parents=True, exist_ok=True)
    config_path.write_text(json.dumps(payload), encoding="utf-8")


def test_push_config_defaults(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    _write_config(tmp_path, {"agent": {"name": "sol"}})

    assert config.get_apns_key_path() is None
    assert config.get_apns_key_id() is None
    assert config.get_apns_team_id() is None
    assert config.get_bundle_id() is None
    assert config.get_environment() == "development"
    assert config.is_configured() is False


def test_push_config_reads_journal_values(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    key_path = tmp_path / "keys" / "apns.p8"
    key_path.parent.mkdir(parents=True, exist_ok=True)
    key_path.write_text("PRIVATE KEY", encoding="utf-8")
    _write_config(
        tmp_path,
        {
            "push": {
                "apns_key_path": f" {key_path} ",
                "apns_key_id": " KEY123 ",
                "apns_team_id": " TEAM123 ",
                "bundle_id": " org.solpbc.solstone-swift ",
                "environment": "production",
            }
        },
    )

    assert config.get_apns_key_path() == key_path
    assert config.get_apns_key_id() == "KEY123"
    assert config.get_apns_team_id() == "TEAM123"
    assert config.get_bundle_id() == "org.solpbc.solstone-swift"
    assert config.get_environment() == "production"
    assert config.is_configured() is True


def test_push_config_blank_values_normalize_to_none(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    _write_config(
        tmp_path,
        {
            "push": {
                "apns_key_path": " ",
                "apns_key_id": "\t",
                "apns_team_id": "",
                "bundle_id": " ",
                "environment": " ",
            }
        },
    )

    assert config.get_apns_key_path() is None
    assert config.get_apns_key_id() is None
    assert config.get_apns_team_id() is None
    assert config.get_bundle_id() is None
    assert config.get_environment() == "development"
    assert config.is_configured() is False


def test_push_config_invalid_environment_raises(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    _write_config(tmp_path, {"push": {"environment": "staging"}})

    with pytest.raises(
        ValueError, match="push.environment must be 'development' or 'production'"
    ):
        config.get_environment()

    assert config.is_configured() is False


def test_push_config_missing_key_file_is_unconfigured(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    _write_config(
        tmp_path,
        {
            "push": {
                "apns_key_path": str(tmp_path / "missing.p8"),
                "apns_key_id": "KEY123",
                "apns_team_id": "TEAM123",
                "bundle_id": "org.solpbc.solstone-swift",
                "environment": "development",
            }
        },
    )

    assert config.is_configured() is False


def test_push_config_relative_key_path_is_unconfigured(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    relative_key_path = Path("keys/apns.p8")
    _write_config(
        tmp_path,
        {
            "push": {
                "apns_key_path": str(relative_key_path),
                "apns_key_id": "KEY123",
                "apns_team_id": "TEAM123",
                "bundle_id": "org.solpbc.solstone-swift",
                "environment": "development",
            }
        },
    )

    assert config.get_apns_key_path() == relative_key_path
    assert config.is_configured() is False


def test_push_config_ignores_env_fallback(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    monkeypatch.setenv("OPENAI_API_KEY", "sk-env")
    monkeypatch.setenv("APNS_KEY_ID", "ENVKEY")
    _write_config(tmp_path, {"push": {}})

    assert config.get_apns_key_id() is None
    assert config.get_apns_team_id() is None
    assert config.get_bundle_id() is None
    assert config.is_configured() is False
```
+136
tests/test_push_devices.py
```python
# SPDX-License-Identifier: AGPL-3.0-only
# Copyright (c) 2026 sol pbc

from __future__ import annotations

import json
from pathlib import Path

from think.push import devices


def _devices_path(tmp_path: Path) -> Path:
    return tmp_path / "config" / "push_devices.json"


def test_load_devices_returns_empty_for_missing_store(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    assert devices.load_devices() == []


def test_register_load_remove_round_trip(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    count = devices.register_device(
        token="a" * 64,
        bundle_id="org.solpbc.solstone-swift",
        environment="development",
        platform="ios",
    )

    assert count == 1
    stored = devices.load_devices()
    assert stored == [
        {
            "token": "a" * 64,
            "bundle_id": "org.solpbc.solstone-swift",
            "environment": "development",
            "platform": "ios",
            "registered_at": stored[0]["registered_at"],
        }
    ]

    removed = devices.remove_device("a" * 64)
    assert removed is True
    assert devices.load_devices() == []


def test_register_device_updates_existing_token(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    times = iter([1000, 2000])
    monkeypatch.setattr(devices.time, "time", lambda: next(times))

    first = devices.register_device(
        token="b" * 64,
        bundle_id="org.solpbc.solstone-swift",
        environment="development",
        platform="ios",
    )
    second = devices.register_device(
        token="b" * 64,
        bundle_id="org.solpbc.solstone-swift",
        environment="production",
        platform="ios",
    )

    assert first == 1
    assert second == 1
    assert devices.load_devices() == [
        {
            "token": "b" * 64,
            "bundle_id": "org.solpbc.solstone-swift",
            "environment": "production",
            "platform": "ios",
            "registered_at": 2000,
        }
    ]


def test_remove_device_returns_false_for_unknown_token(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    devices.register_device(
        token="c" * 64,
        bundle_id="org.solpbc.solstone-swift",
        environment="development",
        platform="ios",
    )

    assert devices.remove_device("d" * 64) is False
    assert len(devices.load_devices()) == 1


def test_load_devices_returns_empty_for_malformed_store(monkeypatch, tmp_path, caplog):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    path = _devices_path(tmp_path)
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text('{"devices": "bad"}', encoding="utf-8")

    loaded = devices.load_devices()

    assert loaded == []
    assert "push device store unreadable" in caplog.text


def test_status_view_masks_token(monkeypatch, tmp_path):
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    path = _devices_path(tmp_path)
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(
        json.dumps(
            {
                "devices": [
                    {
                        "token": "0123456789abcdef",
                        "bundle_id": "org.solpbc.solstone-swift",
                        "environment": "development",
                        "platform": "ios",
                        "registered_at": 1713528000,
                    }
                ]
            }
        ),
        encoding="utf-8",
    )

    device = devices.load_devices()[0]
    view = devices.status_view(device)

    assert devices.mask_token("0123456789abcdef") == "...cdef"
    assert view == {
        "token_suffix": "...cdef",
        "bundle_id": "org.solpbc.solstone-swift",
        "environment": "development",
        "platform": "ios",
        "registered_at": "2024-04-19T12:00:00Z",
    }
```
+222
tests/test_push_dispatch.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + from __future__ import annotations 5 + 6 + import re 7 + from pathlib import Path 8 + from unittest.mock import patch 9 + 10 + import httpx 11 + import jwt 12 + 13 + from think.push import dispatch 14 + 15 + TEST_KEY = """-----BEGIN PRIVATE KEY----- 16 + MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg+Zj7Bk6Dzp080/PU 17 + jTZnJ6kP4KtlHErFO/WuVRTQvkShRANCAARW8djY5HF7K8noSZQRfjP38mIzaufi 18 + /YPI38YuaWmiPIqRmwDOu5rICl4PPLem4k+qtb950rlYCGx3J+MQN9tO 19 + -----END PRIVATE KEY----- 20 + """ 21 + 22 + 23 + def _write_key(tmp_path: Path) -> Path: 24 + key_path = tmp_path / "keys" / "apns.p8" 25 + key_path.parent.mkdir(parents=True, exist_ok=True) 26 + key_path.write_text(TEST_KEY, encoding="utf-8") 27 + return key_path 28 + 29 + 30 + def _configure_push(monkeypatch, tmp_path: Path) -> None: 31 + key_path = _write_key(tmp_path) 32 + monkeypatch.setattr(dispatch, "get_apns_key_path", lambda: key_path) 33 + monkeypatch.setattr(dispatch, "get_apns_key_id", lambda: "KEY123") 34 + monkeypatch.setattr(dispatch, "get_apns_team_id", lambda: "TEAM123") 35 + monkeypatch.setattr(dispatch, "get_bundle_id", lambda: "org.solpbc.solstone-swift") 36 + monkeypatch.setattr(dispatch, "get_environment", lambda: "development") 37 + dispatch._APNS_JWT_CACHE.clear() 38 + 39 + 40 + def test_mint_apns_jwt_has_expected_header_and_claims(monkeypatch, tmp_path): 41 + _configure_push(monkeypatch, tmp_path) 42 + 43 + token = dispatch._mint_apns_jwt(now=1713528000) 44 + 45 + assert jwt.get_unverified_header(token) == { 46 + "alg": "ES256", 47 + "kid": "KEY123", 48 + "typ": "JWT", 49 + } 50 + assert jwt.decode(token, options={"verify_signature": False}) == { 51 + "iss": "TEAM123", 52 + "iat": 1713528000, 53 + } 54 + 55 + 56 + def test_mint_apns_jwt_reuses_cached_token_within_55_minutes(monkeypatch, tmp_path): 57 + _configure_push(monkeypatch, tmp_path) 58 + 59 + first = dispatch._mint_apns_jwt(now=1000) 60 + second = dispatch._mint_apns_jwt(now=1000 + 55 * 60) 61 + 62 + assert first == second 63 + 64 + 65 + def test_mint_apns_jwt_refreshes_after_55_minutes(monkeypatch, tmp_path): 66 + _configure_push(monkeypatch, tmp_path) 67 + 68 + first = dispatch._mint_apns_jwt(now=1000) 69 + second = dispatch._mint_apns_jwt(now=1000 + 55 * 60 + 1) 70 + 71 + assert first != second 72 + 73 + 74 + def test_daily_briefing_payload_shape(): 75 + payload = dispatch.build_daily_briefing_payload( 76 + day="20260419", generated="2026-04-19T06:45:00", needs_attention_count=3 77 + ) 78 + 79 + assert payload["aps"]["category"] == dispatch.CATEGORY_DAILY_BRIEFING 80 + assert payload["aps"]["sound"] == "default" 81 + assert payload["aps"]["mutable-content"] == 1 82 + assert payload["aps"]["content-available"] == 1 83 + assert "interruption-level" not in payload["aps"] 84 + assert payload["data"] == { 85 + "action": "open_briefing", 86 + "day": "20260419", 87 + "generated": "2026-04-19T06:45:00", 88 + "needs_attention_count": 3, 89 + } 90 + 91 + 92 + def test_pre_meeting_payload_shape(): 93 + payload = dispatch.build_pre_meeting_payload( 94 + activity={ 95 + "id": "anticipated_meeting_090000_0420", 96 + "start": "09:00", 97 + "title": "Launch sync", 98 + "location": "Room A", 99 + "prep_notes": "Bring launch notes", 100 + "participation": [ 101 + {"name": "Juliet Capulet", "role": "attendee"}, 102 + {"name": "Observer", "role": "organizer"}, 103 + ], 104 + }, 105 + facet="work", 106 + day="20260420", 107 + ) 108 + 109 + assert payload["aps"]["category"] == 
dispatch.CATEGORY_PRE_MEETING_PREP 110 + assert payload["aps"]["interruption-level"] == "time-sensitive" 111 + assert payload["data"]["action"] == "open_pre_meeting" 112 + assert payload["data"]["participants"] == ["Juliet Capulet"] 113 + 114 + 115 + def test_agent_alert_payload_shape(): 116 + payload = dispatch.build_agent_alert_payload( 117 + title="Agent Alert", body="Needs review", context_id="ctx-1" 118 + ) 119 + 120 + assert payload["aps"]["category"] == dispatch.CATEGORY_AGENT_ALERT 121 + assert payload["data"] == {"action": "open_alert", "context_id": "ctx-1"} 122 + assert "interruption-level" not in payload["aps"] 123 + 124 + 125 + def test_commitment_payload_shape(): 126 + payload = dispatch.build_commitment_payload(ledger_id="lg_123") 127 + 128 + assert payload["aps"]["category"] == dispatch.CATEGORY_COMMITMENT_NUDGE 129 + assert payload["data"] == {"action": "open_commitment", "ledger_id": "lg_123"} 130 + 131 + 132 + def test_collapse_ids(): 133 + assert dispatch.build_daily_briefing_collapse_id("20260419") == "briefing.20260419" 134 + assert ( 135 + dispatch.build_pre_meeting_collapse_id("anticipated_meeting_090000_0420") 136 + == "meeting.anticipated_meeting_090000_0420" 137 + ) 138 + assert dispatch.build_agent_alert_collapse_id("ctx-1") == "alert.ctx-1" 139 + assert dispatch.build_commitment_collapse_id("lg_123") == "commitment.lg_123" 140 + 141 + 142 + def test_send_removes_bad_device_token(monkeypatch, tmp_path): 143 + _configure_push(monkeypatch, tmp_path) 144 + removed: list[str] = [] 145 + monkeypatch.setattr( 146 + dispatch.devices, "remove_device", lambda token: removed.append(token) or True 147 + ) 148 + 149 + async def fake_post(self, url, *, headers, json): 150 + return httpx.Response(400, json={"reason": "BadDeviceToken"}) 151 + 152 + with patch.object(httpx.AsyncClient, "post", new=fake_post): 153 + ok, reason = dispatch.send( 154 + {"token": "a" * 64}, 155 + dispatch.build_agent_alert_payload( 156 + title="Agent Alert", body="Needs review", context_id="ctx-1" 157 + ), 158 + collapse_id="alert.ctx-1", 159 + ) 160 + 161 + assert ok is False 162 + assert reason == "BadDeviceToken" 163 + assert removed == ["a" * 64] 164 + 165 + 166 + def test_send_removes_unregistered_device_on_410(monkeypatch, tmp_path): 167 + _configure_push(monkeypatch, tmp_path) 168 + removed: list[str] = [] 169 + monkeypatch.setattr( 170 + dispatch.devices, "remove_device", lambda token: removed.append(token) or True 171 + ) 172 + 173 + async def fake_post(self, url, *, headers, json): 174 + return httpx.Response(410, json={"reason": "Unregistered"}) 175 + 176 + with patch.object(httpx.AsyncClient, "post", new=fake_post): 177 + ok, reason = dispatch.send( 178 + {"token": "b" * 64}, 179 + dispatch.build_agent_alert_payload( 180 + title="Agent Alert", body="Needs review", context_id="ctx-1" 181 + ), 182 + collapse_id="alert.ctx-1", 183 + ) 184 + 185 + assert ok is False 186 + assert reason == "Unregistered" 187 + assert removed == ["b" * 64] 188 + 189 + 190 + def test_send_many_reuses_client_and_redacts_tokens(monkeypatch, tmp_path, caplog): 191 + _configure_push(monkeypatch, tmp_path) 192 + calls: list[dict[str, object]] = [] 193 + caplog.set_level("WARNING", logger="solstone.push.dispatch") 194 + 195 + async def fake_post(self, url, *, headers, json): 196 + calls.append({"url": url, "headers": headers, "json": json}) 197 + return httpx.Response(500, json={"reason": "InternalServerError"}) 198 + 199 + with patch.object(httpx.AsyncClient, "post", new=fake_post): 200 + sent, failed = 
dispatch.send_many( 201 + [ 202 + {"token": "c" * 64}, 203 + {"token": "d" * 64}, 204 + ], 205 + dispatch.build_daily_briefing_payload( 206 + day="20260419", 207 + generated="2026-04-19T06:45:00", 208 + needs_attention_count=1, 209 + ), 210 + collapse_id="briefing.20260419", 211 + ) 212 + 213 + assert sent == 0 214 + assert failed == 2 215 + assert len(calls) == 2 216 + assert calls[0]["headers"]["apns-collapse-id"] == "briefing.20260419" 217 + assert calls[0]["headers"]["apns-priority"] == "10" 218 + assert calls[0]["headers"]["apns-push-type"] == "alert" 219 + assert calls[0]["headers"]["apns-topic"] == "org.solpbc.solstone-swift" 220 + assert "push rejected token=...cccc" in caplog.text 221 + assert all(record.levelname == "WARNING" for record in caplog.records) 222 + assert re.search(r"[0-9a-f]{64}", caplog.text) is None
+146
tests/test_push_integration.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + from __future__ import annotations 5 + 6 + import json 7 + from datetime import datetime 8 + from pathlib import Path 9 + from unittest.mock import patch 10 + 11 + import httpx 12 + 13 + from convey import create_app 14 + from think.push import devices, runtime, triggers 15 + 16 + TEST_KEY = """-----BEGIN PRIVATE KEY----- 17 + MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg+Zj7Bk6Dzp080/PU 18 + jTZnJ6kP4KtlHErFO/WuVRTQvkShRANCAARW8djY5HF7K8noSZQRfjP38mIzaufi 19 + /YPI38YuaWmiPIqRmwDOu5rICl4PPLem4k+qtb950rlYCGx3J+MQN9tO 20 + -----END PRIVATE KEY----- 21 + """ 22 + 23 + 24 + class FixedDateTime(datetime): 25 + @classmethod 26 + def now(cls, tz=None): 27 + return cls(2026, 3, 27, 8, 45, 0, tzinfo=tz) 28 + 29 + 30 + def _write_push_config(journal_copy: Path) -> None: 31 + key_path = journal_copy / "keys" / "apns.p8" 32 + key_path.parent.mkdir(parents=True, exist_ok=True) 33 + key_path.write_text(TEST_KEY, encoding="utf-8") 34 + config_path = journal_copy / "config" / "journal.json" 35 + config = json.loads(config_path.read_text(encoding="utf-8")) 36 + config["push"] = { 37 + "apns_key_path": str(key_path), 38 + "apns_key_id": "KEY123", 39 + "apns_team_id": "TEAM123", 40 + "bundle_id": "org.solpbc.solstone-swift", 41 + "environment": "development", 42 + } 43 + config_path.write_text(json.dumps(config, indent=2) + "\n", encoding="utf-8") 44 + 45 + 46 + def _seed_activity(journal_copy: Path, facet: str, day: str, rows: list[dict]) -> None: 47 + path = journal_copy / "facets" / facet / "activities" / f"{day}.jsonl" 48 + path.parent.mkdir(parents=True, exist_ok=True) 49 + path.write_text("\n".join(json.dumps(row) for row in rows) + "\n", encoding="utf-8") 50 + 51 + 52 + def test_push_integration_briefing_dispatch_and_log(journal_copy, monkeypatch): 53 + _write_push_config(journal_copy) 54 + devices.register_device( 55 + token="a" * 64, 56 + bundle_id="org.solpbc.solstone-swift", 57 + environment="development", 58 + platform="ios", 59 + ) 60 + monkeypatch.setattr( 61 + "think.push.runtime.CallosumConnection.start", 62 + lambda self, callback=None: None, 63 + ) 64 + monkeypatch.setattr("think.push.runtime.CallosumConnection.stop", lambda self: None) 65 + monkeypatch.setattr(triggers, "datetime", FixedDateTime) 66 + captured: list[str] = [] 67 + 68 + async def fake_post(self, url, *, headers, json): 69 + captured.append(url) 70 + return httpx.Response(200) 71 + 72 + with patch.object(httpx.AsyncClient, "post", new=fake_post): 73 + app = create_app(str(journal_copy)) 74 + app.config["TESTING"] = True 75 + runtime._on_callosum_message( 76 + {"tract": "cortex", "event": "finish", "name": "morning_briefing"} 77 + ) 78 + 79 + log_path = journal_copy / "push" / "nudge_log.jsonl" 80 + lines = [ 81 + json.loads(line) 82 + for line in log_path.read_text(encoding="utf-8").splitlines() 83 + if line.strip() 84 + ] 85 + assert captured == [f"https://api.sandbox.push.apple.com/3/device/{'a' * 64}"] 86 + assert lines[0]["category"] == "SOLSTONE_DAILY_BRIEFING" 87 + runtime.stop_all_push_runtime() 88 + assert runtime.get_runtime_state() is None 89 + 90 + 91 + def test_push_integration_pre_meeting_and_muted_facet(journal_copy): 92 + _write_push_config(journal_copy) 93 + devices.register_device( 94 + token="b" * 64, 95 + bundle_id="org.solpbc.solstone-swift", 96 + environment="development", 97 + platform="ios", 98 + ) 99 + muted_facet = journal_copy / "facets" / "muted" 100 + muted_facet.mkdir(parents=True, exist_ok=True) 101 + 
(muted_facet / "facet.json").write_text( 102 + json.dumps({"muted": True}), encoding="utf-8" 103 + ) 104 + _seed_activity( 105 + journal_copy, 106 + "montague", 107 + "20260327", 108 + [ 109 + { 110 + "id": "anticipated_meeting_090000_0327", 111 + "source": "anticipated", 112 + "start": "09:00", 113 + "title": "Launch sync", 114 + } 115 + ], 116 + ) 117 + _seed_activity( 118 + journal_copy, 119 + "muted", 120 + "20260327", 121 + [ 122 + { 123 + "id": "anticipated_meeting_090000_muted", 124 + "source": "anticipated", 125 + "start": "09:00", 126 + "title": "Muted meeting", 127 + } 128 + ], 129 + ) 130 + captured: list[str] = [] 131 + 132 + async def fake_post(self, url, *, headers, json): 133 + captured.append(headers["apns-collapse-id"]) 134 + return httpx.Response(200) 135 + 136 + with patch.object(httpx.AsyncClient, "post", new=fake_post): 137 + triggers.check_pre_meeting_prep(datetime(2026, 3, 27, 8, 45, 0)) 138 + 139 + log_path = journal_copy / "push" / "nudge_log.jsonl" 140 + lines = [ 141 + json.loads(line) 142 + for line in log_path.read_text(encoding="utf-8").splitlines() 143 + if line.strip() 144 + ] 145 + assert captured == ["meeting.anticipated_meeting_090000_0327"] 146 + assert any(line["category"] == "SOLSTONE_PRE_MEETING_PREP" for line in lines)
+139
tests/test_push_routes.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + from __future__ import annotations 5 + 6 + import pytest 7 + 8 + from convey import create_app 9 + from think.push.runtime import stop_all_push_runtime 10 + 11 + 12 + @pytest.fixture 13 + def push_app(journal_copy): 14 + app = create_app(str(journal_copy)) 15 + app.config["TESTING"] = True 16 + yield app 17 + stop_all_push_runtime() 18 + 19 + 20 + @pytest.fixture 21 + def push_client(push_app): 22 + return push_app.test_client() 23 + 24 + 25 + def test_register_push_device_happy_path(push_client, monkeypatch): 26 + monkeypatch.setattr("convey.push.register_device", lambda **kwargs: 2) 27 + 28 + response = push_client.post( 29 + "/api/push/register", 30 + json={ 31 + "device_token": "A" * 64, 32 + "bundle_id": "org.solpbc.solstone-swift", 33 + "environment": "development", 34 + "platform": "ios", 35 + }, 36 + ) 37 + 38 + assert response.status_code == 200 39 + assert response.get_json() == {"registered": True, "device_count": 2} 40 + 41 + 42 + def test_register_push_device_rejects_non_object(push_client): 43 + response = push_client.post("/api/push/register", json=["bad"]) 44 + 45 + assert response.status_code == 400 46 + assert response.get_json() == {"error": "request body must be a JSON object"} 47 + 48 + 49 + def test_register_push_device_validates_fields(push_client): 50 + response = push_client.post("/api/push/register", json={"device_token": "x"}) 51 + 52 + assert response.status_code == 400 53 + assert response.get_json() == {"error": "bundle_id is required"} 54 + 55 + 56 + def test_delete_push_device_happy_path(push_client, monkeypatch): 57 + monkeypatch.setattr("convey.push.remove_device", lambda token: True) 58 + monkeypatch.setattr("convey.push.load_devices", lambda: [{"token": "a"}]) 59 + 60 + response = push_client.delete("/api/push/register", json={"device_token": "a" * 64}) 61 + 62 + assert response.status_code == 200 63 + assert response.get_json() == {"removed": True, "device_count": 1} 64 + 65 + 66 + def test_status_masks_tokens(push_client, monkeypatch): 67 + monkeypatch.setattr("convey.push.is_configured", lambda: True) 68 + monkeypatch.setattr( 69 + "convey.push.load_devices", 70 + lambda: [ 71 + { 72 + "token": "a" * 64, 73 + "bundle_id": "org.solpbc.solstone-swift", 74 + "environment": "development", 75 + "platform": "ios", 76 + "registered_at": 2, 77 + } 78 + ], 79 + ) 80 + monkeypatch.setattr( 81 + "convey.push.status_view", 82 + lambda device: { 83 + "token_suffix": "...aaaa", 84 + "bundle_id": device["bundle_id"], 85 + "environment": device["environment"], 86 + "platform": device["platform"], 87 + "registered_at": "2024-04-19T12:00:00Z", 88 + }, 89 + ) 90 + 91 + response = push_client.get("/api/push/status") 92 + 93 + assert response.status_code == 200 94 + assert response.get_json() == { 95 + "configured": True, 96 + "device_count": 1, 97 + "devices": [ 98 + { 99 + "token_suffix": "...aaaa", 100 + "bundle_id": "org.solpbc.solstone-swift", 101 + "environment": "development", 102 + "platform": "ios", 103 + "registered_at": "2024-04-19T12:00:00Z", 104 + } 105 + ], 106 + } 107 + 108 + 109 + def test_push_test_requires_configuration(push_client, monkeypatch): 110 + monkeypatch.setattr("convey.push.is_configured", lambda: False) 111 + 112 + response = push_client.post("/api/push/test") 113 + 114 + assert response.status_code == 503 115 + assert response.get_json() == {"error": "push not configured"} 116 + 117 + 118 + def test_push_test_validates_category(push_client, 
monkeypatch): 119 + monkeypatch.setattr("convey.push.is_configured", lambda: True) 120 + 121 + response = push_client.post("/api/push/test", json={"category": "BAD"}) 122 + 123 + assert response.status_code == 400 124 + assert response.get_json() == {"error": "category must be a known push category"} 125 + 126 + 127 + def test_push_test_happy_path(push_client, monkeypatch): 128 + monkeypatch.setattr("convey.push.is_configured", lambda: True) 129 + monkeypatch.setattr( 130 + "convey.push.triggers.send_agent_alert", 131 + lambda *, title, body, context_id: (1, 0), 132 + ) 133 + 134 + response = push_client.post( 135 + "/api/push/test", json={"title": "Alert", "body": "Body"} 136 + ) 137 + 138 + assert response.status_code == 200 139 + assert response.get_json() == {"sent": 1, "failed": 0}
+95
tests/test_push_runtime.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + from __future__ import annotations 5 + 6 + import pytest 7 + from flask import Flask 8 + 9 + from think.push.runtime import ( 10 + get_runtime_state, 11 + start_push_runtime, 12 + stop_all_push_runtime, 13 + stop_push_runtime, 14 + ) 15 + 16 + 17 + @pytest.fixture(autouse=True) 18 + def reset_runtime(): 19 + stop_all_push_runtime() 20 + yield 21 + stop_all_push_runtime() 22 + 23 + 24 + def test_start_push_runtime_attaches_state(monkeypatch): 25 + calls: list[str] = [] 26 + monkeypatch.setattr( 27 + "think.push.runtime.CallosumConnection.start", 28 + lambda self, callback=None: calls.append("start"), 29 + ) 30 + monkeypatch.setattr( 31 + "think.push.runtime.CallosumConnection.stop", 32 + lambda self: calls.append("stop"), 33 + ) 34 + app = Flask(__name__) 35 + 36 + start_push_runtime(app) 37 + try: 38 + runtime = get_runtime_state() 39 + assert app.push_runtime_started is True 40 + assert runtime is not None 41 + assert runtime.loop is not None 42 + assert runtime.thread is not None 43 + assert calls == ["start"] 44 + finally: 45 + stop_push_runtime(app) 46 + 47 + 48 + def test_start_push_runtime_is_idempotent(monkeypatch): 49 + monkeypatch.setattr( 50 + "think.push.runtime.CallosumConnection.start", lambda self, callback=None: None 51 + ) 52 + monkeypatch.setattr("think.push.runtime.CallosumConnection.stop", lambda self: None) 53 + app = Flask(__name__) 54 + 55 + start_push_runtime(app) 56 + runtime = get_runtime_state() 57 + first_loop = runtime.loop if runtime else None 58 + first_thread = runtime.thread if runtime else None 59 + try: 60 + start_push_runtime(app) 61 + runtime = get_runtime_state() 62 + assert runtime is not None 63 + assert runtime.loop is first_loop 64 + assert runtime.thread is first_thread 65 + assert runtime.apps.count(app) == 1 66 + finally: 67 + stop_push_runtime(app) 68 + 69 + 70 + def test_stop_push_runtime_cleans_last_app(monkeypatch): 71 + monkeypatch.setattr( 72 + "think.push.runtime.CallosumConnection.start", lambda self, callback=None: None 73 + ) 74 + monkeypatch.setattr("think.push.runtime.CallosumConnection.stop", lambda self: None) 75 + app = Flask(__name__) 76 + 77 + start_push_runtime(app) 78 + stop_push_runtime(app) 79 + 80 + assert app.push_runtime_started is False 81 + assert get_runtime_state() is None 82 + 83 + 84 + def test_stop_all_push_runtime_clears_runtime(monkeypatch): 85 + monkeypatch.setattr( 86 + "think.push.runtime.CallosumConnection.start", lambda self, callback=None: None 87 + ) 88 + monkeypatch.setattr("think.push.runtime.CallosumConnection.stop", lambda self: None) 89 + app = Flask(__name__) 90 + 91 + start_push_runtime(app) 92 + stop_all_push_runtime() 93 + 94 + assert app.push_runtime_started is False 95 + assert get_runtime_state() is None
+212
tests/test_push_triggers.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + from __future__ import annotations 5 + 6 + import json 7 + from datetime import datetime 8 + from pathlib import Path 9 + 10 + from think.push import triggers 11 + 12 + 13 + def _log_path(tmp_path: Path) -> Path: 14 + return tmp_path / "push" / "nudge_log.jsonl" 15 + 16 + 17 + def test_handle_briefing_finish_polls_until_briefing_exists(monkeypatch, tmp_path): 18 + monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 19 + responses = iter( 20 + [ 21 + ({}, None, []), 22 + ({}, None, []), 23 + ( 24 + {"needs_attention": "- item"}, 25 + {"generated": "2026-04-19T06:45:00"}, 26 + ["one"], 27 + ), 28 + ] 29 + ) 30 + sent_calls: list[dict[str, object]] = [] 31 + monkeypatch.setattr(triggers, "_load_briefing_md", lambda today: next(responses)) 32 + monkeypatch.setattr(triggers.time, "sleep", lambda seconds: None) 33 + monkeypatch.setattr(triggers, "_eligible_devices", lambda: [{"token": "a" * 64}]) 34 + monkeypatch.setattr( 35 + triggers, 36 + "send_many", 37 + lambda devices, payload, *, collapse_id: ( 38 + sent_calls.append( 39 + {"devices": devices, "payload": payload, "collapse_id": collapse_id} 40 + ) 41 + or (1, 0) 42 + ), 43 + ) 44 + 45 + triggers.handle_briefing_finish( 46 + {"tract": "cortex", "event": "finish", "name": "morning_briefing"} 47 + ) 48 + 49 + assert len(sent_calls) == 1 50 + assert sent_calls[0]["collapse_id"].startswith("briefing.") 51 + log_lines = _log_path(tmp_path).read_text(encoding="utf-8").splitlines() 52 + assert len(log_lines) == 1 53 + 54 + 55 + def test_handle_briefing_finish_is_idempotent(monkeypatch, tmp_path): 56 + monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 57 + sent_calls: list[str] = [] 58 + monkeypatch.setattr( 59 + triggers, 60 + "_load_briefing_md", 61 + lambda today: ( 62 + {"needs_attention": "- item"}, 63 + {"generated": "2026-04-19T06:45:00"}, 64 + ["one"], 65 + ), 66 + ) 67 + monkeypatch.setattr(triggers.time, "sleep", lambda seconds: None) 68 + monkeypatch.setattr(triggers, "_eligible_devices", lambda: [{"token": "a" * 64}]) 69 + monkeypatch.setattr( 70 + triggers, 71 + "send_many", 72 + lambda devices, payload, *, collapse_id: ( 73 + sent_calls.append(collapse_id) or (1, 0) 74 + ), 75 + ) 76 + 77 + message = {"tract": "cortex", "event": "finish", "name": "morning_briefing"} 78 + triggers.handle_briefing_finish(message) 79 + triggers.handle_briefing_finish(message) 80 + 81 + assert sent_calls == [sent_calls[0]] 82 + 83 + 84 + def test_check_pre_meeting_prep_skips_muted_facets(monkeypatch, tmp_path): 85 + monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 86 + monkeypatch.setattr(triggers, "_eligible_devices", lambda: [{"token": "a" * 64}]) 87 + monkeypatch.setattr(triggers, "get_enabled_facets", lambda: {}) 88 + sent_calls: list[str] = [] 89 + monkeypatch.setattr( 90 + triggers, 91 + "send_many", 92 + lambda devices, payload, *, collapse_id: ( 93 + sent_calls.append(collapse_id) or (1, 0) 94 + ), 95 + ) 96 + 97 + triggers.check_pre_meeting_prep(datetime(2026, 4, 20, 8, 45, 0)) 98 + 99 + assert sent_calls == [] 100 + 101 + 102 + def test_check_pre_meeting_prep_skips_non_anticipated(monkeypatch, tmp_path): 103 + monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 104 + monkeypatch.setattr(triggers, "_eligible_devices", lambda: [{"token": "a" * 64}]) 105 + monkeypatch.setattr(triggers, "get_enabled_facets", lambda: {"work": {}}) 106 + monkeypatch.setattr( 107 + triggers, 108 + "load_activity_records", 109 + 
lambda facet, day: [{"id": "meeting", "source": "cogitate", "start": "09:00"}], 110 + ) 111 + sent_calls: list[str] = [] 112 + monkeypatch.setattr( 113 + triggers, 114 + "send_many", 115 + lambda devices, payload, *, collapse_id: ( 116 + sent_calls.append(collapse_id) or (1, 0) 117 + ), 118 + ) 119 + 120 + triggers.check_pre_meeting_prep(datetime(2026, 4, 20, 8, 45, 0)) 121 + 122 + assert sent_calls == [] 123 + 124 + 125 + def test_check_pre_meeting_prep_fires_for_hhmm_and_hhmmss(monkeypatch, tmp_path): 126 + monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 127 + monkeypatch.setattr(triggers, "_eligible_devices", lambda: [{"token": "a" * 64}]) 128 + monkeypatch.setattr(triggers, "get_enabled_facets", lambda: {"work": {}}) 129 + monkeypatch.setattr( 130 + triggers, 131 + "load_activity_records", 132 + lambda facet, day: [ 133 + { 134 + "id": "anticipated_meeting_090000_0420", 135 + "source": "anticipated", 136 + "start": "09:00", 137 + "title": "Launch sync", 138 + }, 139 + { 140 + "id": "anticipated_call_090030_0420", 141 + "source": "anticipated", 142 + "start": "09:00:30", 143 + "title": "Prep call", 144 + }, 145 + ], 146 + ) 147 + sent_calls: list[str] = [] 148 + monkeypatch.setattr( 149 + triggers, 150 + "send_many", 151 + lambda devices, payload, *, collapse_id: ( 152 + sent_calls.append(collapse_id) or (1, 0) 153 + ), 154 + ) 155 + 156 + triggers.check_pre_meeting_prep(datetime(2026, 4, 20, 8, 45, 0)) 157 + 158 + assert sent_calls == [ 159 + "meeting.anticipated_meeting_090000_0420", 160 + "meeting.anticipated_call_090030_0420", 161 + ] 162 + 163 + 164 + def test_check_pre_meeting_prep_zero_devices_skips_log(monkeypatch, tmp_path): 165 + monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 166 + monkeypatch.setattr(triggers, "_eligible_devices", lambda: []) 167 + monkeypatch.setattr(triggers, "get_enabled_facets", lambda: {"work": {}}) 168 + monkeypatch.setattr( 169 + triggers, 170 + "load_activity_records", 171 + lambda facet, day: [ 172 + { 173 + "id": "anticipated_meeting_090000_0420", 174 + "source": "anticipated", 175 + "start": "09:00", 176 + "title": "Launch sync", 177 + } 178 + ], 179 + ) 180 + 181 + triggers.check_pre_meeting_prep(datetime(2026, 4, 20, 8, 45, 0)) 182 + 183 + assert _log_path(tmp_path).exists() is False 184 + 185 + 186 + def test_send_agent_alert_same_context_id_fires_once(monkeypatch, tmp_path): 187 + monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path)) 188 + monkeypatch.setattr(triggers, "_eligible_devices", lambda: [{"token": "a" * 64}]) 189 + sent_calls: list[str] = [] 190 + monkeypatch.setattr( 191 + triggers, 192 + "send_many", 193 + lambda devices, payload, *, collapse_id: ( 194 + sent_calls.append(collapse_id) or (1, 0) 195 + ), 196 + ) 197 + 198 + first = triggers.send_agent_alert( 199 + title="Agent Alert", body="Needs review", context_id="ctx-1" 200 + ) 201 + second = triggers.send_agent_alert( 202 + title="Agent Alert", body="Needs review", context_id="ctx-1" 203 + ) 204 + 205 + assert first == (1, 0) 206 + assert second == (0, 0) 207 + assert sent_calls == ["alert.ctx-1"] 208 + lines = [ 209 + json.loads(line) 210 + for line in _log_path(tmp_path).read_text(encoding="utf-8").splitlines() 211 + ] 212 + assert len(lines) == 1
+7
think/journal_default.json
··· 37 37 "model": "gpt-realtime", 38 38 "brain_model": "haiku" 39 39 }, 40 + "push": { 41 + "apns_key_path": null, 42 + "apns_key_id": null, 43 + "apns_team_id": null, 44 + "bundle_id": null, 45 + "environment": "development" 46 + }, 40 47 "retention": { 41 48 "raw_media": "days", 42 49 "raw_media_days": 7,
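For reference, a journal that enables push would fill these defaults in along the lines of the block below. Key id, team id, and key path are illustrative placeholders (the path must be absolute for is_configured to pass); the bundle_id matches the one used throughout the tests.

"push": {
  "apns_key_path": "/home/user/journal/keys/apns.p8",
  "apns_key_id": "KEY123",
  "apns_team_id": "TEAM123",
  "bundle_id": "org.solpbc.solstone-swift",
  "environment": "development"
}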
+20
think/push/__init__.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + """Push package.""" 5 + 6 + from think.push.runtime import ( 7 + get_runtime_state, 8 + start_push_runtime, 9 + stop_all_push_runtime, 10 + stop_push_runtime, 11 + ) 12 + from think.push.triggers import send_agent_alert 13 + 14 + __all__ = [ 15 + "get_runtime_state", 16 + "send_agent_alert", 17 + "start_push_runtime", 18 + "stop_all_push_runtime", 19 + "stop_push_runtime", 20 + ]
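The package surface means agent code can fire an alert without importing the submodules directly. A minimal sketch (title, body, and context_id are illustrative):

from think.push import send_agent_alert

# Deduped on context_id: a repeat call with "ctx-1" returns (0, 0) and sends nothing.
sent, failed = send_agent_alert(
    title="Agent Alert", body="Needs review", context_id="ctx-1"
)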
+92
think/push/config.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + """Push config readers.""" 5 + 6 + from __future__ import annotations 7 + 8 + from pathlib import Path 9 + from typing import Any 10 + 11 + from think.utils import get_config 12 + 13 + DEFAULT_ENVIRONMENT = "development" 14 + _VALID_ENVIRONMENTS = {"development", "production"} 15 + 16 + 17 + def _push_config() -> dict[str, Any]: 18 + config = get_config() 19 + push = config.get("push") 20 + return push if isinstance(push, dict) else {} 21 + 22 + 23 + def _clean_str(value: Any) -> str | None: 24 + if not isinstance(value, str): 25 + return None 26 + cleaned = value.strip() 27 + return cleaned or None 28 + 29 + 30 + def get_apns_key_path() -> Path | None: 31 + configured = _clean_str(_push_config().get("apns_key_path")) 32 + return Path(configured) if configured else None 33 + 34 + 35 + def get_apns_key_id() -> str | None: 36 + return _clean_str(_push_config().get("apns_key_id")) 37 + 38 + 39 + def get_apns_team_id() -> str | None: 40 + return _clean_str(_push_config().get("apns_team_id")) 41 + 42 + 43 + def get_bundle_id() -> str | None: 44 + return _clean_str(_push_config().get("bundle_id")) 45 + 46 + 47 + def get_environment() -> str: 48 + configured = _clean_str(_push_config().get("environment")) 49 + if configured is None: 50 + return DEFAULT_ENVIRONMENT 51 + if configured not in _VALID_ENVIRONMENTS: 52 + raise ValueError( 53 + "push.environment must be 'development' or 'production' when set" 54 + ) 55 + return configured 56 + 57 + 58 + def _has_valid_key_path() -> bool: 59 + key_path = get_apns_key_path() 60 + if key_path is None or not key_path.is_absolute() or not key_path.is_file(): 61 + return False 62 + try: 63 + key_path.read_text(encoding="utf-8") 64 + except OSError: 65 + return False 66 + return True 67 + 68 + 69 + def is_configured() -> bool: 70 + if not ( 71 + get_apns_key_path() 72 + and get_apns_key_id() 73 + and get_apns_team_id() 74 + and get_bundle_id() 75 + ): 76 + return False 77 + try: 78 + get_environment() 79 + except ValueError: 80 + return False 81 + return _has_valid_key_path() 82 + 83 + 84 + __all__ = [ 85 + "DEFAULT_ENVIRONMENT", 86 + "get_apns_key_id", 87 + "get_apns_key_path", 88 + "get_apns_team_id", 89 + "get_bundle_id", 90 + "get_environment", 91 + "is_configured", 92 + ]
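A short sketch of how dispatch code is expected to gate on this module; the environment-to-host mapping noted in the comment comes from dispatch._apns_host later in this change.

from think.push.config import get_environment, is_configured

if is_configured():
    # "development" routes to api.sandbox.push.apple.com,
    # "production" to api.push.apple.com.
    environment = get_environment()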
+153
think/push/devices.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + """Push device storage.""" 5 + 6 + from __future__ import annotations 7 + 8 + import json 9 + import logging 10 + import time 11 + from datetime import datetime, timezone 12 + from pathlib import Path 13 + from typing import Any 14 + 15 + from think.entities.core import atomic_write 16 + from think.utils import get_journal 17 + 18 + logger = logging.getLogger("solstone.push.devices") 19 + 20 + 21 + def _devices_path() -> Path: 22 + return Path(get_journal()) / "config" / "push_devices.json" 23 + 24 + 25 + def _empty_store() -> dict[str, list[dict[str, Any]]]: 26 + return {"devices": []} 27 + 28 + 29 + def _validate_store(payload: Any) -> list[dict[str, Any]]: 30 + if not isinstance(payload, dict): 31 + raise ValueError("push device store must be a JSON object") 32 + devices = payload.get("devices") 33 + if not isinstance(devices, list): 34 + raise ValueError("push device store must contain a devices list") 35 + normalized: list[dict[str, Any]] = [] 36 + for device in devices: 37 + if not isinstance(device, dict): 38 + raise ValueError("push device rows must be JSON objects") 39 + token = str(device.get("token") or "").strip() 40 + bundle_id = str(device.get("bundle_id") or "").strip() 41 + environment = str(device.get("environment") or "").strip() 42 + platform = str(device.get("platform") or "").strip() 43 + registered_at = device.get("registered_at") 44 + if ( 45 + not token 46 + or not bundle_id 47 + or not environment 48 + or not platform 49 + or not isinstance(registered_at, (int, float)) 50 + ): 51 + raise ValueError("push device row missing required fields") 52 + normalized.append( 53 + { 54 + "token": token, 55 + "bundle_id": bundle_id, 56 + "environment": environment, 57 + "platform": platform, 58 + "registered_at": int(registered_at), 59 + } 60 + ) 61 + return normalized 62 + 63 + 64 + def _read_store() -> list[dict[str, Any]]: 65 + path = _devices_path() 66 + if not path.exists(): 67 + return [] 68 + try: 69 + payload = json.loads(path.read_text(encoding="utf-8")) 70 + return _validate_store(payload) 71 + except Exception as exc: 72 + logger.warning("push device store unreadable path=%s error=%s", path, exc) 73 + return [] 74 + 75 + 76 + def _write_store(devices: list[dict[str, Any]]) -> None: 77 + payload = json.dumps({"devices": devices}, indent=2, ensure_ascii=False) + "\n" 78 + atomic_write(_devices_path(), payload, prefix=".push_devices_") 79 + 80 + 81 + def load_devices() -> list[dict[str, Any]]: 82 + return _read_store() 83 + 84 + 85 + def register_device( 86 + *, token: str, bundle_id: str, environment: str, platform: str 87 + ) -> int: 88 + devices = load_devices() 89 + registered_at = int(time.time()) 90 + updated = False 91 + for device in devices: 92 + if device["token"] != token: 93 + continue 94 + device.update( 95 + { 96 + "bundle_id": bundle_id, 97 + "environment": environment, 98 + "platform": platform, 99 + "registered_at": registered_at, 100 + } 101 + ) 102 + updated = True 103 + break 104 + if not updated: 105 + devices.append( 106 + { 107 + "token": token, 108 + "bundle_id": bundle_id, 109 + "environment": environment, 110 + "platform": platform, 111 + "registered_at": registered_at, 112 + } 113 + ) 114 + _write_store(devices) 115 + return len(devices) 116 + 117 + 118 + def remove_device(token: str) -> bool: 119 + devices = load_devices() 120 + remaining = [device for device in devices if device["token"] != token] 121 + if len(remaining) == len(devices): 122 + return 
False 123 + _write_store(remaining) 124 + return True 125 + 126 + 127 + def mask_token(token: str) -> str: 128 + return "..." + str(token or "")[-4:] 129 + 130 + 131 + def status_view(device: dict[str, Any]) -> dict[str, Any]: 132 + registered_at = int(device["registered_at"]) 133 + registered_at_label = ( 134 + datetime.fromtimestamp(registered_at, tz=timezone.utc) 135 + .isoformat() 136 + .replace("+00:00", "Z") 137 + ) 138 + return { 139 + "token_suffix": mask_token(device.get("token", "")), 140 + "bundle_id": device["bundle_id"], 141 + "environment": device["environment"], 142 + "platform": device["platform"], 143 + "registered_at": registered_at_label, 144 + } 145 + 146 + 147 + __all__ = [ 148 + "load_devices", 149 + "mask_token", 150 + "register_device", 151 + "remove_device", 152 + "status_view", 153 + ]
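On disk, config/push_devices.json is a single object with a devices list. A representative store (the token value is a placeholder; real tokens are 64 hex chars and are never logged in full):

{
  "devices": [
    {
      "token": "<64-char hex device token>",
      "bundle_id": "org.solpbc.solstone-swift",
      "environment": "development",
      "platform": "ios",
      "registered_at": 1713528000
    }
  ]
}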
+378
think/push/dispatch.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + """APNs transport for push notifications.""" 5 + 6 + from __future__ import annotations 7 + 8 + import asyncio 9 + import json 10 + import logging 11 + import threading 12 + import time 13 + from pathlib import Path 14 + from typing import Any 15 + 16 + import httpx 17 + import jwt 18 + 19 + from think.push import devices 20 + from think.push.config import ( 21 + get_apns_key_id, 22 + get_apns_key_path, 23 + get_apns_team_id, 24 + get_bundle_id, 25 + get_environment, 26 + ) 27 + 28 + logger = logging.getLogger("solstone.push.dispatch") 29 + 30 + CATEGORY_DAILY_BRIEFING = "SOLSTONE_DAILY_BRIEFING" 31 + CATEGORY_PRE_MEETING_PREP = "SOLSTONE_PRE_MEETING_PREP" 32 + CATEGORY_AGENT_ALERT = "SOLSTONE_AGENT_ALERT" 33 + CATEGORY_COMMITMENT_NUDGE = "SOLSTONE_COMMITMENT_NUDGE" 34 + CATEGORIES = ( 35 + CATEGORY_DAILY_BRIEFING, 36 + CATEGORY_PRE_MEETING_PREP, 37 + CATEGORY_AGENT_ALERT, 38 + CATEGORY_COMMITMENT_NUDGE, 39 + ) 40 + _JWT_MAX_AGE_SECONDS = 55 * 60 41 + _APNS_JWT_CACHE: dict[tuple[str, str], tuple[str, int]] = {} 42 + _APNS_JWT_CACHE_LOCK = threading.Lock() 43 + 44 + 45 + def _require_bundle_id() -> str: 46 + bundle_id = get_bundle_id() 47 + if bundle_id is None: 48 + raise RuntimeError("push.bundle_id is not configured") 49 + return bundle_id 50 + 51 + 52 + def _require_key_id() -> str: 53 + key_id = get_apns_key_id() 54 + if key_id is None: 55 + raise RuntimeError("push.apns_key_id is not configured") 56 + return key_id 57 + 58 + 59 + def _require_team_id() -> str: 60 + team_id = get_apns_team_id() 61 + if team_id is None: 62 + raise RuntimeError("push.apns_team_id is not configured") 63 + return team_id 64 + 65 + 66 + def _require_key_path() -> Path: 67 + key_path = get_apns_key_path() 68 + if key_path is None: 69 + raise RuntimeError("push.apns_key_path is not configured") 70 + if not key_path.is_absolute(): 71 + raise ValueError("push.apns_key_path must be an absolute path") 72 + if not key_path.exists(): 73 + raise FileNotFoundError(f"APNs key file not found: {key_path}") 74 + if not key_path.is_file(): 75 + raise RuntimeError(f"APNs key file is not a regular file: {key_path}") 76 + return key_path 77 + 78 + 79 + def _mint_apns_jwt(*, now: int | None = None) -> str: 80 + issued_at = int(time.time()) if now is None else now 81 + key_id = _require_key_id() 82 + team_id = _require_team_id() 83 + cache_key = (key_id, team_id) 84 + with _APNS_JWT_CACHE_LOCK: 85 + cached = _APNS_JWT_CACHE.get(cache_key) 86 + if cached and issued_at - cached[1] <= _JWT_MAX_AGE_SECONDS: 87 + return cached[0] 88 + token = jwt.encode( 89 + {"iss": team_id, "iat": issued_at}, 90 + _require_key_path().read_text(encoding="utf-8"), 91 + algorithm="ES256", 92 + headers={"alg": "ES256", "kid": key_id}, 93 + ) 94 + _APNS_JWT_CACHE[cache_key] = (token, issued_at) 95 + return token 96 + 97 + 98 + def build_daily_briefing_collapse_id(day: str) -> str: 99 + return f"briefing.{day}" 100 + 101 + 102 + def build_pre_meeting_collapse_id(activity_id: str) -> str: 103 + return f"meeting.{activity_id}" 104 + 105 + 106 + def build_agent_alert_collapse_id(context_id: str) -> str: 107 + return f"alert.{context_id}" 108 + 109 + 110 + def build_commitment_collapse_id(ledger_id: str) -> str: 111 + return f"commitment.{ledger_id}" 112 + 113 + 114 + def build_daily_briefing_payload( 115 + *, day: str, generated: str | None, needs_attention_count: int 116 + ) -> dict[str, Any]: 117 + return { 118 + "aps": { 119 + "alert": { 120 + "title": "Daily 
Briefing", 121 + "body": "Your briefing is ready — tap to view", 122 + }, 123 + "category": CATEGORY_DAILY_BRIEFING, 124 + "sound": "default", 125 + "mutable-content": 1, 126 + "content-available": 1, 127 + }, 128 + "data": { 129 + "action": "open_briefing", 130 + "day": day, 131 + "generated": generated, 132 + "needs_attention_count": needs_attention_count, 133 + }, 134 + } 135 + 136 + 137 + def build_pre_meeting_payload( 138 + *, activity: dict[str, Any], facet: str, day: str 139 + ) -> dict[str, Any]: 140 + participants = [ 141 + str(entry.get("name") or "").strip() 142 + for entry in activity.get("participation", []) 143 + if isinstance(entry, dict) 144 + and entry.get("role") == "attendee" 145 + and str(entry.get("name") or "").strip() 146 + ] 147 + return { 148 + "aps": { 149 + "alert": { 150 + "title": "Pre-Meeting Prep", 151 + "body": "Meeting in 15 minutes — tap to view", 152 + }, 153 + "category": CATEGORY_PRE_MEETING_PREP, 154 + "sound": "default", 155 + "mutable-content": 1, 156 + "content-available": 1, 157 + "interruption-level": "time-sensitive", 158 + }, 159 + "data": { 160 + "action": "open_pre_meeting", 161 + "activity_id": str(activity.get("id") or ""), 162 + "facet": facet, 163 + "day": day, 164 + "start": str(activity.get("start") or ""), 165 + "title": str(activity.get("title") or ""), 166 + "location": str(activity.get("location") or ""), 167 + "participants": participants, 168 + "prep_notes": str(activity.get("prep_notes") or ""), 169 + }, 170 + } 171 + 172 + 173 + def build_agent_alert_payload( 174 + *, title: str, body: str, context_id: str 175 + ) -> dict[str, Any]: 176 + return { 177 + "aps": { 178 + "alert": {"title": title, "body": body}, 179 + "category": CATEGORY_AGENT_ALERT, 180 + "sound": "default", 181 + "mutable-content": 1, 182 + "content-available": 1, 183 + }, 184 + "data": {"action": "open_alert", "context_id": context_id}, 185 + } 186 + 187 + 188 + def build_commitment_payload(*, ledger_id: str) -> dict[str, Any]: 189 + return { 190 + "aps": { 191 + "alert": { 192 + "title": "Commitment Nudge", 193 + "body": "A commitment needs attention — tap to view", 194 + }, 195 + "category": CATEGORY_COMMITMENT_NUDGE, 196 + "sound": "default", 197 + "mutable-content": 1, 198 + "content-available": 1, 199 + }, 200 + "data": {"action": "open_commitment", "ledger_id": ledger_id}, 201 + } 202 + 203 + 204 + def _apns_host() -> str: 205 + environment = get_environment() 206 + if environment == "production": 207 + return "https://api.push.apple.com" 208 + return "https://api.sandbox.push.apple.com" 209 + 210 + 211 + def _headers(*, collapse_id: str, priority: int) -> dict[str, str]: 212 + return { 213 + "apns-topic": _require_bundle_id(), 214 + "apns-collapse-id": collapse_id, 215 + "apns-priority": str(priority), 216 + "apns-push-type": "alert", 217 + "authorization": f"bearer {_mint_apns_jwt()}", 218 + } 219 + 220 + 221 + def _response_reason(response: httpx.Response) -> str | None: 222 + try: 223 + payload = response.json() 224 + except (json.JSONDecodeError, ValueError): 225 + return None 226 + if not isinstance(payload, dict): 227 + return None 228 + reason = payload.get("reason") 229 + return str(reason) if isinstance(reason, str) and reason else None 230 + 231 + 232 + def _run_async(coro: Any) -> Any: 233 + try: 234 + asyncio.get_running_loop() 235 + except RuntimeError: 236 + return asyncio.run(coro) 237 + result: dict[str, Any] = {} 238 + error: dict[str, BaseException] = {} 239 + 240 + def runner() -> None: 241 + try: 242 + result["value"] = 
asyncio.run(coro) 243 + except BaseException as exc: 244 + error["value"] = exc 245 + 246 + thread = threading.Thread(target=runner, name="push-dispatch", daemon=True) 247 + thread.start() 248 + thread.join() 249 + if "value" in error: 250 + raise error["value"] 251 + return result.get("value") 252 + 253 + 254 + async def _send_with_client( 255 + client: httpx.AsyncClient, 256 + device: dict[str, Any], 257 + payload: dict[str, Any], 258 + *, 259 + collapse_id: str, 260 + priority: int, 261 + ) -> tuple[bool, str | None]: 262 + token = str(device.get("token") or "") 263 + masked_token = devices.mask_token(token) 264 + try: 265 + response = await client.post( 266 + f"{_apns_host()}/3/device/{token}", 267 + headers=_headers(collapse_id=collapse_id, priority=priority), 268 + json=payload, 269 + ) 270 + except Exception as exc: 271 + logger.warning("push delivery failed token=%s error=%s", masked_token, exc) 272 + return False, str(exc) 273 + reason = _response_reason(response) 274 + if response.status_code == 200: 275 + return True, None 276 + if response.status_code == 410 or reason in {"BadDeviceToken", "Unregistered"}: 277 + devices.remove_device(token) 278 + logger.warning( 279 + "push pruning token=%s status=%s reason=%s", 280 + masked_token, 281 + response.status_code, 282 + reason or "", 283 + ) 284 + return False, reason 285 + if 500 <= response.status_code: 286 + logger.warning( 287 + "push rejected token=%s status=%s reason=%s", 288 + masked_token, 289 + response.status_code, 290 + reason or "", 291 + ) 292 + return False, reason 293 + logger.error( 294 + "push rejected token=%s status=%s reason=%s", 295 + masked_token, 296 + response.status_code, 297 + reason or "", 298 + ) 299 + return False, reason 300 + 301 + 302 + async def _send_async( 303 + device: dict[str, Any], 304 + payload: dict[str, Any], 305 + *, 306 + collapse_id: str, 307 + priority: int = 10, 308 + ) -> tuple[bool, str | None]: 309 + async with httpx.AsyncClient(http2=True, timeout=10.0) as client: 310 + return await _send_with_client( 311 + client, device, payload, collapse_id=collapse_id, priority=priority 312 + ) 313 + 314 + 315 + def send( 316 + device: dict[str, Any], 317 + payload: dict[str, Any], 318 + *, 319 + collapse_id: str, 320 + priority: int = 10, 321 + ) -> tuple[bool, str | None]: 322 + return _run_async( 323 + _send_async(device, payload, collapse_id=collapse_id, priority=priority) 324 + ) 325 + 326 + 327 + async def _send_many_async( 328 + push_devices: list[dict[str, Any]], 329 + payload: dict[str, Any], 330 + *, 331 + collapse_id: str, 332 + priority: int = 10, 333 + ) -> tuple[int, int]: 334 + sent = 0 335 + failed = 0 336 + async with httpx.AsyncClient(http2=True, timeout=10.0) as client: 337 + for device in push_devices: 338 + ok, _ = await _send_with_client( 339 + client, device, payload, collapse_id=collapse_id, priority=priority 340 + ) 341 + if ok: 342 + sent += 1 343 + else: 344 + failed += 1 345 + return sent, failed 346 + 347 + 348 + def send_many( 349 + push_devices: list[dict[str, Any]], 350 + payload: dict[str, Any], 351 + *, 352 + collapse_id: str, 353 + priority: int = 10, 354 + ) -> tuple[int, int]: 355 + return _run_async( 356 + _send_many_async( 357 + push_devices, payload, collapse_id=collapse_id, priority=priority 358 + ) 359 + ) 360 + 361 + 362 + __all__ = [ 363 + "CATEGORIES", 364 + "CATEGORY_AGENT_ALERT", 365 + "CATEGORY_COMMITMENT_NUDGE", 366 + "CATEGORY_DAILY_BRIEFING", 367 + "CATEGORY_PRE_MEETING_PREP", 368 + "build_agent_alert_collapse_id", 369 + 
"build_agent_alert_payload", 370 + "build_commitment_collapse_id", 371 + "build_commitment_payload", 372 + "build_daily_briefing_collapse_id", 373 + "build_daily_briefing_payload", 374 + "build_pre_meeting_collapse_id", 375 + "build_pre_meeting_payload", 376 + "send", 377 + "send_many", 378 + ]
+144
think/push/runtime.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + """Background runtime for push tasks.""" 5 + 6 + from __future__ import annotations 7 + 8 + import asyncio 9 + import atexit 10 + import logging 11 + import threading 12 + from dataclasses import dataclass, field 13 + from datetime import datetime 14 + from typing import Any 15 + 16 + from think.callosum import CallosumConnection 17 + from think.push import triggers 18 + 19 + logger = logging.getLogger("solstone.push.runtime") 20 + 21 + 22 + @dataclass 23 + class RuntimeState: 24 + loop: asyncio.AbstractEventLoop | None = None 25 + thread: threading.Thread | None = None 26 + started_event: threading.Event = field(default_factory=threading.Event) 27 + apps: list[Any] = field(default_factory=list) 28 + callosum: CallosumConnection | None = None 29 + periodic_task: asyncio.Task[Any] | None = None 30 + 31 + 32 + _RUNTIME_LOCK = threading.Lock() 33 + _runtime: RuntimeState | None = None 34 + _atexit_registered = False 35 + 36 + 37 + def get_runtime_state() -> RuntimeState | None: 38 + return _runtime 39 + 40 + 41 + def _on_callosum_message(message: dict[str, Any]) -> None: 42 + try: 43 + triggers.handle_briefing_finish(message) 44 + except Exception: 45 + logger.exception("push callosum handler failed") 46 + 47 + 48 + async def _periodic_loop() -> None: 49 + while True: 50 + await asyncio.sleep(60) 51 + try: 52 + triggers.check_pre_meeting_prep(datetime.now()) 53 + except Exception: 54 + logger.exception("push periodic check failed") 55 + 56 + 57 + def _thread_main(runtime: RuntimeState) -> None: 58 + loop = asyncio.new_event_loop() 59 + asyncio.set_event_loop(loop) 60 + runtime.loop = loop 61 + runtime.callosum = CallosumConnection() 62 + runtime.callosum.start(callback=_on_callosum_message) 63 + runtime.periodic_task = loop.create_task(_periodic_loop()) 64 + runtime.started_event.set() 65 + try: 66 + loop.run_forever() 67 + finally: 68 + pending = [task for task in asyncio.all_tasks(loop) if not task.done()] 69 + for task in pending: 70 + task.cancel() 71 + if pending: 72 + loop.run_until_complete(asyncio.gather(*pending, return_exceptions=True)) 73 + loop.close() 74 + 75 + 76 + def start_push_runtime(app: Any) -> None: 77 + global _runtime, _atexit_registered 78 + 79 + with _RUNTIME_LOCK: 80 + if _runtime is None: 81 + runtime = RuntimeState() 82 + thread = threading.Thread( 83 + target=_thread_main, 84 + args=(runtime,), 85 + name="push-runtime", 86 + daemon=True, 87 + ) 88 + runtime.thread = thread 89 + _runtime = runtime 90 + thread.start() 91 + runtime = _runtime 92 + if app not in runtime.apps: 93 + runtime.apps.append(app) 94 + app.push_runtime_started = True 95 + if not _atexit_registered: 96 + atexit.register(stop_all_push_runtime) 97 + _atexit_registered = True 98 + started_event = runtime.started_event 99 + started_event.wait(timeout=1.0) 100 + 101 + 102 + def stop_push_runtime(app: Any) -> None: 103 + runtime = _runtime 104 + app.push_runtime_started = False 105 + if runtime is None: 106 + return 107 + with _RUNTIME_LOCK: 108 + if app in runtime.apps: 109 + runtime.apps.remove(app) 110 + remaining = list(runtime.apps) 111 + if not remaining: 112 + stop_all_push_runtime() 113 + 114 + 115 + def stop_all_push_runtime() -> None: 116 + global _runtime 117 + 118 + with _RUNTIME_LOCK: 119 + runtime = _runtime 120 + _runtime = None 121 + if runtime is None: 122 + return 123 + for app in list(runtime.apps): 124 + try: 125 + app.push_runtime_started = False 126 + except Exception: 127 + 
logger.exception("push runtime app cleanup failed") 128 + if runtime.callosum is not None: 129 + runtime.callosum.stop() 130 + if runtime.loop is not None: 131 + if runtime.periodic_task is not None: 132 + runtime.loop.call_soon_threadsafe(runtime.periodic_task.cancel) 133 + runtime.loop.call_soon_threadsafe(runtime.loop.stop) 134 + if runtime.thread is not None: 135 + runtime.thread.join(timeout=1.0) 136 + 137 + 138 + __all__ = [ 139 + "RuntimeState", 140 + "get_runtime_state", 141 + "start_push_runtime", 142 + "stop_all_push_runtime", 143 + "stop_push_runtime", 144 + ]
+240
think/push/triggers.py
··· 1 + # SPDX-License-Identifier: AGPL-3.0-only 2 + # Copyright (c) 2026 sol pbc 3 + 4 + """Push trigger handlers.""" 5 + 6 + from __future__ import annotations 7 + 8 + import json 9 + import logging 10 + import time 11 + from datetime import datetime 12 + from pathlib import Path 13 + from typing import Any 14 + 15 + from apps.home.routes import _load_briefing_md 16 + from think.activities import load_activity_records 17 + from think.facets import get_enabled_facets 18 + from think.push.config import get_bundle_id, get_environment, is_configured 19 + from think.push.devices import load_devices 20 + from think.push.dispatch import ( 21 + CATEGORY_AGENT_ALERT, 22 + CATEGORY_DAILY_BRIEFING, 23 + CATEGORY_PRE_MEETING_PREP, 24 + build_agent_alert_collapse_id, 25 + build_agent_alert_payload, 26 + build_daily_briefing_collapse_id, 27 + build_daily_briefing_payload, 28 + build_pre_meeting_collapse_id, 29 + build_pre_meeting_payload, 30 + send_many, 31 + ) 32 + from think.utils import get_journal 33 + 34 + logger = logging.getLogger("solstone.push.triggers") 35 + 36 + 37 + def _nudge_log_path() -> Path: 38 + return Path(get_journal()) / "push" / "nudge_log.jsonl" 39 + 40 + 41 + def _serialize_dedupe_key(dedupe_key: tuple[Any, ...]) -> str: 42 + return json.dumps(list(dedupe_key), separators=(",", ":"), ensure_ascii=False) 43 + 44 + 45 + def _has_nudged(dedupe_key: tuple[Any, ...]) -> bool: 46 + path = _nudge_log_path() 47 + if not path.exists(): 48 + return False 49 + encoded = _serialize_dedupe_key(dedupe_key) 50 + for line in path.read_text(encoding="utf-8").splitlines(): 51 + if not line.strip(): 52 + continue 53 + try: 54 + payload = json.loads(line) 55 + except json.JSONDecodeError: 56 + continue 57 + if isinstance(payload, dict) and payload.get("dedupe_key") == encoded: 58 + return True 59 + return False 60 + 61 + 62 + def _append_nudge_log(line: dict[str, Any]) -> None: 63 + path = _nudge_log_path() 64 + path.parent.mkdir(parents=True, exist_ok=True) 65 + with path.open("a", encoding="utf-8") as handle: 66 + handle.write(json.dumps(line, ensure_ascii=False) + "\n") 67 + 68 + 69 + def _eligible_devices() -> list[dict[str, Any]]: 70 + if not is_configured(): 71 + logger.debug("push skipped configured=false") 72 + return [] 73 + bundle_id = get_bundle_id() 74 + environment = get_environment() 75 + matched = [ 76 + device 77 + for device in load_devices() 78 + if device.get("bundle_id") == bundle_id 79 + and device.get("environment") == environment 80 + and device.get("platform") == "ios" 81 + ] 82 + if not matched: 83 + logger.debug("push skipped devices=0") 84 + return matched 85 + 86 + 87 + def _metadata_generated(metadata: dict[str, Any] | None) -> str | None: 88 + if not isinstance(metadata, dict): 89 + return None 90 + generated = metadata.get("generated") 91 + if isinstance(generated, str): 92 + return generated 93 + if hasattr(generated, "isoformat"): 94 + return generated.isoformat() 95 + return None 96 + 97 + 98 + def _record_send( 99 + *, 100 + dedupe_key: tuple[Any, ...], 101 + category: str, 102 + sent: int, 103 + failed: int, 104 + **payload: Any, 105 + ) -> None: 106 + _append_nudge_log( 107 + { 108 + "ts": int(time.time()), 109 + "category": category, 110 + "dedupe_key": _serialize_dedupe_key(dedupe_key), 111 + "sent": sent, 112 + "failed": failed, 113 + **payload, 114 + } 115 + ) 116 + 117 + 118 + def handle_briefing_finish(message: dict[str, Any]) -> None: 119 + if message.get("tract") != "cortex": 120 + return 121 + if message.get("event") != "finish": 122 + return 123 + if 
message.get("name") != "morning_briefing":
124 +         return
125 +     today = datetime.now().strftime("%Y%m%d")
126 +     dedupe_key = (CATEGORY_DAILY_BRIEFING, today)
127 +     if _has_nudged(dedupe_key):
128 +         return
129 +     sections: dict[str, str] = {}
130 +     metadata: dict[str, Any] | None = None
131 +     needs_attention: list[str] = []
132 +     for _ in range(10):
133 +         sections, metadata, needs_attention = _load_briefing_md(today)
134 +         if sections and metadata:
135 +             break
136 +         time.sleep(1)
137 +     else:
138 +         logger.warning("push briefing unavailable after finish day=%s", today)
139 +         return
140 +     eligible_devices = _eligible_devices()
141 +     if not eligible_devices:
142 +         return
143 +     sent, failed = send_many(
144 +         eligible_devices,
145 +         build_daily_briefing_payload(
146 +             day=today,
147 +             generated=_metadata_generated(metadata),
148 +             needs_attention_count=len(needs_attention),
149 +         ),
150 +         collapse_id=build_daily_briefing_collapse_id(today),
151 +     )
152 +     if sent > 0:
153 +         _record_send(
154 +             dedupe_key=dedupe_key,
155 +             category=CATEGORY_DAILY_BRIEFING,
156 +             day=today,
157 +             sent=sent,
158 +             failed=failed,
159 +         )
160 +
161 +
162 + def _parse_start(now: datetime, start: str) -> datetime | None:
163 +     for pattern in ("%H:%M", "%H:%M:%S"):
164 +         try:
165 +             parsed = datetime.strptime(start, pattern)
166 +         except ValueError:
167 +             continue
168 +         return now.replace(
169 +             hour=parsed.hour,
170 +             minute=parsed.minute,
171 +             second=parsed.second,
172 +             microsecond=0,
173 +         )
174 +     return None
175 +
176 +
177 + def check_pre_meeting_prep(now: datetime) -> None:
178 +     today = now.strftime("%Y%m%d")
179 +     eligible_devices = _eligible_devices()
180 +     if not eligible_devices:
181 +         return
182 +     for facet in get_enabled_facets().keys():
183 +         for record in load_activity_records(facet, today):
184 +             if record.get("source") != "anticipated":
185 +                 continue
186 +             activity_id = str(record.get("id") or "").strip()
187 +             start = str(record.get("start") or "").strip()
188 +             if not activity_id or not start:
189 +                 continue
190 +             event_start = _parse_start(now, start)
191 +             if event_start is None:
192 +                 logger.debug("push skipped invalid meeting start id=%s", activity_id)
193 +                 continue
194 +             seconds_until = (event_start - now).total_seconds()
195 +             if seconds_until < 14 * 60 or seconds_until > 16 * 60:
196 +                 continue
197 +             dedupe_key = (CATEGORY_PRE_MEETING_PREP, activity_id, today)
198 +             if _has_nudged(dedupe_key):
199 +                 continue
200 +             sent, failed = send_many(
201 +                 eligible_devices,
202 +                 build_pre_meeting_payload(activity=record, facet=facet, day=today),
203 +                 collapse_id=build_pre_meeting_collapse_id(activity_id),
204 +             )
205 +             if sent > 0:
206 +                 _record_send(
207 +                     dedupe_key=dedupe_key,
208 +                     category=CATEGORY_PRE_MEETING_PREP,
209 +                     day=today,
210 +                     facet=facet,
211 +                     activity_id=activity_id,
212 +                     sent=sent,
213 +                     failed=failed,
214 +                 )
215 +
216 +
217 + def send_agent_alert(*, title: str, body: str, context_id: str) -> tuple[int, int]:
218 +     dedupe_key = (CATEGORY_AGENT_ALERT, context_id)
219 +     if _has_nudged(dedupe_key):
220 +         return 0, 0
221 +     eligible_devices = _eligible_devices()
222 +     if not eligible_devices:
223 +         return 0, 0
224 +     sent, failed = send_many(
225 +         eligible_devices,
226 +         build_agent_alert_payload(title=title, body=body, context_id=context_id),
227 +         collapse_id=build_agent_alert_collapse_id(context_id),
228 +     )
229 +     if sent > 0:
230 +         _record_send(
231 +             dedupe_key=dedupe_key,
232 +             category=CATEGORY_AGENT_ALERT,
233 +             context_id=context_id,
234 +             sent=sent,
235 +             failed=failed,
236 +         )
237 +     return sent, failed
238 +
239 +
240 + __all__ = ["check_pre_meeting_prep", "handle_briefing_finish", "send_agent_alert"]
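Two properties of the triggers above are worth spelling out. The 14–16 minute pre-meeting window is two minutes wide against the runtime's 60 s check cadence, so every valid meeting start lands inside the window on at least one tick, and the (category, activity_id, day) dedupe key absorbs any second tick. Likewise, send_agent_alert is idempotent per context_id. A hypothetical usage sketch (the alert copy and context ID are illustrative, not shipped strings):

from think.push.triggers import send_agent_alert

# First call fans out to every eligible device; if at least one send
# succeeds, the nudge is recorded under (CATEGORY_AGENT_ALERT, context_id).
sent, failed = send_agent_alert(
    title="Agent run complete",        # illustrative copy
    body="Tap to review the results.",
    context_id="cortex-task-42",       # hypothetical context ID
)

# Assuming the first call reached at least one device, a retry with the
# same context_id hits the dedupe key and is a no-op.
assert send_agent_alert(
    title="Agent run complete",
    body="Tap to review the results.",
    context_id="cortex-task-42",
) == (0, 0)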
+35
uv.lock
···
1010 1010 ]
1011 1011
1012 1012 [[package]]
1013 + name = "h2"
1014 + version = "4.3.0"
1015 + source = { registry = "https://pypi.org/simple" }
1016 + dependencies = [
1017 +     { name = "hpack" },
1018 +     { name = "hyperframe" },
1019 + ]
1020 + sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" }
1021 + wheels = [
1022 +     { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" },
1023 + ]
1024 +
1025 + [[package]]
1013 1026 name = "hf-xet"
1014 1027 version = "1.2.0"
1015 1028 source = { registry = "https://pypi.org/simple" }
···
1036 1049     { url = "https://files.pythonhosted.org/packages/0b/dd/7ac658d54b9fb7999a0ccb07ad863b413cbaf5cf172f48ebcd9497ec7263/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737", size = 3413812, upload-time = "2025-10-24T19:04:24.585Z" },
1037 1050     { url = "https://files.pythonhosted.org/packages/92/68/89ac4e5b12a9ff6286a12174c8538a5930e2ed662091dd2572bbe0a18c8a/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865", size = 3508920, upload-time = "2025-10-24T19:04:26.927Z" },
1038 1051     { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735, upload-time = "2025-10-24T19:04:35.928Z" },
1052 + ]
1053 +
1054 + [[package]]
1055 + name = "hpack"
1056 + version = "4.1.0"
1057 + source = { registry = "https://pypi.org/simple" }
1058 + sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" }
1059 + wheels = [
1060 +     { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" },
1039 1061 ]
1040 1062
1041 1063 [[package]]
···
1094 1116 sdist = { url = "https://files.pythonhosted.org/packages/c4/fc/eb9bc06130e8bbda6a616e1b80a7aa127681c448d6b49806f61db2670b61/huggingface_hub-1.4.1.tar.gz", hash = "sha256:b41131ec35e631e7383ab26d6146b8d8972abc8b6309b963b306fbcca87f5ed5", size = 642156, upload-time = "2026-02-06T09:20:03.013Z" }
1095 1117 wheels = [
1096 1118     { url = "https://files.pythonhosted.org/packages/d5/ae/2f6d96b4e6c5478d87d606a1934b5d436c4a2bce6bb7c6fdece891c128e3/huggingface_hub-1.4.1-py3-none-any.whl", hash = "sha256:9931d075fb7a79af5abc487106414ec5fba2c0ae86104c0c62fd6cae38873d18", size = 553326, upload-time = "2026-02-06T09:20:00.728Z" },
1119 + ]
1120 +
1121 + [[package]]
1122 + name = "hyperframe"
1123 + version = "6.1.0"
1124 + source = { registry = "https://pypi.org/simple" }
1125 + sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" }
1126 + wheels = [
1127 +     { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" },
1097 1128 ]
1098 1129
1099 1130 [[package]]
···
3538 3569 { name = "freezegun" },
3539 3570 { name = "genai-prices" },
3540 3571 { name = "google-genai" },
3572 + { name = "h2" },
3541 3573 { name = "httpx" },
3542 3574 { name = "icalendar" },
3543 3575 { name = "jsonschema" },
···
3553 3585 { name = "pillow" },
3554 3586 { name = "playwright" },
3555 3587 { name = "psutil" },
3588 + { name = "pyjwt" },
3556 3589 { name = "pyopenssl" },
3557 3590 { name = "pypdf" },
3558 3591 { name = "pytesseract" },
···
3588 3621 { name = "freezegun" },
3589 3622 { name = "genai-prices" },
3590 3623 { name = "google-genai" },
3624 + { name = "h2" },
3591 3625 { name = "httpx" },
3592 3626 { name = "icalendar" },
3593 3627 { name = "jsonschema", specifier = ">=4.26,<5" },
···
3602 3636 { name = "pillow" },
3603 3637 { name = "playwright", specifier = ">=1.40.0" },
3604 3638 { name = "psutil" },
3639 + { name = "pyjwt", specifier = ">=2.8" },
3605 3640 { name = "pyopenssl", specifier = ">=24.0" },
3606 3641 { name = "pypdf" },
3607 3642 { name = "pytesseract" },