a digital entity named phi that roams bsky phi.zzstoatzz.io
2
fork

Configure Feed

Select the types of activity you want to include in your feed.

feat: add post search tool, fix observation extraction, add memory inspector

- add search_posts tool: authenticated bluesky post search via bot_client
- fix extraction prompt: distinguish user-expressed interests from
bot-retrieved content (trending topics, search results)
- add scripts/memory_inspect.py: list namespaces, dump user memories,
delete rows, purge observations
- update bio and personality with search capability

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

zzstoatzz 9dc91117 63337894

+201 -7
+1
personalities/phi.md
··· 57 57 - use pdsx tools for atproto record operations (create, list, get, update, delete any record type) 58 58 - search memory for more context about a user when needed 59 59 - search ATProto publications (leaflet, whitewind, offprint, etc.) via pub-search tools (prefixed with `pub_`) 60 + - search bluesky posts by keyword via `search_posts` 60 61 - check what's trending on bluesky via `get_trending` (entity-level trends from coral + official trending topics) 61 62 62 63 ## how responses work
+166
scripts/memory_inspect.py
"""Inspect and prune stored memories.

Usage:
    uv run scripts/memory_inspect.py                           # list all user namespaces
    uv run scripts/memory_inspect.py USER_HANDLE               # dump observations + interactions for a user
    uv run scripts/memory_inspect.py USER_HANDLE --delete ID   # delete a specific row by ID
    uv run scripts/memory_inspect.py USER_HANDLE --purge-observations  # delete ALL observations for a user
"""

import argparse

from turbopuffer import Turbopuffer

from bot.config import settings

# All user memory lives in namespaces named "phi-users-<sanitized handle>".
_NS_PREFIX = "phi-users-"
# Dimensionality of the stored embedding vectors; ANN queries need a probe of this size.
_VECTOR_DIM = 1536
# Max rows fetched per query. NOTE: this also caps how many observations a single
# purge_observations() run can delete — rerun the command for very large namespaces.
_TOP_K = 200


def _ns_name(handle: str) -> str:
    """Map a user handle to its turbopuffer namespace name.

    Dots and dashes become underscores and a leading "@" is stripped, matching
    the sanitization the bot uses when it writes memories.
    """
    clean = handle.replace(".", "_").replace("@", "").replace("-", "_")
    return f"{_NS_PREFIX}{clean}"


def get_client() -> Turbopuffer:
    """Build an authenticated turbopuffer client from bot settings."""
    return Turbopuffer(api_key=settings.turbopuffer_api_key, region=settings.turbopuffer_region)


def list_namespaces(client: Turbopuffer) -> None:
    """List all namespaces that look like user memory."""
    namespaces = client.namespaces()
    user_ns = [ns for ns in namespaces if ns.id.startswith(_NS_PREFIX)]
    if not user_ns:
        print("no user namespaces found")
        return
    print(f"found {len(user_ns)} user namespaces:\n")
    for ns in sorted(user_ns, key=lambda n: n.id):
        # reverse the sanitization for display; lossy if the handle contained "_"
        handle = ns.id.removeprefix(_NS_PREFIX).replace("_", ".")
        print(f"  {handle:<40} ({ns.id})")


def dump_user(client: Turbopuffer, handle: str) -> None:
    """Dump all memory rows (observations + interactions) for a user."""
    ns_name = _ns_name(handle)
    ns = client.namespace(ns_name)

    probe = [0.5] * _VECTOR_DIM
    try:
        response = ns.query(
            rank_by=("vector", "ANN", probe),
            top_k=_TOP_K,
            include_attributes=["kind", "content", "tags", "created_at"],
        )
    except Exception as e:
        msg = str(e)
        # Check the attribute case FIRST: an "attribute 'kind' was not found"
        # error also contains the substring "was not found", so testing the
        # namespace case first would misreport old-schema namespaces as missing.
        if "attribute" in msg and "not found" in msg:
            # old namespace without kind/tags columns — fetch whatever exists
            response = ns.query(
                rank_by=("vector", "ANN", probe),
                top_k=_TOP_K,
                include_attributes=True,
            )
        elif "was not found" in msg:
            print(f"no namespace found for @{handle} ({ns_name})")
            return
        else:
            raise

    if not response.rows:
        print(f"no rows found for @{handle}")
        return

    observations = []
    interactions = []
    for row in response.rows:
        # rows from pre-schema namespaces may lack kind/tags/created_at
        kind = getattr(row, "kind", "unknown")
        entry = {
            "id": row.id,
            "content": row.content,
            "tags": getattr(row, "tags", []),
            "created_at": getattr(row, "created_at", ""),
        }
        if kind == "observation":
            observations.append(entry)
        else:
            interactions.append(entry)

    if observations:
        print(f"=== observations ({len(observations)}) ===\n")
        for obs in observations:
            tags = f"  [{', '.join(obs['tags'])}]" if obs["tags"] else ""
            print(f"  [{obs['id']}] {obs['content']}{tags}")
            if obs["created_at"]:
                print(f"      created: {obs['created_at']}")
            print()

    if interactions:
        print(f"=== interactions ({len(interactions)}) ===\n")
        for ix in interactions:
            content = ix["content"].replace("\n", "\n    ")
            print(f"  [{ix['id']}]")
            print(f"    {content}")
            if ix["created_at"]:
                print(f"    created: {ix['created_at']}")
            print()

    print(f"total: {len(observations)} observations, {len(interactions)} interactions")


def delete_row(client: Turbopuffer, handle: str, row_id: str) -> None:
    """Delete a specific row by ID from a user's namespace."""
    ns_name = _ns_name(handle)
    ns = client.namespace(ns_name)
    ns.write(deletes=[row_id])
    print(f"deleted row {row_id} from {ns_name}")


def purge_observations(client: Turbopuffer, handle: str) -> None:
    """Delete all observations for a user (interactions are kept)."""
    ns = client.namespace(_ns_name(handle))

    try:
        response = ns.query(
            rank_by=("vector", "ANN", [0.5] * _VECTOR_DIM),
            top_k=_TOP_K,
            filters={"kind": ["Eq", "observation"]},
            include_attributes=["content"],
        )
    except Exception as e:
        if "was not found" in str(e):
            print(f"no namespace found for @{handle}")
            return
        raise

    if not response.rows:
        print(f"no observations to purge for @{handle}")
        return

    ids = [row.id for row in response.rows]
    # show what is about to be deleted before issuing the write
    print(f"purging {len(ids)} observations for @{handle}:")
    for row in response.rows:
        print(f"  - {row.content}")

    ns.write(deletes=ids)
    print(f"\ndeleted {len(ids)} observations")


def main() -> None:
    """CLI entry point: dispatch to list / dump / delete / purge."""
    parser = argparse.ArgumentParser(description="Inspect and prune phi memories")
    parser.add_argument("handle", nargs="?", help="User handle to inspect")
    parser.add_argument("--delete", metavar="ID", help="Delete a specific row by ID")
    parser.add_argument("--purge-observations", action="store_true", help="Delete all observations for a user")
    args = parser.parse_args()

    client = get_client()

    # no handle -> listing mode; with a handle, flags pick the action
    if not args.handle:
        list_namespaces(client)
        return

    if args.purge_observations:
        purge_observations(client, args.handle)
    elif args.delete:
        delete_row(client, args.handle, args.delete)
    else:
        dump_user(client, args.handle)


if __name__ == "__main__":
    main()
+22
src/bot/agent.py
··· 95 95 return "\n".join(parts) 96 96 97 97 @self.agent.tool 98 + async def search_posts(ctx: RunContext[dict], query: str, limit: int = 10) -> str: 99 + """Search Bluesky posts by keyword. Use this to find what people are saying about a topic.""" 100 + from bot.core.atproto_client import bot_client 101 + 102 + try: 103 + response = bot_client.client.app.bsky.feed.search_posts( 104 + params={"q": query, "limit": min(limit, 25), "sort": "top"} 105 + ) 106 + if not response.posts: 107 + return f"no posts found for '{query}'" 108 + 109 + lines = [] 110 + for post in response.posts: 111 + text = post.record.text if hasattr(post.record, "text") else "" 112 + handle = post.author.handle 113 + likes = post.like_count or 0 114 + lines.append(f"@{handle} ({likes} likes): {text[:200]}") 115 + return "\n\n".join(lines) 116 + except Exception as e: 117 + return f"search failed: {e}" 118 + 119 + @self.agent.tool 98 120 async def get_trending(ctx: RunContext[dict]) -> str: 99 121 """Get what's currently trending on Bluesky. Returns entity-level trends from the firehose (via coral) and official Bluesky trending topics. Use this when someone asks about current events, what people are talking about, or when you want timely context.""" 100 122 parts: list[str] = []
+1 -1
src/bot/core/profile_manager.py
··· 6 6 7 7 logger = logging.getLogger("bot.profile_manager") 8 8 9 - _ONLINE_SUFFIX = "\n\n🟢 memory, thread context, atproto records, publication search, trending" 9 + _ONLINE_SUFFIX = "\n\n🟢 memory, thread context, atproto records, publication search, post search, trending" 10 10 _OFFLINE_SUFFIX = " • 🔴 offline" 11 11 _ALL_SUFFIXES = [_ONLINE_SUFFIX, _OFFLINE_SUFFIX] 12 12
+11 -6
src/bot/memory/namespace_memory.py
··· 29 29 30 30 31 31 EXTRACTION_SYSTEM_PROMPT = """\ 32 - extract factual observations from this conversation exchange. 33 - focus on: interests, preferences, facts about the user, topics discussed, opinions expressed. 34 - skip: greetings, filler, things that are only meaningful in the moment. 35 - each observation should be a standalone fact that would be useful context in a future conversation. 36 - use short, lowercase tags to categorize each observation. 37 - if there's nothing worth extracting, return an empty list. 32 + extract factual observations about the USER from this conversation exchange. 33 + focus on things the user explicitly stated or clearly demonstrated: 34 + - interests they expressed (not topics the bot brought up) 35 + - preferences, opinions, facts about themselves 36 + - what they asked about and WHY (e.g. "curious about current events" not the specific events listed) 37 + skip: 38 + - greetings, filler, things only meaningful in the moment 39 + - content the bot retrieved or generated (trending topics, search results, etc.) — those are NOT the user's interests 40 + - circumstantial details from bot tool output 41 + each observation should be a standalone fact useful in a future conversation. 42 + use short, lowercase tags. return an empty list if nothing is worth extracting. 38 43 deduplicate against the existing observations provided.""" 39 44 40 45 _extraction_agent: Agent[None, ExtractionResult] | None = None