Claude Code plugin that sends telemetry to OTLP destinations (traces)
0
fork

Configure Feed

Select the types of activity you want to include in your feed.

feat: add missing gen_ai semconv attributes

LLM spans now include:
- gen_ai.response.id (message ID from API response)
- gen_ai.conversation.id (session ID, replaces custom session.id)

Tool spans now include:
- gen_ai.tool.call.id (tool_use ID)
- gen_ai.tool.type ("extension" for Claude Code tools)
- gen_ai.operation.name ("execute_tool")
- gen_ai.system ("anthropic")

Subagent spans now use:
- gen_ai.agent.id, gen_ai.agent.name (replaces custom attributes)

All spans use gen_ai.conversation.id instead of session.id for
consistency with the semconv spec.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

+119 -32
+28 -8
bundle/session-end.cjs
··· 22321 22321 } 22322 22322 22323 22323 // node_modules/.pnpm/@opentelemetry+semantic-conventions@1.40.0/node_modules/@opentelemetry/semantic-conventions/build/esm/experimental_attributes.js 22324 + var ATTR_GEN_AI_AGENT_ID = "gen_ai.agent.id"; 22325 + var ATTR_GEN_AI_CONVERSATION_ID = "gen_ai.conversation.id"; 22324 22326 var ATTR_GEN_AI_OPERATION_NAME = "gen_ai.operation.name"; 22325 22327 var GEN_AI_OPERATION_NAME_VALUE_CHAT = "chat"; 22328 + var GEN_AI_OPERATION_NAME_VALUE_EXECUTE_TOOL = "execute_tool"; 22326 22329 var ATTR_GEN_AI_REQUEST_MODEL = "gen_ai.request.model"; 22327 22330 var ATTR_GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons"; 22331 + var ATTR_GEN_AI_RESPONSE_ID = "gen_ai.response.id"; 22328 22332 var ATTR_GEN_AI_RESPONSE_MODEL = "gen_ai.response.model"; 22329 22333 var ATTR_GEN_AI_SYSTEM = "gen_ai.system"; 22330 22334 var GEN_AI_SYSTEM_VALUE_ANTHROPIC = "anthropic"; 22335 + var ATTR_GEN_AI_TOOL_CALL_ID = "gen_ai.tool.call.id"; 22331 22336 var ATTR_GEN_AI_TOOL_NAME = "gen_ai.tool.name"; 22337 + var ATTR_GEN_AI_TOOL_TYPE = "gen_ai.tool.type"; 22332 22338 var ATTR_GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS = "gen_ai.usage.cache_creation.input_tokens"; 22333 22339 var ATTR_GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS = "gen_ai.usage.cache_read.input_tokens"; 22334 22340 var ATTR_GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"; ··· 22440 22446 usage: last.message.usage, 22441 22447 // SSE usage is cumulative; last chunk has final totals. 
22442 22448 startTime: first.timestamp, 22443 - endTime: last.timestamp 22449 + endTime: last.timestamp, 22450 + messageId: first.message.id 22444 22451 }; 22445 22452 } 22446 22453 function mergeAdjacentTextBlocks(blocks) { ··· 22514 22521 usage: merged.usage, 22515 22522 startTime: merged.startTime, 22516 22523 endTime: merged.endTime, 22517 - toolCalls 22524 + toolCalls, 22525 + messageId: merged.messageId 22518 22526 }); 22519 22527 } 22520 22528 turns.push({ ··· 22588 22596 kind: SpanKind.SERVER, 22589 22597 startTime: new Date(turnStartMs), 22590 22598 attributes: { 22591 - "claude_code.turn_number": turnNum, 22592 - "session.id": sessionId 22599 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 22600 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22601 + "claude_code.turn_number": turnNum 22593 22602 } 22594 22603 }, turnParentCtx); 22595 22604 const turnCtx = trace.setSpan(ROOT_CONTEXT, turnSpan); ··· 22614 22623 kind: SpanKind.INTERNAL, 22615 22624 startTime: new Date(toolStartMs), 22616 22625 attributes: { 22626 + [ATTR_GEN_AI_OPERATION_NAME]: GEN_AI_OPERATION_NAME_VALUE_EXECUTE_TOOL, 22627 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 22617 22628 [ATTR_GEN_AI_TOOL_NAME]: toolCall.tool_use.name, 22618 - "session.id": sessionId 22629 + [ATTR_GEN_AI_TOOL_CALL_ID]: toolCall.tool_use.id, 22630 + [ATTR_GEN_AI_TOOL_TYPE]: "extension", 22631 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22632 + ...toolCall.agentId ? 
{ 22633 + [ATTR_GEN_AI_AGENT_ID]: toolCall.agentId 22634 + } : {} 22619 22635 } 22620 22636 }, turnCtx); 22621 22637 if (toolCall.agentId) { ··· 22632 22648 [ATTR_GEN_AI_REQUEST_MODEL]: llmCall.model, 22633 22649 [ATTR_GEN_AI_RESPONSE_MODEL]: llmCall.model, 22634 22650 [ATTR_GEN_AI_RESPONSE_FINISH_REASONS]: [finishReason], 22635 - "session.id": sessionId, 22651 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22636 22652 ...usageAttributes(llmCall.usage) 22637 22653 }; 22654 + if (llmCall.messageId) { 22655 + llmAttrs[ATTR_GEN_AI_RESPONSE_ID] = llmCall.messageId; 22656 + } 22638 22657 if (llmCall.synthetic) { 22639 22658 llmAttrs["claude_code.synthetic"] = true; 22640 22659 } ··· 22706 22725 kind: SpanKind.SERVER, 22707 22726 startTime: new Date(startMs), 22708 22727 attributes: { 22709 - "claude_code.turn_number": turnNum, 22710 - "session.id": sessionId 22728 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 22729 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22730 + "claude_code.turn_number": turnNum 22711 22731 } 22712 22732 }, parentCtx); 22713 22733 turnSpan.setStatus({ code: SpanStatusCode.ERROR, message: "User interrupt" });
+30 -9
bundle/stop.cjs
··· 22303 22303 usage: last.message.usage, 22304 22304 // SSE usage is cumulative; last chunk has final totals. 22305 22305 startTime: first.timestamp, 22306 - endTime: last.timestamp 22306 + endTime: last.timestamp, 22307 + messageId: first.message.id 22307 22308 }; 22308 22309 } 22309 22310 function mergeAdjacentTextBlocks(blocks) { ··· 22377 22378 usage: merged.usage, 22378 22379 startTime: merged.startTime, 22379 22380 endTime: merged.endTime, 22380 - toolCalls 22381 + toolCalls, 22382 + messageId: merged.messageId 22381 22383 }); 22382 22384 } 22383 22385 turns.push({ ··· 22632 22634 } 22633 22635 22634 22636 // node_modules/.pnpm/@opentelemetry+semantic-conventions@1.40.0/node_modules/@opentelemetry/semantic-conventions/build/esm/experimental_attributes.js 22637 + var ATTR_GEN_AI_AGENT_ID = "gen_ai.agent.id"; 22638 + var ATTR_GEN_AI_AGENT_NAME = "gen_ai.agent.name"; 22639 + var ATTR_GEN_AI_CONVERSATION_ID = "gen_ai.conversation.id"; 22635 22640 var ATTR_GEN_AI_OPERATION_NAME = "gen_ai.operation.name"; 22636 22641 var GEN_AI_OPERATION_NAME_VALUE_CHAT = "chat"; 22642 + var GEN_AI_OPERATION_NAME_VALUE_EXECUTE_TOOL = "execute_tool"; 22637 22643 var ATTR_GEN_AI_REQUEST_MODEL = "gen_ai.request.model"; 22638 22644 var ATTR_GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons"; 22645 + var ATTR_GEN_AI_RESPONSE_ID = "gen_ai.response.id"; 22639 22646 var ATTR_GEN_AI_RESPONSE_MODEL = "gen_ai.response.model"; 22640 22647 var ATTR_GEN_AI_SYSTEM = "gen_ai.system"; 22641 22648 var GEN_AI_SYSTEM_VALUE_ANTHROPIC = "anthropic"; 22649 + var ATTR_GEN_AI_TOOL_CALL_ID = "gen_ai.tool.call.id"; 22642 22650 var ATTR_GEN_AI_TOOL_NAME = "gen_ai.tool.name"; 22651 + var ATTR_GEN_AI_TOOL_TYPE = "gen_ai.tool.type"; 22643 22652 var ATTR_GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS = "gen_ai.usage.cache_creation.input_tokens"; 22644 22653 var ATTR_GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS = "gen_ai.usage.cache_read.input_tokens"; 22645 22654 var ATTR_GEN_AI_USAGE_INPUT_TOKENS = 
"gen_ai.usage.input_tokens"; ··· 22679 22688 kind: SpanKind.SERVER, 22680 22689 startTime: new Date(turnStartMs), 22681 22690 attributes: { 22682 - "claude_code.turn_number": turnNum, 22683 - "session.id": sessionId 22691 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 22692 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22693 + "claude_code.turn_number": turnNum 22684 22694 } 22685 22695 }, turnParentCtx); 22686 22696 const turnCtx = trace.setSpan(ROOT_CONTEXT, turnSpan); ··· 22705 22715 kind: SpanKind.INTERNAL, 22706 22716 startTime: new Date(toolStartMs), 22707 22717 attributes: { 22718 + [ATTR_GEN_AI_OPERATION_NAME]: GEN_AI_OPERATION_NAME_VALUE_EXECUTE_TOOL, 22719 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 22708 22720 [ATTR_GEN_AI_TOOL_NAME]: toolCall.tool_use.name, 22709 - "session.id": sessionId 22721 + [ATTR_GEN_AI_TOOL_CALL_ID]: toolCall.tool_use.id, 22722 + [ATTR_GEN_AI_TOOL_TYPE]: "extension", 22723 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22724 + ...toolCall.agentId ? 
{ 22725 + [ATTR_GEN_AI_AGENT_ID]: toolCall.agentId 22726 + } : {} 22710 22727 } 22711 22728 }, turnCtx); 22712 22729 if (toolCall.agentId) { ··· 22723 22740 [ATTR_GEN_AI_REQUEST_MODEL]: llmCall.model, 22724 22741 [ATTR_GEN_AI_RESPONSE_MODEL]: llmCall.model, 22725 22742 [ATTR_GEN_AI_RESPONSE_FINISH_REASONS]: [finishReason], 22726 - "session.id": sessionId, 22743 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22727 22744 ...usageAttributes(llmCall.usage) 22728 22745 }; 22746 + if (llmCall.messageId) { 22747 + llmAttrs[ATTR_GEN_AI_RESPONSE_ID] = llmCall.messageId; 22748 + } 22729 22749 if (llmCall.synthetic) { 22730 22750 llmAttrs["claude_code.synthetic"] = true; 22731 22751 } ··· 22770 22790 kind: SpanKind.INTERNAL, 22771 22791 startTime: new Date(subStartMs), 22772 22792 attributes: { 22773 - "claude_code.agent_type": toolName, 22774 - "claude_code.agent_id": sub.agent_id, 22775 - "session.id": sessionId 22793 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 22794 + [ATTR_GEN_AI_AGENT_NAME]: toolName, 22795 + [ATTR_GEN_AI_AGENT_ID]: sub.agent_id, 22796 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId 22776 22797 } 22777 22798 }, parentCtx); 22778 22799 const chainSpanId = chainSpan.spanContext().spanId;
+28 -8
bundle/user-prompt-submit.cjs
··· 22333 22333 } 22334 22334 22335 22335 // node_modules/.pnpm/@opentelemetry+semantic-conventions@1.40.0/node_modules/@opentelemetry/semantic-conventions/build/esm/experimental_attributes.js 22336 + var ATTR_GEN_AI_AGENT_ID = "gen_ai.agent.id"; 22337 + var ATTR_GEN_AI_CONVERSATION_ID = "gen_ai.conversation.id"; 22336 22338 var ATTR_GEN_AI_OPERATION_NAME = "gen_ai.operation.name"; 22337 22339 var GEN_AI_OPERATION_NAME_VALUE_CHAT = "chat"; 22340 + var GEN_AI_OPERATION_NAME_VALUE_EXECUTE_TOOL = "execute_tool"; 22338 22341 var ATTR_GEN_AI_REQUEST_MODEL = "gen_ai.request.model"; 22339 22342 var ATTR_GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons"; 22343 + var ATTR_GEN_AI_RESPONSE_ID = "gen_ai.response.id"; 22340 22344 var ATTR_GEN_AI_RESPONSE_MODEL = "gen_ai.response.model"; 22341 22345 var ATTR_GEN_AI_SYSTEM = "gen_ai.system"; 22342 22346 var GEN_AI_SYSTEM_VALUE_ANTHROPIC = "anthropic"; 22347 + var ATTR_GEN_AI_TOOL_CALL_ID = "gen_ai.tool.call.id"; 22343 22348 var ATTR_GEN_AI_TOOL_NAME = "gen_ai.tool.name"; 22349 + var ATTR_GEN_AI_TOOL_TYPE = "gen_ai.tool.type"; 22344 22350 var ATTR_GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS = "gen_ai.usage.cache_creation.input_tokens"; 22345 22351 var ATTR_GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS = "gen_ai.usage.cache_read.input_tokens"; 22346 22352 var ATTR_GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"; ··· 22491 22497 usage: last.message.usage, 22492 22498 // SSE usage is cumulative; last chunk has final totals. 
22493 22499 startTime: first.timestamp, 22494 - endTime: last.timestamp 22500 + endTime: last.timestamp, 22501 + messageId: first.message.id 22495 22502 }; 22496 22503 } 22497 22504 function mergeAdjacentTextBlocks(blocks) { ··· 22565 22572 usage: merged.usage, 22566 22573 startTime: merged.startTime, 22567 22574 endTime: merged.endTime, 22568 - toolCalls 22575 + toolCalls, 22576 + messageId: merged.messageId 22569 22577 }); 22570 22578 } 22571 22579 turns.push({ ··· 22639 22647 kind: SpanKind.SERVER, 22640 22648 startTime: new Date(turnStartMs), 22641 22649 attributes: { 22642 - "claude_code.turn_number": turnNum, 22643 - "session.id": sessionId 22650 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 22651 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22652 + "claude_code.turn_number": turnNum 22644 22653 } 22645 22654 }, turnParentCtx); 22646 22655 const turnCtx = trace.setSpan(ROOT_CONTEXT, turnSpan); ··· 22665 22674 kind: SpanKind.INTERNAL, 22666 22675 startTime: new Date(toolStartMs), 22667 22676 attributes: { 22677 + [ATTR_GEN_AI_OPERATION_NAME]: GEN_AI_OPERATION_NAME_VALUE_EXECUTE_TOOL, 22678 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 22668 22679 [ATTR_GEN_AI_TOOL_NAME]: toolCall.tool_use.name, 22669 - "session.id": sessionId 22680 + [ATTR_GEN_AI_TOOL_CALL_ID]: toolCall.tool_use.id, 22681 + [ATTR_GEN_AI_TOOL_TYPE]: "extension", 22682 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22683 + ...toolCall.agentId ? 
{ 22684 + [ATTR_GEN_AI_AGENT_ID]: toolCall.agentId 22685 + } : {} 22670 22686 } 22671 22687 }, turnCtx); 22672 22688 if (toolCall.agentId) { ··· 22683 22699 [ATTR_GEN_AI_REQUEST_MODEL]: llmCall.model, 22684 22700 [ATTR_GEN_AI_RESPONSE_MODEL]: llmCall.model, 22685 22701 [ATTR_GEN_AI_RESPONSE_FINISH_REASONS]: [finishReason], 22686 - "session.id": sessionId, 22702 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22687 22703 ...usageAttributes(llmCall.usage) 22688 22704 }; 22705 + if (llmCall.messageId) { 22706 + llmAttrs[ATTR_GEN_AI_RESPONSE_ID] = llmCall.messageId; 22707 + } 22689 22708 if (llmCall.synthetic) { 22690 22709 llmAttrs["claude_code.synthetic"] = true; 22691 22710 } ··· 22757 22776 kind: SpanKind.SERVER, 22758 22777 startTime: new Date(startMs), 22759 22778 attributes: { 22760 - "claude_code.turn_number": turnNum, 22761 - "session.id": sessionId 22779 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 22780 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 22781 + "claude_code.turn_number": turnNum 22762 22782 } 22763 22783 }, parentCtx); 22764 22784 turnSpan.setStatus({ code: SpanStatusCode.ERROR, message: "User interrupt" });
+28 -7
src/tracing.ts
··· 30 30 ATTR_GEN_AI_REQUEST_MODEL, 31 31 ATTR_GEN_AI_RESPONSE_MODEL, 32 32 ATTR_GEN_AI_RESPONSE_FINISH_REASONS, 33 + ATTR_GEN_AI_RESPONSE_ID, 34 + ATTR_GEN_AI_CONVERSATION_ID, 33 35 ATTR_GEN_AI_USAGE_INPUT_TOKENS, 34 36 ATTR_GEN_AI_USAGE_OUTPUT_TOKENS, 35 37 ATTR_GEN_AI_USAGE_CACHE_READ_INPUT_TOKENS, 36 38 ATTR_GEN_AI_USAGE_CACHE_CREATION_INPUT_TOKENS, 37 39 ATTR_GEN_AI_TOOL_NAME, 40 + ATTR_GEN_AI_TOOL_CALL_ID, 41 + ATTR_GEN_AI_TOOL_TYPE, 42 + ATTR_GEN_AI_AGENT_ID, 43 + ATTR_GEN_AI_AGENT_NAME, 38 44 GEN_AI_SYSTEM_VALUE_ANTHROPIC, 39 45 GEN_AI_OPERATION_NAME_VALUE_CHAT, 46 + GEN_AI_OPERATION_NAME_VALUE_EXECUTE_TOOL, 40 47 } from "@opentelemetry/semantic-conventions/incubating"; 41 48 import type { Turn, Usage, SessionState, DeferredAgentTool } from "./types.js"; 42 49 import { readTranscript, groupIntoTurns } from "./transcript.js"; ··· 126 133 kind: SpanKind.SERVER, 127 134 startTime: new Date(turnStartMs), 128 135 attributes: { 136 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 137 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 129 138 "claude_code.turn_number": turnNum, 130 - "session.id": sessionId, 131 139 }, 132 140 }, turnParentCtx); 133 141 ··· 162 170 kind: SpanKind.INTERNAL, 163 171 startTime: new Date(toolStartMs), 164 172 attributes: { 173 + [ATTR_GEN_AI_OPERATION_NAME]: GEN_AI_OPERATION_NAME_VALUE_EXECUTE_TOOL, 174 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 165 175 [ATTR_GEN_AI_TOOL_NAME]: toolCall.tool_use.name, 166 - "session.id": sessionId, 176 + [ATTR_GEN_AI_TOOL_CALL_ID]: toolCall.tool_use.id, 177 + [ATTR_GEN_AI_TOOL_TYPE]: "extension", 178 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 179 + ...(toolCall.agentId ? 
{ 180 + [ATTR_GEN_AI_AGENT_ID]: toolCall.agentId, 181 + } : {}), 167 182 }, 168 183 }, turnCtx); 169 184 ··· 187 202 [ATTR_GEN_AI_REQUEST_MODEL]: llmCall.model, 188 203 [ATTR_GEN_AI_RESPONSE_MODEL]: llmCall.model, 189 204 [ATTR_GEN_AI_RESPONSE_FINISH_REASONS]: [finishReason], 190 - "session.id": sessionId, 205 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 191 206 ...usageAttributes(llmCall.usage), 192 207 }; 208 + 209 + if (llmCall.messageId) { 210 + llmAttrs[ATTR_GEN_AI_RESPONSE_ID] = llmCall.messageId; 211 + } 193 212 194 213 if (llmCall.synthetic) { 195 214 llmAttrs["claude_code.synthetic"] = true; ··· 296 315 kind: SpanKind.SERVER, 297 316 startTime: new Date(startMs), 298 317 attributes: { 318 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 319 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 299 320 "claude_code.turn_number": turnNum, 300 - "session.id": sessionId, 301 321 }, 302 322 }, parentCtx); 303 323 ··· 366 386 kind: SpanKind.INTERNAL, 367 387 startTime: new Date(subStartMs), 368 388 attributes: { 369 - "claude_code.agent_type": toolName, 370 - "claude_code.agent_id": sub.agent_id, 371 - "session.id": sessionId, 389 + [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_ANTHROPIC, 390 + [ATTR_GEN_AI_AGENT_NAME]: toolName, 391 + [ATTR_GEN_AI_AGENT_ID]: sub.agent_id, 392 + [ATTR_GEN_AI_CONVERSATION_ID]: sessionId, 372 393 }, 373 394 }, parentCtx); 374 395
+3
src/transcript.ts
··· 202 202 usage: Usage; 203 203 startTime: string; 204 204 endTime: string; 205 + messageId?: string; 205 206 } { 206 207 if (chunks.length === 0) { 207 208 throw new Error("Cannot merge zero chunks"); ··· 220 221 usage: last.message.usage, // SSE usage is cumulative; last chunk has final totals. 221 222 startTime: first.timestamp, 222 223 endTime: last.timestamp, 224 + messageId: first.message.id, 223 225 }; 224 226 } 225 227 ··· 338 340 startTime: merged.startTime, 339 341 endTime: merged.endTime, 340 342 toolCalls, 343 + messageId: merged.messageId, 341 344 }); 342 345 } 343 346
+2
src/types.ts
··· 143 143 toolCalls: ToolCall[]; 144 144 /** True if this LLM call was synthesized (not from the transcript). */ 145 145 synthetic?: boolean; 146 + /** Message ID from the API response (for gen_ai.response.id). */ 147 + messageId?: string; 146 148 } 147 149 148 150 /** A complete turn: one user prompt → one or more LLM calls. */