-
-
Notifications
You must be signed in to change notification settings - Fork 153
fix(anthropic): preserve thinking blocks across tool-call turns #336
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -61,6 +61,8 @@ type AnthropicContentBlocks = | |
| : never | ||
| type AnthropicContentBlock = | ||
| AnthropicContentBlocks extends Array<infer Block> ? Block : never | ||
| type AnthropicThinkingBlock = { thinking: string; signature: string } | ||
| type ThinkingByToolCallBatchKey = Map<string, Array<AnthropicThinkingBlock>> | ||
|
|
||
| // =========================== | ||
| // Type Resolution Helpers | ||
|
|
@@ -119,7 +121,11 @@ export class AnthropicTextAdapter< | |
| options: TextOptions<AnthropicTextProviderOptions>, | ||
| ): AsyncIterable<StreamChunk> { | ||
| try { | ||
| const requestParams = this.mapCommonOptionsToAnthropic(options) | ||
| const thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey = new Map() | ||
| const requestParams = this.mapCommonOptionsToAnthropic( | ||
| options, | ||
| thinkingByToolCallBatchKey, | ||
| ) | ||
|
|
||
| const stream = await this.client.beta.messages.create( | ||
| { ...requestParams, stream: true }, | ||
|
|
@@ -129,8 +135,11 @@ export class AnthropicTextAdapter< | |
| }, | ||
| ) | ||
|
|
||
| yield* this.processAnthropicStream(stream, options.model, () => | ||
| generateId(this.name), | ||
| yield* this.processAnthropicStream( | ||
| stream, | ||
| options.model, | ||
| () => generateId(this.name), | ||
| thinkingByToolCallBatchKey, | ||
| ) | ||
| } catch (error: unknown) { | ||
| const err = error as Error & { status?: number; code?: string } | ||
|
|
@@ -156,8 +165,12 @@ export class AnthropicTextAdapter< | |
| options: StructuredOutputOptions<AnthropicTextProviderOptions>, | ||
| ): Promise<StructuredOutputResult<unknown>> { | ||
| const { chatOptions, outputSchema } = options | ||
| const thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey = new Map() | ||
|
|
||
| const requestParams = this.mapCommonOptionsToAnthropic(chatOptions) | ||
| const requestParams = this.mapCommonOptionsToAnthropic( | ||
| chatOptions, | ||
| thinkingByToolCallBatchKey, | ||
| ) | ||
|
|
||
| // Create a tool that will capture the structured output | ||
| // Anthropic's SDK requires input_schema with type: 'object' literal | ||
|
|
@@ -232,12 +245,16 @@ export class AnthropicTextAdapter< | |
|
|
||
| private mapCommonOptionsToAnthropic( | ||
| options: TextOptions<AnthropicTextProviderOptions>, | ||
| thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey, | ||
| ) { | ||
| const modelOptions = options.modelOptions as | ||
| | InternalTextProviderOptions | ||
| | undefined | ||
|
|
||
| const formattedMessages = this.formatMessages(options.messages) | ||
| const formattedMessages = this.formatMessages( | ||
| options.messages, | ||
| thinkingByToolCallBatchKey, | ||
| ) | ||
| const tools = options.tools | ||
| ? convertToolsToProviderFormat(options.tools) | ||
| : undefined | ||
|
|
@@ -294,6 +311,10 @@ export class AnthropicTextAdapter< | |
| return requestParams | ||
| } | ||
|
|
||
| private getToolCallBatchKey(toolCallIds: Array<string>): string { | ||
| return toolCallIds.join('|') | ||
| } | ||
|
Comment on lines
+314
to
+316
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Use a deterministic and collision-safe tool-call batch key. Line [315] uses `toolCallIds.join('|')`, which is order-sensitive and can collide when IDs themselves contain `|`. 💡 Proposed fix: private getToolCallBatchKey(toolCallIds: Array<string>): string {
- return toolCallIds.join('|')
+ return JSON.stringify([...toolCallIds].sort())
}Also applies to: 413-417, 770-779 🤖 Prompt for AI Agents |
||
|
|
||
| private convertContentPartToAnthropic( | ||
| part: ContentPart, | ||
| ): TextBlockParam | ImageBlockParam | DocumentBlockParam { | ||
|
|
@@ -365,6 +386,7 @@ export class AnthropicTextAdapter< | |
|
|
||
| private formatMessages( | ||
| messages: Array<ModelMessage>, | ||
| thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey, | ||
| ): InternalTextProviderOptions['messages'] { | ||
| const formattedMessages: InternalTextProviderOptions['messages'] = [] | ||
|
|
||
|
|
@@ -388,6 +410,21 @@ export class AnthropicTextAdapter< | |
|
|
||
| if (role === 'assistant' && message.toolCalls?.length) { | ||
| const contentBlocks: AnthropicContentBlocks = [] | ||
| const preservedThinkingBlocks = thinkingByToolCallBatchKey.get( | ||
| this.getToolCallBatchKey( | ||
| message.toolCalls.map((toolCall) => toolCall.id), | ||
| ), | ||
| ) | ||
|
|
||
| if (preservedThinkingBlocks) { | ||
| for (const thinkingBlock of preservedThinkingBlocks) { | ||
| contentBlocks.push({ | ||
| type: 'thinking', | ||
| thinking: thinkingBlock.thinking, | ||
| signature: thinkingBlock.signature, | ||
| } as unknown as AnthropicContentBlock) | ||
| } | ||
| } | ||
|
|
||
| if (message.content) { | ||
| const content = | ||
|
|
@@ -525,6 +562,7 @@ export class AnthropicTextAdapter< | |
| stream: AsyncIterable<Anthropic_SDK.Beta.BetaRawMessageStreamEvent>, | ||
| model: string, | ||
| genId: () => string, | ||
| thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey, | ||
| ): AsyncIterable<StreamChunk> { | ||
| let accumulatedContent = '' | ||
| let accumulatedThinking = '' | ||
|
|
@@ -533,6 +571,7 @@ export class AnthropicTextAdapter< | |
| number, | ||
| { id: string; name: string; input: string; started: boolean } | ||
| >() | ||
| const completedThinkingBlocks: Array<AnthropicThinkingBlock> = [] | ||
| let currentToolIndex = -1 | ||
|
|
||
| // AG-UI lifecycle tracking | ||
|
|
@@ -544,6 +583,7 @@ export class AnthropicTextAdapter< | |
| let hasEmittedRunFinished = false | ||
| // Track current content block type for proper content_block_stop handling | ||
| let currentBlockType: string | null = null | ||
| let currentThinkingBlock: AnthropicThinkingBlock | null = null | ||
|
|
||
| try { | ||
| for await (const event of stream) { | ||
|
|
@@ -570,6 +610,10 @@ export class AnthropicTextAdapter< | |
| }) | ||
| } else if (event.content_block.type === 'thinking') { | ||
| accumulatedThinking = '' | ||
| currentThinkingBlock = { | ||
| thinking: '', | ||
| signature: '', | ||
| } | ||
| // Emit STEP_STARTED for thinking | ||
| stepId = genId() | ||
| yield { | ||
|
|
@@ -607,6 +651,9 @@ export class AnthropicTextAdapter< | |
| } else if (event.delta.type === 'thinking_delta') { | ||
| const delta = event.delta.thinking | ||
| accumulatedThinking += delta | ||
| if (currentThinkingBlock) { | ||
| currentThinkingBlock.thinking += delta | ||
| } | ||
| yield { | ||
| type: 'STEP_FINISHED', | ||
| stepId: stepId || genId(), | ||
|
|
@@ -615,6 +662,10 @@ export class AnthropicTextAdapter< | |
| delta, | ||
| content: accumulatedThinking, | ||
| } | ||
| } else if (event.delta.type === 'signature_delta') { | ||
| if (currentThinkingBlock) { | ||
| currentThinkingBlock.signature = event.delta.signature | ||
| } | ||
| } else if (event.delta.type === 'input_json_delta') { | ||
| const existing = toolCallsMap.get(currentToolIndex) | ||
| if (existing) { | ||
|
|
@@ -681,6 +732,11 @@ export class AnthropicTextAdapter< | |
| // Reset so a new TEXT_MESSAGE_START is emitted if text follows tool calls | ||
| hasEmittedTextMessageStart = false | ||
| } | ||
| } else if (currentBlockType === 'thinking') { | ||
| if (currentThinkingBlock) { | ||
| completedThinkingBlocks.push(currentThinkingBlock) | ||
| currentThinkingBlock = null | ||
| } | ||
| } else { | ||
| // Emit TEXT_MESSAGE_END only for text blocks (not tool_use blocks) | ||
| if (hasEmittedTextMessageStart && accumulatedContent) { | ||
|
|
@@ -711,6 +767,18 @@ export class AnthropicTextAdapter< | |
| hasEmittedRunFinished = true | ||
| switch (event.delta.stop_reason) { | ||
| case 'tool_use': { | ||
| const toolCallIds = Array.from(toolCallsMap.values()).map( | ||
| (toolCall) => toolCall.id, | ||
| ) | ||
| if ( | ||
| completedThinkingBlocks.length > 0 && | ||
| toolCallIds.length > 0 | ||
| ) { | ||
| thinkingByToolCallBatchKey.set( | ||
| this.getToolCallBatchKey(toolCallIds), | ||
| completedThinkingBlocks, | ||
| ) | ||
| } | ||
| yield { | ||
| type: 'RUN_FINISHED', | ||
| runId, | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Persist thinking cache across turns, not per method call.
Line [124] and Line [168] create a fresh
`thinkingByToolCallBatchKey` each invocation, so data written at Line [777] is dropped before the next tool-loop turn. Then Line [413] can’t rehydrate prior thinking for assistant tool-call messages. 💡 Proposed fix
export class AnthropicTextAdapter< @@ > extends BaseTextAdapter< @@ > { readonly kind = 'text' as const readonly name = 'anthropic' as const private client: Anthropic_SDK + private thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey = new Map() @@ async *chatStream( options: TextOptions<AnthropicTextProviderOptions>, ): AsyncIterable<StreamChunk> { try { - const thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey = new Map() const requestParams = this.mapCommonOptionsToAnthropic( options, - thinkingByToolCallBatchKey, + this.thinkingByToolCallBatchKey, ) @@ yield* this.processAnthropicStream( stream, options.model, () => generateId(this.name), - thinkingByToolCallBatchKey, + this.thinkingByToolCallBatchKey, ) @@ async structuredOutput( options: StructuredOutputOptions<AnthropicTextProviderOptions>, ): Promise<StructuredOutputResult<unknown>> { const { chatOptions, outputSchema } = options - const thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey = new Map() const requestParams = this.mapCommonOptionsToAnthropic( chatOptions, - thinkingByToolCallBatchKey, + this.thinkingByToolCallBatchKey, )Also applies to: 168-173, 413-417, 777-781
🤖 Prompt for AI Agents
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Hm, you're right. I looked into it — I'm not positive what the right way to do this is.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@imsherrill, happy to walk through the tradeoffs. Here's the core problem and the cleanest solution:Why the instance field is the right approach
The multi-turn tool loop calls
`chatStream` multiple times on the same adapter instance, each time passing the full accumulated message history. The flow looks like: Since
`ModelMessage` has no field for thinking-block metadata, the blocks can't travel inside the message objects themselves. The map has to survive between calls, and the adapter instance is the natural home for it. Revised fix
export class AnthropicTextAdapter<...> extends BaseTextAdapter<...> { readonly kind = 'text' as const readonly name = 'anthropic' as const private client: Anthropic_SDK + private thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey = new Map() // chatStream — remove local map creation async *chatStream(options): AsyncIterable<StreamChunk> { try { - const thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey = new Map() const requestParams = this.mapCommonOptionsToAnthropic( options, - thinkingByToolCallBatchKey, + this.thinkingByToolCallBatchKey, ) // ... yield* this.processAnthropicStream( stream, options.model, () => generateId(this.name), - thinkingByToolCallBatchKey, + this.thinkingByToolCallBatchKey, ) } } // structuredOutput — remove local map creation async structuredOutput(options): Promise<StructuredOutputResult<unknown>> { const { chatOptions, outputSchema } = options - const thinkingByToolCallBatchKey: ThinkingByToolCallBatchKey = new Map() const requestParams = this.mapCommonOptionsToAnthropic( chatOptions, - thinkingByToolCallBatchKey, + this.thinkingByToolCallBatchKey, ) // ... } }Why the alternatives don't work as well
Map (current) vs. extending ModelMessage: adding thinking-block metadata to the `ModelMessage` type would have to ripple across the whole `@tanstack/ai` package. One thing worth noting: if the same adapter instance is reused across completely independent conversations, stale entries could accumulate. You could add a
`clearThinkingCache()` method or clear the map at the start of a new conversation if that's a concern — but for the typical single-conversation lifecycle this is fine. Summary of changes: promote `thinkingByToolCallBatchKey` to a private instance field initialized to `new Map()`; remove the local `new Map()` declarations in `chatStream` and `structuredOutput`; pass `this.thinkingByToolCallBatchKey` instead. 🧠 Learnings used
💡 Use
@coderabbitai plan to create a plan for code changes