From cb47d3e7b492859d8186e838ecb5dba4d5f42868 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9F=A9=E7=BF=94=E5=AE=87?= Date: Mon, 9 Feb 2026 21:17:39 +0800 Subject: [PATCH 1/2] fix(adapters): forward extra_body params and return reasoning_content properly MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Request adapters had a KNOWN_FIELDS set that excluded recognized fields (e.g. response_format, presence_penalty, seed) from extraParams passthrough, but those fields were never mapped in parse() either — causing them to be silently dropped when forwarding to upstream providers. Rename to MAPPED_FIELDS containing only fields actually consumed by parse(), so all other fields flow through extraParams to the upstream provider. The response adapter for OpenAI Chat wrapped reasoning_content in thinking tags inside the content field. This was originally only used for DB logging, but was incorrectly carried over into client response serialization during the adapter refactor (09a0ce9). Restore the original behavior: return reasoning_content as a separate field for client responses, keep wrapping only for database storage. 
Co-Authored-By: Claude Opus 4.6 --- backend/src/adapters/request/anthropic.ts | 9 +++--- backend/src/adapters/request/openai-chat.ts | 18 ++++------- .../src/adapters/request/openai-response.ts | 13 ++++---- backend/src/adapters/response/openai-chat.ts | 30 ++++++++++++------- 4 files changed, 35 insertions(+), 35 deletions(-) diff --git a/backend/src/adapters/request/anthropic.ts b/backend/src/adapters/request/anthropic.ts index 10e1e99..7db6ffb 100644 --- a/backend/src/adapters/request/anthropic.ts +++ b/backend/src/adapters/request/anthropic.ts @@ -70,10 +70,12 @@ interface AnthropicRequest { } // ============================================================================= -// Known Fields (for extracting extra body) +// Mapped Fields (fields explicitly extracted by parse(), excluded from extraParams) +// All other fields (e.g. metadata) are passed through via extraParams +// to the upstream provider. // ============================================================================= -const KNOWN_FIELDS = new Set([ +const MAPPED_FIELDS = new Set([ "model", "messages", "system", @@ -85,7 +87,6 @@ const KNOWN_FIELDS = new Set([ "stop_sequences", "tools", "tool_choice", - "metadata", ]); // ============================================================================= @@ -343,7 +344,7 @@ export const anthropicRequestAdapter: RequestAdapter = { let hasExtra = false; for (const [key, value] of Object.entries(body)) { - if (!KNOWN_FIELDS.has(key)) { + if (!MAPPED_FIELDS.has(key)) { extra[key] = value; hasExtra = true; } diff --git a/backend/src/adapters/request/openai-chat.ts b/backend/src/adapters/request/openai-chat.ts index ef6839f..4a5a188 100644 --- a/backend/src/adapters/request/openai-chat.ts +++ b/backend/src/adapters/request/openai-chat.ts @@ -112,30 +112,22 @@ interface OpenAIChatRequest { } // ============================================================================= -// Known Fields (for extracting extra body) +// Mapped Fields (fields explicitly extracted 
by parse(), excluded from extraParams) +// All other fields (e.g. response_format, presence_penalty, seed, etc.) +// are passed through via extraParams to the upstream provider. // ============================================================================= -const KNOWN_FIELDS = new Set([ +const MAPPED_FIELDS = new Set([ "model", "messages", "max_tokens", "max_completion_tokens", "temperature", "top_p", - "n", "stream", - "stream_options", "stop", "tools", "tool_choice", - "presence_penalty", - "frequency_penalty", - "logit_bias", - "logprobs", - "top_logprobs", - "user", - "seed", - "response_format", ]); // ============================================================================= @@ -340,7 +332,7 @@ export const openaiChatRequestAdapter: RequestAdapter = { let hasExtra = false; for (const [key, value] of Object.entries(body)) { - if (!KNOWN_FIELDS.has(key)) { + if (!MAPPED_FIELDS.has(key)) { extra[key] = value; hasExtra = true; } diff --git a/backend/src/adapters/request/openai-response.ts b/backend/src/adapters/request/openai-response.ts index 296ed7e..777d83f 100644 --- a/backend/src/adapters/request/openai-response.ts +++ b/backend/src/adapters/request/openai-response.ts @@ -109,24 +109,21 @@ interface ResponseApiRequest { } // ============================================================================= -// Known Fields (for extracting extra body) +// Mapped Fields (fields explicitly extracted by parse(), excluded from extraParams) +// All other fields (e.g. modalities, parallel_tool_calls, store, metadata, etc.) +// are passed through via extraParams to the upstream provider. 
// ============================================================================= -const KNOWN_FIELDS = new Set([ +const MAPPED_FIELDS = new Set([ "model", "input", "instructions", - "modalities", "max_output_tokens", "temperature", "top_p", "stream", "tools", "tool_choice", - "parallel_tool_calls", - "previous_response_id", - "store", - "metadata", ]); // ============================================================================= @@ -298,7 +295,7 @@ export const openaiResponseRequestAdapter: RequestAdapter = let hasExtra = false; for (const [key, value] of Object.entries(body)) { - if (!KNOWN_FIELDS.has(key)) { + if (!MAPPED_FIELDS.has(key)) { extra[key] = value; hasExtra = true; } diff --git a/backend/src/adapters/response/openai-chat.ts b/backend/src/adapters/response/openai-chat.ts index e6a4660..d5a7a40 100644 --- a/backend/src/adapters/response/openai-chat.ts +++ b/backend/src/adapters/response/openai-chat.ts @@ -35,6 +35,7 @@ interface OpenAIChatChoice { interface OpenAIChatMessage { role: "assistant"; content: string | null; + reasoning_content?: string | null; tool_calls?: OpenAIToolCall[]; refusal?: string | null; } @@ -113,28 +114,35 @@ function convertStopReason(stopReason: StopReason): string | null { } /** - * Extract text content from internal content blocks + * Extract text content from internal content blocks (excludes thinking) */ function extractTextContent(content: InternalContentBlock[]): string { const textParts: string[] = []; - const thinkingParts: string[] = []; for (const block of content) { if (block.type === "text") { textParts.push(block.text); - } else if (block.type === "thinking") { - thinkingParts.push(block.thinking); } } - // Prepend thinking content wrapped in tags if present - let result = ""; - if (thinkingParts.length > 0) { - result += `${thinkingParts.join("")}\n`; + return textParts.join(""); +} + +/** + * Extract reasoning/thinking content from internal content blocks + */ +function extractReasoningContent( + content: 
InternalContentBlock[], +): string | undefined { + const thinkingParts: string[] = []; + + for (const block of content) { + if (block.type === "thinking") { + thinkingParts.push(block.thinking); + } } - result += textParts.join(""); - return result; + return thinkingParts.length > 0 ? thinkingParts.join("") : undefined; } /** @@ -169,6 +177,7 @@ export const openaiChatResponseAdapter: ResponseAdapter = serialize(response: InternalResponse): OpenAIChatCompletion { const content = extractTextContent(response.content); + const reasoningContent = extractReasoningContent(response.content); const toolCalls = convertToolCalls(response.content); return { @@ -182,6 +191,7 @@ export const openaiChatResponseAdapter: ResponseAdapter = message: { role: "assistant", content: content || null, + ...(reasoningContent !== undefined && { reasoning_content: reasoningContent }), tool_calls: toolCalls, }, finish_reason: convertStopReason(response.stopReason), From edd8abe457e5b6cc5045c78caac6319ea837d005 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9F=A9=E7=BF=94=E5=AE=87?= Date: Mon, 9 Feb 2026 21:23:05 +0800 Subject: [PATCH 2/2] refactor: combine extractTextContent and extractReasoningContent into single pass Merge two separate functions that iterated over the same content blocks array into a single extractContent() function that returns both text and reasoning in one pass. 
Co-Authored-By: Claude Opus 4.6 --- backend/src/adapters/response/openai-chat.ts | 33 +++++++------------- 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/backend/src/adapters/response/openai-chat.ts b/backend/src/adapters/response/openai-chat.ts index d5a7a40..2fa81ea 100644 --- a/backend/src/adapters/response/openai-chat.ts +++ b/backend/src/adapters/response/openai-chat.ts @@ -114,35 +114,26 @@ function convertStopReason(stopReason: StopReason): string | null { } /** - * Extract text content from internal content blocks (excludes thinking) + * Extract text and reasoning content from internal content blocks */ -function extractTextContent(content: InternalContentBlock[]): string { +function extractContent( + content: InternalContentBlock[], +): { text: string; reasoning?: string } { const textParts: string[] = []; + const thinkingParts: string[] = []; for (const block of content) { if (block.type === "text") { textParts.push(block.text); - } - } - - return textParts.join(""); -} - -/** - * Extract reasoning/thinking content from internal content blocks - */ -function extractReasoningContent( - content: InternalContentBlock[], -): string | undefined { - const thinkingParts: string[] = []; - - for (const block of content) { - if (block.type === "thinking") { + } else if (block.type === "thinking") { thinkingParts.push(block.thinking); } } - return thinkingParts.length > 0 ? thinkingParts.join("") : undefined; + return { + text: textParts.join(""), + reasoning: thinkingParts.length > 0 ? 
thinkingParts.join("") : undefined, + }; } /** @@ -176,8 +167,8 @@ export const openaiChatResponseAdapter: ResponseAdapter = format: "openai-chat", serialize(response: InternalResponse): OpenAIChatCompletion { - const content = extractTextContent(response.content); - const reasoningContent = extractReasoningContent(response.content); + const { text: content, reasoning: reasoningContent } = + extractContent(response.content); const toolCalls = convertToolCalls(response.content); return {