From 9fbb8a4d3b1671c5367fe920ba01943785fd13b1 Mon Sep 17 00:00:00 2001
From: ihoo <1506821833@qq.com>
Date: Sat, 25 Apr 2026 03:08:51 +0800
Subject: [PATCH] fix: preserve empty reasoning_content for DeepSeek V4 in
 non-streaming and streaming paths

Three truthy checks were dropping empty reasoning_content ('') from
DeepSeek V4's thinking mode responses, causing 'must be passed back to
the API' errors in multi-turn tool call chains:

1. Non-streaming parser:
   if (reasoning != null && reasoning.length > 0)
   -> Now: if (reasoning != null) to preserve empty strings

2. Streaming parser:
   if (reasoningContent) truthy check
   -> Now: if ('reasoning_text' in delta) to detect field presence

3. Outbound converter:
   if (part.text) reasoningText = part.text
   -> Now: reasoningText = part.text ?? '' to preserve empty text

PR #24146 fixed the transform.ts path; this completes the remaining cases.
---
 ...vert-to-openai-compatible-chat-messages.ts |  4 +++-
 .../openai-compatible-chat-language-model.ts  | 22 +++++++++++--------
 2 files changed, 16 insertions(+), 10 deletions(-)

diff --git a/packages/opencode/src/provider/sdk/copilot/chat/convert-to-openai-compatible-chat-messages.ts b/packages/opencode/src/provider/sdk/copilot/chat/convert-to-openai-compatible-chat-messages.ts
index c4e15e0b4fb2..77744b835402 100644
--- a/packages/opencode/src/provider/sdk/copilot/chat/convert-to-openai-compatible-chat-messages.ts
+++ b/packages/opencode/src/provider/sdk/copilot/chat/convert-to-openai-compatible-chat-messages.ts
@@ -95,7 +95,9 @@ export function convertToOpenAICompatibleChatMessages(prompt: LanguageModelV3Pro
           break
         }
         case "reasoning": {
-          if (part.text) reasoningText = part.text
+          // Preserve empty reasoning text — some providers (e.g. DeepSeek V4)
+          // require reasoning_content: "" to be sent back in multi-turn chains.
+          reasoningText = part.text ?? ""
           break
         }
         case "tool-call": {
diff --git a/packages/opencode/src/provider/sdk/copilot/chat/openai-compatible-chat-language-model.ts b/packages/opencode/src/provider/sdk/copilot/chat/openai-compatible-chat-language-model.ts
index 280970c41b4f..5e910d97faf5 100644
--- a/packages/opencode/src/provider/sdk/copilot/chat/openai-compatible-chat-language-model.ts
+++ b/packages/opencode/src/provider/sdk/copilot/chat/openai-compatible-chat-language-model.ts
@@ -227,8 +227,10 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV3 {
     }

     // reasoning content (Copilot uses reasoning_text):
+    // Preserve empty reasoning_text (e.g. DeepSeek V4 returns reasoning_content: "")
+    // because some providers require it to be sent back verbatim in multi-turn chains.
     const reasoning = choice.message.reasoning_text
-    if (reasoning != null && reasoning.length > 0) {
+    if (reasoning != null) {
       content.push({
         type: "reasoning",
         text: reasoning,
@@ -477,9 +479,10 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV3 {
             reasoningOpaque = delta.reasoning_opaque
           }

-          // enqueue reasoning before text deltas (Copilot uses reasoning_text):
+          // enqueue reasoning before text deltas (Copilot uses reasoning_text).
+          // Handle empty reasoning_text (e.g. DeepSeek V4 may stream reasoning_content: "").
           const reasoningContent = delta.reasoning_text
-          if (reasoningContent) {
+          if ("reasoning_text" in delta) {
             if (!isActiveReasoning) {
               controller.enqueue({
                 type: "reasoning-start",
@@ -487,12 +490,13 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV3 {
               })
               isActiveReasoning = true
             }
-
-            controller.enqueue({
-              type: "reasoning-delta",
-              id: "reasoning-0",
-              delta: reasoningContent,
-            })
+            if (reasoningContent) {
+              controller.enqueue({
+                type: "reasoning-delta",
+                id: "reasoning-0",
+                delta: reasoningContent,
+              })
+            }
           }

           if (delta.content) {