Skip to content

Commit 53abc6c

Browse files
EBrown and claude committed
feat: cherry-pick 4 upstream fixes (PRs anomalyco#13502, anomalyco#12585, anomalyco#15183, anomalyco#14783)
- anomalyco#13502: Retry on timeout errors instead of failing — DOMException TimeoutError now marked as retryable
- anomalyco#12585: Generate fallback tool call IDs for providers (NVIDIA NIM, GLM, Bedrock, Chutes) that omit the id field in streaming responses
- anomalyco#15183: Prevent literal NUL file creation on POSIX — redirects to /dev/null instead of writing "nul" files
- anomalyco#14783: Block reasoningSummary injection for GPT-5 through openai-compatible adapter (keeps reasoningEffort, strips summary)

Co-Authored-By: Claude Opus 4.6 <[email protected]>
1 parent b605517 commit 53abc6c

9 files changed

Lines changed: 144 additions & 18 deletions

File tree

packages/opencode/src/provider/sdk/copilot/chat/openai-compatible-chat-language-model.ts

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,8 @@ import { defaultOpenAICompatibleErrorStructure, type ProviderErrorStructure } fr
3030
import type { MetadataExtractor } from "./openai-compatible-metadata-extractor"
3131
import { prepareTools } from "./openai-compatible-prepare-tools"
3232

33+
const UNSUPPORTED_CHAT_COMPLETIONS_OPTIONS = new Set(["reasoningSummary"])
34+
3335
export type OpenAICompatibleChatConfig = {
3436
provider: string
3537
headers: () => Record<string, string | undefined>
@@ -168,7 +170,9 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
168170
seed,
169171
...Object.fromEntries(
170172
Object.entries(providerOptions?.[this.providerOptionsName] ?? {}).filter(
171-
([key]) => !Object.keys(openaiCompatibleProviderOptions.shape).includes(key),
173+
([key]) =>
174+
!Object.keys(openaiCompatibleProviderOptions.shape).includes(key) &&
175+
!UNSUPPORTED_CHAT_COMPLETIONS_OPTIONS.has(key),
172176
),
173177
),
174178

@@ -517,13 +521,21 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
517521
const index = toolCallDelta.index
518522

519523
if (toolCalls[index] == null) {
520-
if (toolCallDelta.id == null) {
524+
// Providers known to not send tool call IDs (see https://github.com/anomalyco/opencode/issues/6290)
525+
const providersMissingToolCallId = ["nvidia", "glm", "bedrock", "chutes"]
526+
const shouldGenerateFallbackId = providersMissingToolCallId.some((p) =>
527+
providerOptionsName.toLowerCase().includes(p),
528+
)
529+
530+
if (toolCallDelta.id == null && !shouldGenerateFallbackId) {
521531
throw new InvalidResponseDataError({
522532
data: toolCallDelta,
523533
message: `Expected 'id' to be a string.`,
524534
})
525535
}
526536

537+
const toolCallId = toolCallDelta.id ?? generateId()
538+
527539
if (toolCallDelta.function?.name == null) {
528540
throw new InvalidResponseDataError({
529541
data: toolCallDelta,
@@ -533,12 +545,12 @@ export class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {
533545

534546
controller.enqueue({
535547
type: "tool-input-start",
536-
id: toolCallDelta.id,
548+
id: toolCallId,
537549
toolName: toolCallDelta.function.name,
538550
})
539551

540552
toolCalls[index] = {
541-
id: toolCallDelta.id,
553+
id: toolCallId,
542554
type: "function",
543555
function: {
544556
name: toolCallDelta.function.name,

packages/opencode/src/provider/transform.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -759,7 +759,9 @@ export namespace ProviderTransform {
759759
if (input.model.api.id.includes("gpt-5") && !input.model.api.id.includes("gpt-5-chat")) {
760760
if (!input.model.api.id.includes("gpt-5-pro")) {
761761
result["reasoningEffort"] = "medium"
762-
result["reasoningSummary"] = "auto"
762+
if (input.model.api.npm !== "@ai-sdk/openai-compatible") {
763+
result["reasoningSummary"] = "auto"
764+
}
763765
}
764766

765767
// Only set textVerbosity for non-chat gpt-5.x models

packages/opencode/src/session/message-v2.ts

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -840,6 +840,14 @@ export namespace MessageV2 {
840840
cause: e,
841841
},
842842
).toObject()
843+
case e instanceof DOMException && e.name === "TimeoutError":
844+
return new MessageV2.APIError(
845+
{
846+
message: e.message || "Operation timed out",
847+
isRetryable: true,
848+
},
849+
{ cause: e },
850+
).toObject()
843851
case MessageV2.OutputLengthError.isInstance(e):
844852
return e
845853
case LoadAPIKeyError.isInstance(e):

packages/opencode/src/session/prompt.ts

Lines changed: 13 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,7 @@ import { InjectionBudget } from "../util/injection-budget"
5151
import { iife } from "@/util/iife"
5252
import { Shell } from "@/shell/shell"
5353
import { Truncate } from "@/tool/truncation"
54+
import { normalizeNul } from "@/util/redirection"
5455

5556
// @ts-ignore
5657
globalThis.AI_SDK_LOG_WARNINGS = false
@@ -1581,6 +1582,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
15811582
})
15821583
export type ShellInput = z.infer<typeof ShellInput>
15831584
export async function shell(input: ShellInput) {
1585+
const command = normalizeNul(input.command, process.platform)
15841586
const abort = start(input.sessionID)
15851587
if (!abort) {
15861588
throw new Session.BusyError(input.sessionID)
@@ -1667,7 +1669,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
16671669
start: Date.now(),
16681670
},
16691671
input: {
1670-
command: input.command,
1672+
command,
16711673
},
16721674
},
16731675
}
@@ -1679,10 +1681,10 @@ NOTE: At any point in time through this workflow you should feel free to ask the
16791681

16801682
const invocations: Record<string, { args: string[] }> = {
16811683
nu: {
1682-
args: ["-c", input.command],
1684+
args: ["-c", command],
16831685
},
16841686
fish: {
1685-
args: ["-c", input.command],
1687+
args: ["-c", command],
16861688
},
16871689
zsh: {
16881690
args: [
@@ -1691,7 +1693,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the
16911693
`
16921694
[[ -f ~/.zshenv ]] && source ~/.zshenv >/dev/null 2>&1 || true
16931695
[[ -f "\${ZDOTDIR:-$HOME}/.zshrc" ]] && source "\${ZDOTDIR:-$HOME}/.zshrc" >/dev/null 2>&1 || true
1694-
eval ${JSON.stringify(input.command)}
1696+
eval ${JSON.stringify(command)}
16951697
`,
16961698
],
16971699
},
@@ -1702,25 +1704,25 @@ NOTE: At any point in time through this workflow you should feel free to ask the
17021704
`
17031705
shopt -s expand_aliases
17041706
[[ -f ~/.bashrc ]] && source ~/.bashrc >/dev/null 2>&1 || true
1705-
eval ${JSON.stringify(input.command)}
1707+
eval ${JSON.stringify(command)}
17061708
`,
17071709
],
17081710
},
17091711
// Windows cmd
17101712
cmd: {
1711-
args: ["/c", input.command],
1713+
args: ["/c", command],
17121714
},
17131715
// Windows PowerShell
17141716
powershell: {
1715-
args: ["-NoProfile", "-Command", input.command],
1717+
args: ["-NoProfile", "-Command", command],
17161718
},
17171719
pwsh: {
1718-
args: ["-NoProfile", "-Command", input.command],
1720+
args: ["-NoProfile", "-Command", command],
17191721
},
17201722
// Fallback: any shell that doesn't match those above
17211723
// - No -l, for max compatibility
17221724
"": {
1723-
args: ["-c", `${input.command}`],
1725+
args: ["-c", `${command}`],
17241726
},
17251727
}
17261728

@@ -1736,13 +1738,14 @@ NOTE: At any point in time through this workflow you should feel free to ask the
17361738
const proc = spawn(shell, args, {
17371739
cwd,
17381740
detached: process.platform !== "win32",
1739-
stdio: ["ignore", "pipe", "pipe"],
1741+
stdio: ["pipe", "pipe", "pipe"],
17401742
env: {
17411743
...process.env,
17421744
...shellEnv.env,
17431745
TERM: "dumb",
17441746
},
17451747
})
1748+
proc.stdin?.end()
17461749

17471750
let output = ""
17481751

packages/opencode/src/tool/bash.ts

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ import { Shell } from "@/shell/shell"
1717
import { BashArity } from "@/permission/arity"
1818
import { Truncate } from "./truncation"
1919
import { Plugin } from "@/plugin"
20+
import { normalizeNul } from "@/util/redirection"
2021

2122
const MAX_METADATA_LENGTH = 30_000
2223
const DEFAULT_TIMEOUT = Flag.OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS || 2 * 60 * 1000
@@ -76,12 +77,13 @@ export const BashTool = Tool.define("bash", async () => {
7677
),
7778
}),
7879
async execute(params, ctx) {
80+
const command = normalizeNul(params.command, process.platform)
7981
const cwd = params.workdir || Instance.directory
8082
if (params.timeout !== undefined && params.timeout < 0) {
8183
throw new Error(`Invalid timeout value: ${params.timeout}. Timeout must be a positive number.`)
8284
}
8385
const timeout = params.timeout ?? DEFAULT_TIMEOUT
84-
const tree = await parser().then((p) => p.parse(params.command))
86+
const tree = await parser().then((p) => p.parse(command))
8587
if (!tree) {
8688
throw new Error("Failed to parse command")
8789
}
@@ -169,16 +171,17 @@ export const BashTool = Tool.define("bash", async () => {
169171
{ cwd, sessionID: ctx.sessionID, callID: ctx.callID },
170172
{ env: {} },
171173
)
172-
const proc = spawn(params.command, {
174+
const proc = spawn(command, {
173175
shell,
174176
cwd,
175177
env: {
176178
...process.env,
177179
...shellEnv.env,
178180
},
179-
stdio: ["ignore", "pipe", "pipe"],
181+
stdio: ["pipe", "pipe", "pipe"],
180182
detached: process.platform !== "win32",
181183
})
184+
proc.stdin?.end()
182185

183186
let output = ""
184187

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
const OPS = "(?:\\d+>>?|>>|>|&>|>&)"
2+
3+
export function normalizeNul(command: string, platform: string) {
4+
if (platform === "win32") return command
5+
6+
return command
7+
.replace(new RegExp(`(^|[\\s;|&(])(${OPS}\\s*)["']nul["'](?=($|[\\s;|&)]))`, "gi"), "$1$2/dev/null")
8+
.replace(new RegExp(`(^|[\\s;|&(])(${OPS}\\s*)nul(?=($|[\\s;|&)]))`, "gi"), "$1$2/dev/null")
9+
}

packages/opencode/test/provider/copilot/copilot-chat-model.test.ts

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -589,4 +589,38 @@ describe("request body", () => {
589589
},
590590
])
591591
})
592+
593+
test("should strip reasoningSummary but keep reasoning effort and custom options", async () => {
594+
let capturedBody: any
595+
const mockFetch = mock(async (_url: string, init?: RequestInit) => {
596+
capturedBody = JSON.parse(init?.body as string)
597+
return new Response(
598+
new ReadableStream({
599+
start(controller) {
600+
controller.enqueue(new TextEncoder().encode(`data: [DONE]\n\n`))
601+
controller.close()
602+
},
603+
}),
604+
{ status: 200, headers: { "Content-Type": "text/event-stream" } },
605+
)
606+
})
607+
608+
const model = createModel(mockFetch)
609+
610+
await model.doStream({
611+
prompt: TEST_PROMPT,
612+
providerOptions: {
613+
copilot: {
614+
reasoningEffort: "high",
615+
reasoningSummary: "auto",
616+
custom_field: true,
617+
},
618+
},
619+
includeRawChunks: false,
620+
})
621+
622+
expect(capturedBody.reasoning_effort).toBe("high")
623+
expect(capturedBody.reasoningSummary).toBeUndefined()
624+
expect(capturedBody.custom_field).toBe(true)
625+
})
592626
})

packages/opencode/test/provider/transform.test.ts

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -132,6 +132,32 @@ describe("ProviderTransform.options - gpt-5 textVerbosity", () => {
132132
headers: {},
133133
}) as any
134134

135+
const createOpenAICompatibleGpt5Model = (apiId: string) =>
136+
({
137+
id: `myprovider/${apiId}`,
138+
providerID: "myprovider",
139+
api: {
140+
id: apiId,
141+
url: "https://inference.do-ai.run/v1",
142+
npm: "@ai-sdk/openai-compatible",
143+
},
144+
name: apiId,
145+
capabilities: {
146+
temperature: true,
147+
reasoning: true,
148+
attachment: true,
149+
toolcall: true,
150+
input: { text: true, audio: false, image: true, video: false, pdf: false },
151+
output: { text: true, audio: false, image: false, video: false, pdf: false },
152+
interleaved: false,
153+
},
154+
cost: { input: 0.03, output: 0.06, cache: { read: 0.001, write: 0.002 } },
155+
limit: { context: 128000, output: 4096 },
156+
status: "active",
157+
options: {},
158+
headers: {},
159+
}) as any
160+
135161
test("gpt-5.2 should have textVerbosity set to low", () => {
136162
const model = createGpt5Model("gpt-5.2")
137163
const result = ProviderTransform.options({ model, sessionID, providerOptions: {} })
@@ -173,6 +199,19 @@ describe("ProviderTransform.options - gpt-5 textVerbosity", () => {
173199
const result = ProviderTransform.options({ model, sessionID, providerOptions: {} })
174200
expect(result.textVerbosity).toBeUndefined()
175201
})
202+
203+
test("openai gpt-5 should include reasoningSummary by default", () => {
204+
const model = createGpt5Model("gpt-5")
205+
const result = ProviderTransform.options({ model, sessionID, providerOptions: {} })
206+
expect(result.reasoningSummary).toBe("auto")
207+
})
208+
209+
test("openai-compatible gpt-5 should not include reasoningSummary by default", () => {
210+
const model = createOpenAICompatibleGpt5Model("openai-gpt-5")
211+
const result = ProviderTransform.options({ model, sessionID, providerOptions: {} })
212+
expect(result.reasoningEffort).toBe("medium")
213+
expect(result.reasoningSummary).toBeUndefined()
214+
})
176215
})
177216

178217
describe("ProviderTransform.options - gateway", () => {
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
import { describe, expect, test } from "bun:test"
import { normalizeNul } from "../../src/util/redirection"

// Tests for the POSIX `nul` redirect rewrite (anomalyco#15183): Windows-style
// `>nul` targets must become `/dev/null` on non-Windows platforms so the
// shell never creates a literal file named "nul".
describe("util.redirection", () => {
  test("rewrites nul redirect targets on non-windows", () => {
    // Classic cmd.exe idiom: both stdout and stderr silenced.
    expect(normalizeNul("dir /s *.dll >nul 2>&1", "linux")).toBe("dir /s *.dll >/dev/null 2>&1")
    // Case-insensitive match, fd-numbered operator, whitespace before target.
    expect(normalizeNul("echo hi 2> NUL", "linux")).toBe("echo hi 2> /dev/null")
    // Quoted target is unwrapped and rewritten.
    expect(normalizeNul("echo hi >> 'nul'", "linux")).toBe("echo hi >> /dev/null")
    // On win32 `nul` is the real null device — command passes through untouched.
    expect(normalizeNul("dir /s *.dll >nul 2>&1", "win32")).toBe("dir /s *.dll >nul 2>&1")
  })

  test("does not touch ordinary file paths", () => {
    // `nul` embedded in a longer path/filename must not match.
    expect(normalizeNul("cat ./nul.txt", "linux")).toBe("cat ./nul.txt")
    // Explicit relative path to a file named nul is a deliberate target.
    expect(normalizeNul("echo hi > ./nul", "linux")).toBe("echo hi > ./nul")
  })
})

0 commit comments

Comments
 (0)