Skip to content

Commit 706d322

Browse files
Apply PR #11311: fix: add handling for codex models served from OpenRouter to include the xhigh reasoning mode
2 parents 5c848d5 + 35c7739 commit 706d322

1 file changed

Lines changed: 7 additions & 0 deletions

File tree

packages/opencode/src/provider/transform.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -346,6 +346,13 @@ export namespace ProviderTransform {
346346
switch (model.api.npm) {
347347
case "@openrouter/ai-sdk-provider":
348348
if (!model.id.includes("gpt") && !model.id.includes("gemini-3")) return {}
349+
350+
// Mirrors the logic of the `@ai-sdk/openai` case
351+
if (id.includes("codex")) {
352+
if (id.includes("5.2")) return Object.fromEntries([...WIDELY_SUPPORTED_EFFORTS, "xhigh"].map((effort) => [effort, { reasoning: { effort } }]))
353+
return Object.fromEntries(WIDELY_SUPPORTED_EFFORTS.map((effort) => [effort, { reasoning: { effort } }]))
354+
}
355+
349356
return Object.fromEntries(OPENAI_EFFORTS.map((effort) => [effort, { reasoning: { effort } }]))
350357

351358
// TODO: YOU CANNOT SET max_tokens if this is set!!!

0 commit comments

Comments
 (0)