Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions packages/opencode/src/config/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,9 @@ export const Info = Schema.Struct({
description:
"Timeout in milliseconds between streamed SSE chunks for this provider. If no chunk arrives within this window, the request is aborted.",
}),
cache_point_ttl: Schema.optional(Schema.Literals(["5m", "1h"])).annotate({
description: "TTL for Bedrock cache points. Only applies to amazon-bedrock provider.",
}),
}),
[Schema.Record(Schema.String, Schema.Any)],
),
Expand Down
1 change: 1 addition & 0 deletions packages/opencode/src/plugin/codex.ts
Original file line number Diff line number Diff line change
Expand Up @@ -374,6 +374,7 @@ export async function CodexAuthPlugin(input: PluginInput): Promise<Hooks> {
"gpt-5.3-codex",
"gpt-5.4",
"gpt-5.4-mini",
"gpt-5.5",
])
for (const [modelId, model] of Object.entries(provider.models)) {
if (modelId.includes("codex")) continue
Expand Down
25 changes: 21 additions & 4 deletions packages/opencode/src/provider/transform.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@ import type * as Provider from "./provider"
import type * as ModelsDev from "./models"
import { iife } from "@/util/iife"
import { Flag } from "@opencode-ai/core/flag/flag"
import * as Log from "@opencode-ai/core/util/log"

const log = Log.create({ service: "transform" })

type Modality = NonNullable<ModelsDev.Model["modalities"]>["input"][number]

Expand Down Expand Up @@ -251,10 +254,16 @@ function normalizeMessages(
return msgs
}

function applyCaching(msgs: ModelMessage[], model: Provider.Model): ModelMessage[] {
function applyCaching(msgs: ModelMessage[], model: Provider.Model, options?: Record<string, unknown>): ModelMessage[] {
const system = msgs.filter((msg) => msg.role === "system").slice(0, 2)
const final = msgs.filter((msg) => msg.role !== "system").slice(-2)

const ttl = options?.cache_point_ttl as string | undefined
const bedrockCachePoint: Record<string, unknown> = { type: "default" }
if (ttl) {
bedrockCachePoint.ttl = ttl
}

const providerOptions = {
anthropic: {
cacheControl: { type: "ephemeral" },
Expand All @@ -263,7 +272,7 @@ function applyCaching(msgs: ModelMessage[], model: Provider.Model): ModelMessage
cacheControl: { type: "ephemeral" },
},
bedrock: {
cachePoint: { type: "default" },
cachePoint: bedrockCachePoint,
},
openaiCompatible: {
cache_control: { type: "ephemeral" },
Expand Down Expand Up @@ -299,6 +308,12 @@ function applyCaching(msgs: ModelMessage[], model: Provider.Model): ModelMessage
msg.providerOptions = mergeDeep(msg.providerOptions ?? {}, providerOptions)
}

log.info("cache points applied", {
messageCount: unique([...system, ...final]).length,
providerID: model.providerID,
ttl,
})

return msgs
}

Expand Down Expand Up @@ -351,10 +366,12 @@ export function message(msgs: ModelMessage[], model: Provider.Model, options: Re
model.id.includes("anthropic") ||
model.id.includes("claude") ||
model.api.npm === "@ai-sdk/anthropic" ||
model.api.npm === "@ai-sdk/alibaba") &&
model.api.npm === "@ai-sdk/alibaba" ||
model.providerID.includes("bedrock") ||
model.api.npm === "@ai-sdk/amazon-bedrock") &&
model.api.npm !== "@ai-sdk/gateway"
) {
msgs = applyCaching(msgs, model)
msgs = applyCaching(msgs, model, options)
}

// Remap providerOptions keys from stored providerID to expected SDK key
Expand Down
9 changes: 8 additions & 1 deletion packages/opencode/src/session/session.ts
Original file line number Diff line number Diff line change
Expand Up @@ -363,7 +363,7 @@ export const getUsage = (input: { model: Provider.Model; usage: LanguageModelUsa
input.model.cost?.experimentalOver200K && tokens.input + tokens.cache.read > 200_000
? input.model.cost.experimentalOver200K
: input.model.cost
return {
const result = {
cost: safe(
new Decimal(0)
.add(new Decimal(tokens.input).mul(costInfo?.input ?? 0).div(1_000_000))
Expand All @@ -377,6 +377,13 @@ export const getUsage = (input: { model: Provider.Model; usage: LanguageModelUsa
),
tokens,
}
log.info("usage", {
input: tokens.input,
output: tokens.output,
cache_write: tokens.cache.write,
cache_read: tokens.cache.read,
})
return result
}

export class BusyError extends Error {
Expand Down
107 changes: 107 additions & 0 deletions packages/opencode/test/provider/transform.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3331,3 +3331,110 @@ describe("ProviderTransform.variants", () => {
})
})
})

describe("ProviderTransform.message - bedrock cache_point_ttl", () => {
  // Minimal amazon-bedrock model fixture. Only the fields that
  // ProviderTransform.message inspects (providerID / api.npm) drive the
  // caching path; the rest fills out the expected model shape.
  const bedrockModel = {
    id: "amazon-bedrock/anthropic.claude-opus-4-6",
    providerID: "amazon-bedrock",
    api: {
      id: "anthropic.claude-opus-4-6",
      url: "https://bedrock-runtime.us-east-1.amazonaws.com",
      npm: "@ai-sdk/amazon-bedrock",
    },
    name: "Claude Opus 4.6",
    capabilities: {
      temperature: true,
      reasoning: false,
      attachment: true,
      toolcall: true,
      input: { text: true, audio: false, image: true, video: false, pdf: true },
      output: { text: true, audio: false, image: false, video: false, pdf: false },
      interleaved: false,
    },
    cost: {
      input: 0.003,
      output: 0.015,
      cache: { read: 0.0003, write: 0.00375 },
    },
    limit: {
      context: 200000,
      output: 8192,
    },
    status: "active",
    options: {},
    headers: {},
  } as any

  // Fresh two-message conversation per test so mutations inside message()
  // cannot leak between cases.
  const conversation = () =>
    [
      {
        role: "system",
        content: "You are a helpful assistant.",
      },
      {
        role: "user",
        content: "Hello",
      },
    ] as any[]

  // Both the system and the final user message should carry the same
  // bedrock cachePoint after transformation.
  const expectCachePoints = (result: any[], cachePoint: Record<string, unknown>) => {
    expect(result).toHaveLength(2)
    for (const msg of result) {
      expect(msg.providerOptions?.bedrock?.cachePoint).toEqual(cachePoint)
    }
  }

  test("should add ttl to bedrock cachePoint when cache_point_ttl is set to 5m", () => {
    const result = ProviderTransform.message(conversation(), bedrockModel, { cache_point_ttl: "5m" })

    expectCachePoints(result, { type: "default", ttl: "5m" })
  })

  test("should add ttl to bedrock cachePoint when cache_point_ttl is set to 1h", () => {
    const result = ProviderTransform.message(conversation(), bedrockModel, { cache_point_ttl: "1h" })

    expectCachePoints(result, { type: "default", ttl: "1h" })
  })

  test("should not add ttl to bedrock cachePoint when cache_point_ttl is not set", () => {
    const result = ProviderTransform.message(conversation(), bedrockModel, {})

    // Without a configured TTL the cache point stays at its bare default.
    expectCachePoints(result, { type: "default" })
  })
})
Loading