diff --git a/bun.lock b/bun.lock
index 77ab24240bb9..64b32feac4eb 100644
--- a/bun.lock
+++ b/bun.lock
@@ -688,7 +688,7 @@
"@tailwindcss/vite": "4.1.11",
"@tsconfig/bun": "1.0.9",
"@tsconfig/node22": "22.0.2",
- "@types/bun": "1.3.11",
+ "@types/bun": "1.3.12",
"@types/cross-spawn": "6.0.6",
"@types/luxon": "3.7.1",
"@types/node": "22.13.9",
@@ -2302,7 +2302,7 @@
"@types/braces": ["@types/braces@3.0.5", "", {}, "sha512-SQFof9H+LXeWNz8wDe7oN5zu7ket0qwMu5vZubW4GCJ8Kkeh6nBWUz87+KTz/G3Kqsrp0j/W253XJb3KMEeg3w=="],
- "@types/bun": ["@types/bun@1.3.11", "", { "dependencies": { "bun-types": "1.3.11" } }, "sha512-5vPne5QvtpjGpsGYXiFyycfpDF2ECyPcTSsFBMa0fraoxiQyMJ3SmuQIGhzPg2WJuWxVBoxWJ2kClYTcw/4fAg=="],
+ "@types/bun": ["@types/bun@1.3.12", "", { "dependencies": { "bun-types": "1.3.12" } }, "sha512-DBv81elK+/VSwXHDlnH3Qduw+KxkTIWi7TXkAeh24zpi5l0B2kUg9Ga3tb4nJaPcOFswflgi/yAvMVBPrxMB+A=="],
"@types/cacache": ["@types/cacache@20.0.1", "", { "dependencies": { "@types/node": "*", "minipass": "*" } }, "sha512-QlKW3AFoFr/hvPHwFHMIVUH/ZCYeetBNou3PCmxu5LaNDvrtBlPJtIA6uhmU9JRt9oxj7IYoqoLcpxtzpPiTcw=="],
@@ -2720,7 +2720,7 @@
"bun-pty": ["bun-pty@0.4.8", "", {}, "sha512-rO70Mrbr13+jxHHHu2YBkk2pNqrJE5cJn29WE++PUr+GFA0hq/VgtQPZANJ8dJo6d7XImvBk37Innt8GM7O28w=="],
- "bun-types": ["bun-types@1.3.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-1KGPpoxQWl9f6wcZh57LvrPIInQMn2TQ7jsgxqpRzg+l0QPOFvJVH7HmvHo/AiPgwXy+/Thf6Ov3EdVn1vOabg=="],
+ "bun-types": ["bun-types@1.3.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-HqOLj5PoFajAQciOMRiIZGNoKxDJSr6qigAttOX40vJuSp6DN/CxWp9s3C1Xwm4oH7ybueITwiaOcWXoYVoRkA=="],
"bun-webgpu": ["bun-webgpu@0.1.5", "", { "dependencies": { "@webgpu/types": "^0.1.60" }, "optionalDependencies": { "bun-webgpu-darwin-arm64": "^0.1.5", "bun-webgpu-darwin-x64": "^0.1.5", "bun-webgpu-linux-x64": "^0.1.5", "bun-webgpu-win32-x64": "^0.1.5" } }, "sha512-91/K6S5whZKX7CWAm9AylhyKrLGRz6BUiiPiM/kXadSnD4rffljCD/q9cNFftm5YXhx4MvLqw33yEilxogJvwA=="],
diff --git a/nix/hashes.json b/nix/hashes.json
index 21279a327d0a..c09604610638 100644
--- a/nix/hashes.json
+++ b/nix/hashes.json
@@ -1,8 +1,8 @@
{
"nodeModules": {
- "x86_64-linux": "sha256-NczRp8MPppkqP8PQfWMUWJ/Wofvf2YVy5m4i22Pi3jg=",
- "aarch64-linux": "sha256-QIxGOu8Fj+sWgc9hKvm1BLiIErxEtd17SPlwZGac9sQ=",
- "aarch64-darwin": "sha256-Rb9qbMM+ARn0iBCaZurwcoUBCplbMXEZwrXVKextp3I=",
- "x86_64-darwin": "sha256-KVxOKkaVV7W+K4reEk14MTLgmtoqwCYDqDNXNeS6ync="
+ "x86_64-linux": "sha256-AgHhYsiygxbsBo3JN4HqHXKAwh8n1qeuSCe2qqxlxW4=",
+ "aarch64-linux": "sha256-h2lpWRQ5EDYnjpqZXtUAp1mxKLQxJ4m8MspgSY8Ev78=",
+ "aarch64-darwin": "sha256-xnd91+WyeAqn06run2ajsekxJvTMiLsnqNPe/rR8VTM=",
+ "x86_64-darwin": "sha256-rXpz45IOjGEk73xhP9VY86eOj2CZBg2l1vzwzTIOOOQ="
}
}
diff --git a/package.json b/package.json
index 06bf9c91aef0..f918bcd025f5 100644
--- a/package.json
+++ b/package.json
@@ -4,7 +4,7 @@
"description": "AI-powered development tool",
"private": true,
"type": "module",
- "packageManager": "bun@1.3.11",
+ "packageManager": "bun@1.3.13",
"scripts": {
"dev": "bun run --cwd packages/opencode --conditions=browser src/index.ts",
"dev:desktop": "bun --cwd packages/desktop-electron dev",
@@ -30,7 +30,7 @@
"@effect/opentelemetry": "4.0.0-beta.48",
"@effect/platform-node": "4.0.0-beta.48",
"@npmcli/arborist": "9.4.0",
- "@types/bun": "1.3.11",
+ "@types/bun": "1.3.12",
"@types/cross-spawn": "6.0.6",
"@octokit/rest": "22.0.0",
"@hono/zod-validator": "0.4.2",
diff --git a/packages/app/src/components/dialog-edit-project.tsx b/packages/app/src/components/dialog-edit-project.tsx
index ea5d70065adc..8eb12daf52e5 100644
--- a/packages/app/src/components/dialog-edit-project.tsx
+++ b/packages/app/src/components/dialog-edit-project.tsx
@@ -12,6 +12,7 @@ import { type LocalProject, getAvatarColors } from "@/context/layout"
import { getFilename } from "@opencode-ai/shared/util/path"
import { Avatar } from "@opencode-ai/ui/avatar"
import { useLanguage } from "@/context/language"
+import { getProjectAvatarSource } from "@/pages/layout/sidebar-items"
const AVATAR_COLOR_KEYS = ["pink", "mint", "orange", "purple", "cyan", "lime"] as const
@@ -26,8 +27,8 @@ export function DialogEditProject(props: { project: LocalProject }) {
const [store, setStore] = createStore({
name: defaultName(),
- color: props.project.icon?.color || "pink",
- iconUrl: props.project.icon?.override || "",
+ color: props.project.icon?.color,
+ iconOverride: props.project.icon?.override,
startup: props.project.commands?.start ?? "",
dragOver: false,
iconHover: false,
@@ -39,7 +40,7 @@ export function DialogEditProject(props: { project: LocalProject }) {
if (!file.type.startsWith("image/")) return
const reader = new FileReader()
reader.onload = (e) => {
- setStore("iconUrl", e.target?.result as string)
+ setStore("iconOverride", e.target?.result as string)
setStore("iconHover", false)
}
reader.readAsDataURL(file)
@@ -68,7 +69,7 @@ export function DialogEditProject(props: { project: LocalProject }) {
}
function clearIcon() {
- setStore("iconUrl", "")
+ setStore("iconOverride", "")
}
const saveMutation = useMutation(() => ({
@@ -81,17 +82,17 @@ export function DialogEditProject(props: { project: LocalProject }) {
projectID: props.project.id,
directory: props.project.worktree,
name,
- icon: { color: store.color, override: store.iconUrl },
+ icon: { color: store.color || "", override: store.iconOverride || "" },
commands: { start },
})
- globalSync.project.icon(props.project.worktree, store.iconUrl || undefined)
+ globalSync.project.icon(props.project.worktree, store.iconOverride || undefined)
dialog.close()
return
}
globalSync.project.meta(props.project.worktree, {
name,
- icon: { color: store.color, override: store.iconUrl || undefined },
+ icon: { color: store.color || undefined, override: store.iconOverride || undefined },
commands: { start: start || undefined },
})
dialog.close()
@@ -130,13 +131,13 @@ export function DialogEditProject(props: { project: LocalProject }) {
classList={{
"border-text-interactive-base bg-surface-info-base/20": store.dragOver,
"border-border-base hover:border-border-strong": !store.dragOver,
- "overflow-hidden": !!store.iconUrl,
+ "overflow-hidden": !!store.iconOverride,
}}
onDrop={handleDrop}
onDragOver={handleDragOver}
onDragLeave={handleDragLeave}
onClick={() => {
- if (store.iconUrl && store.iconHover) {
+ if (store.iconOverride && store.iconHover) {
clearIcon()
} else {
iconInput?.click()
@@ -144,7 +145,11 @@ export function DialogEditProject(props: { project: LocalProject }) {
}}
>
}
>
-
+ {(src) => (
+
+ )}
@@ -174,8 +181,8 @@ export function DialogEditProject(props: { project: LocalProject }) {
@@ -198,7 +205,7 @@ export function DialogEditProject(props: { project: LocalProject }) {
-
+
@@ -215,7 +222,10 @@ export function DialogEditProject(props: { project: LocalProject }) {
"bg-transparent border border-transparent hover:bg-surface-base-hover hover:border-border-weak-base":
store.color !== color,
}}
- onClick={() => setStore("color", color)}
+ onClick={() => {
+ if (store.color === color && !props.project.icon?.url) return
+ setStore("color", store.color === color ? undefined : color)
+ }}
>
{
const globalSync = useGlobalSync()
const notification = useNotification()
@@ -42,11 +50,7 @@ export const ProjectIcon = (props: { project: LocalProject; class?: string; noti
export interface Interface {
readonly init: () => Effect.Effect
readonly status: () => Effect.Effect
- readonly file: (filepath: string) => Effect.Effect
+ readonly file: (filepath: string) => Effect.Effect
}
export class Service extends Context.Service()("@opencode/Format") {}
@@ -70,16 +70,19 @@ export const layer = Layer.effect(
}
}),
)
- return checks.filter((x) => x.cmd).map((x) => ({ item: x.item, cmd: x.cmd! }))
+ return checks
+ .filter((x): x is { item: Formatter.Info; cmd: string[] } => x.cmd !== false)
+ .map((x) => ({ item: x.item, cmd: x.cmd }))
}
function formatFile(filepath: string) {
return Effect.gen(function* () {
log.info("formatting", { file: filepath })
- const ext = path.extname(filepath)
+ const formatters = yield* Effect.promise(() => getFormatter(path.extname(filepath)))
- for (const { item, cmd } of yield* Effect.promise(() => getFormatter(ext))) {
- if (cmd === false) continue
+ if (!formatters.length) return false
+
+ for (const { item, cmd } of formatters) {
log.info("running", { command: cmd })
const replaced = cmd.map((x) => x.replace("$FILE", filepath))
const dir = yield* InstanceState.directory
@@ -113,6 +116,8 @@ export const layer = Layer.effect(
})
}
}
+
+ return true
})
}
@@ -188,7 +193,7 @@ export const layer = Layer.effect(
const file = Effect.fn("Format.file")(function* (filepath: string) {
const { formatFile } = yield* InstanceState.get(state)
- yield* formatFile(filepath)
+ return yield* formatFile(filepath)
})
return Service.of({ init, status, file })
diff --git a/packages/opencode/src/patch/index.ts b/packages/opencode/src/patch/index.ts
index 19e1d7555bb0..3662f9e908ae 100644
--- a/packages/opencode/src/patch/index.ts
+++ b/packages/opencode/src/patch/index.ts
@@ -3,6 +3,7 @@ import * as path from "path"
import * as fs from "fs/promises"
import { readFileSync } from "fs"
import { Log } from "../util"
+import * as Bom from "../util/bom"
const log = Log.create({ service: "patch" })
@@ -305,18 +306,19 @@ export function maybeParseApplyPatch(
interface ApplyPatchFileUpdate {
unified_diff: string
content: string
+ bom: boolean
}
export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate {
// Read original file content
- let originalContent: string
+ let originalContent: ReturnType
try {
- originalContent = readFileSync(filePath, "utf-8")
+ originalContent = Bom.split(readFileSync(filePath, "utf-8"))
} catch (error) {
throw new Error(`Failed to read file ${filePath}: ${error}`, { cause: error })
}
- let originalLines = originalContent.split("\n")
+ let originalLines = originalContent.text.split("\n")
// Drop trailing empty element for consistent line counting
if (originalLines.length > 0 && originalLines[originalLines.length - 1] === "") {
@@ -331,14 +333,16 @@ export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFile
newLines.push("")
}
- const newContent = newLines.join("\n")
+ const next = Bom.split(newLines.join("\n"))
+ const newContent = next.text
// Generate unified diff
- const unifiedDiff = generateUnifiedDiff(originalContent, newContent)
+ const unifiedDiff = generateUnifiedDiff(originalContent.text, newContent)
return {
unified_diff: unifiedDiff,
content: newContent,
+ bom: originalContent.bom || next.bom,
}
}
@@ -553,13 +557,13 @@ export async function applyHunksToFiles(hunks: Hunk[]): Promise {
await fs.mkdir(moveDir, { recursive: true })
}
- await fs.writeFile(hunk.move_path, fileUpdate.content, "utf-8")
+ await fs.writeFile(hunk.move_path, Bom.join(fileUpdate.content, fileUpdate.bom), "utf-8")
await fs.unlink(hunk.path)
modified.push(hunk.move_path)
log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`)
} else {
// Regular update
- await fs.writeFile(hunk.path, fileUpdate.content, "utf-8")
+ await fs.writeFile(hunk.path, Bom.join(fileUpdate.content, fileUpdate.bom), "utf-8")
modified.push(hunk.path)
log.info(`Updated file: ${hunk.path}`)
}
diff --git a/packages/opencode/src/plugin/codex.ts b/packages/opencode/src/plugin/codex.ts
index c61cb7850900..84d314f476ff 100644
--- a/packages/opencode/src/plugin/codex.ts
+++ b/packages/opencode/src/plugin/codex.ts
@@ -374,6 +374,7 @@ export async function CodexAuthPlugin(input: PluginInput): Promise {
"gpt-5.3-codex",
"gpt-5.4",
"gpt-5.4-mini",
+ "gpt-5.5",
])
for (const [modelId, model] of Object.entries(provider.models)) {
if (modelId.includes("codex")) continue
diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts
index 1d84c7c93127..7a1b7856a3d5 100644
--- a/packages/opencode/src/provider/transform.ts
+++ b/packages/opencode/src/provider/transform.ts
@@ -61,9 +61,15 @@ function normalizeMessages(
}
if (!Array.isArray(msg.content)) return msg
const filtered = msg.content.filter((part) => {
- if (part.type === "text" || part.type === "reasoning") {
+ if (part.type === "text") {
return part.text !== ""
}
+ if (part.type === "reasoning") {
+ // Keep reasoning parts that carry providerOptions (e.g. redacted_thinking
+ // blocks which have empty text but contain signature/redactedData metadata
+ // that must be preserved for the Anthropic API)
+ return part.text !== "" || part.providerOptions != null
+ }
return true
})
if (filtered.length === 0) return undefined
@@ -116,6 +122,9 @@ function normalizeMessages(
if (msg.role !== "assistant" || !Array.isArray(msg.content)) return [msg]
const parts = msg.content
+ // Don't reorder messages containing reasoning/thinking blocks — Anthropic
+ // requires thinking and redacted_thinking blocks to remain exactly as returned.
+ if (parts.some((part) => part.type === "reasoning")) return [msg]
const first = parts.findIndex((part) => part.type === "tool-call")
if (first === -1) return [msg]
if (!parts.slice(first).some((part) => part.type !== "tool-call")) return [msg]
@@ -246,13 +255,20 @@ function applyCaching(msgs: ModelMessage[], model: Provider.Model): ModelMessage
const shouldUseContentOptions = !useMessageLevelOptions && Array.isArray(msg.content) && msg.content.length > 0
if (shouldUseContentOptions) {
- const lastContent = msg.content[msg.content.length - 1]
- if (
- lastContent &&
- typeof lastContent === "object" &&
- lastContent.type !== "tool-approval-request" &&
- lastContent.type !== "tool-approval-response"
- ) {
+ // Find the last content block that is safe to annotate with cache hints.
+ // Reasoning blocks must not be modified — Anthropic requires thinking and
+ // redacted_thinking blocks to remain exactly as returned in prior responses.
+ const lastContent = [...msg.content]
+ .reverse()
+ .find(
+ (part) =>
+ part &&
+ typeof part === "object" &&
+ part.type !== "tool-approval-request" &&
+ part.type !== "tool-approval-response" &&
+ part.type !== "reasoning",
+ )
+ if (lastContent) {
lastContent.providerOptions = mergeDeep(lastContent.providerOptions ?? {}, providerOptions)
continue
}
diff --git a/packages/opencode/src/tool/apply_patch.ts b/packages/opencode/src/tool/apply_patch.ts
index 7da7dd255c52..a4cf1e853f3c 100644
--- a/packages/opencode/src/tool/apply_patch.ts
+++ b/packages/opencode/src/tool/apply_patch.ts
@@ -14,6 +14,7 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import DESCRIPTION from "./apply_patch.txt"
import { File } from "../file"
import { Format } from "../format"
+import * as Bom from "@/util/bom"
const PatchParams = z.object({
patchText: z.string().describe("The full patch text that describes all changes to be made"),
@@ -59,6 +60,7 @@ export const ApplyPatchTool = Tool.define(
diff: string
additions: number
deletions: number
+ bom: boolean
}> = []
let totalDiff = ""
@@ -72,11 +74,12 @@ export const ApplyPatchTool = Tool.define(
const oldContent = ""
const newContent =
hunk.contents.length === 0 || hunk.contents.endsWith("\n") ? hunk.contents : `${hunk.contents}\n`
- const diff = trimDiff(createTwoFilesPatch(filePath, filePath, oldContent, newContent))
+ const next = Bom.split(newContent)
+ const diff = trimDiff(createTwoFilesPatch(filePath, filePath, oldContent, next.text))
let additions = 0
let deletions = 0
- for (const change of diffLines(oldContent, newContent)) {
+ for (const change of diffLines(oldContent, next.text)) {
if (change.added) additions += change.count || 0
if (change.removed) deletions += change.count || 0
}
@@ -84,11 +87,12 @@ export const ApplyPatchTool = Tool.define(
fileChanges.push({
filePath,
oldContent,
- newContent,
+ newContent: next.text,
type: "add",
diff,
additions,
deletions,
+ bom: next.bom,
})
totalDiff += diff + "\n"
@@ -104,13 +108,16 @@ export const ApplyPatchTool = Tool.define(
)
}
- const oldContent = yield* afs.readFileString(filePath)
+ const source = yield* Bom.readFile(afs, filePath)
+ const oldContent = source.text
let newContent = oldContent
+ let bom = source.bom
// Apply the update chunks to get new content
try {
const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks)
newContent = fileUpdate.content
+ bom = fileUpdate.bom
} catch (error) {
return yield* Effect.fail(new Error(`apply_patch verification failed: ${error}`))
}
@@ -136,6 +143,7 @@ export const ApplyPatchTool = Tool.define(
diff,
additions,
deletions,
+ bom,
})
totalDiff += diff + "\n"
@@ -143,17 +151,16 @@ export const ApplyPatchTool = Tool.define(
}
case "delete": {
- const contentToDelete = yield* afs
- .readFileString(filePath)
- .pipe(
- Effect.catch((error) =>
- Effect.fail(
- new Error(
- `apply_patch verification failed: ${error instanceof Error ? error.message : String(error)}`,
- ),
+ const source = yield* Bom.readFile(afs, filePath).pipe(
+ Effect.catch((error) =>
+ Effect.fail(
+ new Error(
+ `apply_patch verification failed: ${error instanceof Error ? error.message : String(error)}`,
),
),
- )
+ ),
+ )
+ const contentToDelete = source.text
const deleteDiff = trimDiff(createTwoFilesPatch(filePath, filePath, contentToDelete, ""))
const deletions = contentToDelete.split("\n").length
@@ -166,6 +173,7 @@ export const ApplyPatchTool = Tool.define(
diff: deleteDiff,
additions: 0,
deletions,
+ bom: source.bom,
})
totalDiff += deleteDiff + "\n"
@@ -207,12 +215,12 @@ export const ApplyPatchTool = Tool.define(
case "add":
// Create parent directories (recursive: true is safe on existing/root dirs)
- yield* afs.writeWithDirs(change.filePath, change.newContent)
+ yield* afs.writeWithDirs(change.filePath, Bom.join(change.newContent, change.bom))
updates.push({ file: change.filePath, event: "add" })
break
case "update":
- yield* afs.writeWithDirs(change.filePath, change.newContent)
+ yield* afs.writeWithDirs(change.filePath, Bom.join(change.newContent, change.bom))
updates.push({ file: change.filePath, event: "change" })
break
@@ -220,7 +228,7 @@ export const ApplyPatchTool = Tool.define(
if (change.movePath) {
// Create parent directories (recursive: true is safe on existing/root dirs)
- yield* afs.writeWithDirs(change.movePath!, change.newContent)
+ yield* afs.writeWithDirs(change.movePath!, Bom.join(change.newContent, change.bom))
yield* afs.remove(change.filePath)
updates.push({ file: change.filePath, event: "unlink" })
updates.push({ file: change.movePath, event: "add" })
@@ -234,7 +242,9 @@ export const ApplyPatchTool = Tool.define(
}
if (edited) {
- yield* format.file(edited)
+ if (yield* format.file(edited)) {
+ yield* Bom.syncFile(afs, edited, change.bom)
+ }
yield* bus.publish(File.Event.Edited, { file: edited })
}
}
diff --git a/packages/opencode/src/tool/edit.ts b/packages/opencode/src/tool/edit.ts
index 2c6c2c13084a..858d14e043fe 100644
--- a/packages/opencode/src/tool/edit.ts
+++ b/packages/opencode/src/tool/edit.ts
@@ -18,6 +18,7 @@ import { Instance } from "../project/instance"
import { Snapshot } from "@/snapshot"
import { assertExternalDirectoryEffect } from "./external-directory"
import { AppFileSystem } from "@opencode-ai/shared/filesystem"
+import * as Bom from "@/util/bom"
function normalizeLineEndings(text: string): string {
return text.replaceAll("\r\n", "\n")
@@ -84,7 +85,11 @@ export const EditTool = Tool.define(
Effect.gen(function* () {
if (params.oldString === "") {
const existed = yield* afs.existsSafe(filePath)
- contentNew = params.newString
+ const source = existed ? yield* Bom.readFile(afs, filePath) : { bom: false, text: "" }
+ const next = Bom.split(params.newString)
+ const desiredBom = source.bom || next.bom
+ contentOld = source.text
+ contentNew = next.text
diff = trimDiff(createTwoFilesPatch(filePath, filePath, contentOld, contentNew))
yield* ctx.ask({
permission: "edit",
@@ -95,8 +100,10 @@ export const EditTool = Tool.define(
diff,
},
})
- yield* afs.writeWithDirs(filePath, params.newString)
- yield* format.file(filePath)
+ yield* afs.writeWithDirs(filePath, Bom.join(contentNew, desiredBom))
+ if (yield* format.file(filePath)) {
+ contentNew = yield* Bom.syncFile(afs, filePath, desiredBom)
+ }
yield* bus.publish(File.Event.Edited, { file: filePath })
yield* bus.publish(FileWatcher.Event.Updated, {
file: filePath,
@@ -108,13 +115,16 @@ export const EditTool = Tool.define(
const info = yield* afs.stat(filePath).pipe(Effect.catch(() => Effect.succeed(undefined)))
if (!info) throw new Error(`File ${filePath} not found`)
if (info.type === "Directory") throw new Error(`Path is a directory, not a file: ${filePath}`)
- contentOld = yield* afs.readFileString(filePath)
+ const source = yield* Bom.readFile(afs, filePath)
+ contentOld = source.text
const ending = detectLineEnding(contentOld)
const old = convertToLineEnding(normalizeLineEndings(params.oldString), ending)
- const next = convertToLineEnding(normalizeLineEndings(params.newString), ending)
+ const replacement = convertToLineEnding(normalizeLineEndings(params.newString), ending)
- contentNew = replace(contentOld, old, next, params.replaceAll)
+ const next = Bom.split(replace(contentOld, old, replacement, params.replaceAll))
+ const desiredBom = source.bom || next.bom
+ contentNew = next.text
diff = trimDiff(
createTwoFilesPatch(
@@ -134,14 +144,15 @@ export const EditTool = Tool.define(
},
})
- yield* afs.writeWithDirs(filePath, contentNew)
- yield* format.file(filePath)
+ yield* afs.writeWithDirs(filePath, Bom.join(contentNew, desiredBom))
+ if (yield* format.file(filePath)) {
+ contentNew = yield* Bom.syncFile(afs, filePath, desiredBom)
+ }
yield* bus.publish(File.Event.Edited, { file: filePath })
yield* bus.publish(FileWatcher.Event.Updated, {
file: filePath,
event: "change",
})
- contentNew = yield* afs.readFileString(filePath)
diff = trimDiff(
createTwoFilesPatch(
filePath,
diff --git a/packages/opencode/src/tool/write.ts b/packages/opencode/src/tool/write.ts
index 741091b21d3c..79ed58519831 100644
--- a/packages/opencode/src/tool/write.ts
+++ b/packages/opencode/src/tool/write.ts
@@ -13,6 +13,7 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem"
import { Instance } from "../project/instance"
import { trimDiff } from "./edit"
import { assertExternalDirectoryEffect } from "./external-directory"
+import * as Bom from "@/util/bom"
const MAX_PROJECT_DIAGNOSTICS_FILES = 5
@@ -38,9 +39,13 @@ export const WriteTool = Tool.define(
yield* assertExternalDirectoryEffect(ctx, filepath)
const exists = yield* fs.existsSafe(filepath)
- const contentOld = exists ? yield* fs.readFileString(filepath) : ""
+ const source = exists ? yield* Bom.readFile(fs, filepath) : { bom: false, text: "" }
+ const next = Bom.split(params.content)
+ const desiredBom = source.bom || next.bom
+ const contentOld = source.text
+ const contentNew = next.text
- const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, params.content))
+ const diff = trimDiff(createTwoFilesPatch(filepath, filepath, contentOld, contentNew))
yield* ctx.ask({
permission: "edit",
patterns: [path.relative(Instance.worktree, filepath)],
@@ -51,8 +56,10 @@ export const WriteTool = Tool.define(
},
})
- yield* fs.writeWithDirs(filepath, params.content)
- yield* format.file(filepath)
+ yield* fs.writeWithDirs(filepath, Bom.join(contentNew, desiredBom))
+ if (yield* format.file(filepath)) {
+ yield* Bom.syncFile(fs, filepath, desiredBom)
+ }
yield* bus.publish(File.Event.Edited, { file: filepath })
yield* bus.publish(FileWatcher.Event.Updated, {
file: filepath,
diff --git a/packages/opencode/src/util/bom.ts b/packages/opencode/src/util/bom.ts
new file mode 100644
index 000000000000..484228f3d415
--- /dev/null
+++ b/packages/opencode/src/util/bom.ts
@@ -0,0 +1,31 @@
+import { Effect } from "effect"
+import { AppFileSystem } from "@opencode-ai/shared/filesystem"
+
+const BOM_CODE = 0xfeff
+const BOM = String.fromCharCode(BOM_CODE)
+
+export function split(text: string) {
+ if (text.charCodeAt(0) !== BOM_CODE) return { bom: false, text }
+ return { bom: true, text: text.slice(1) }
+}
+
+export function join(text: string, bom: boolean) {
+ const stripped = split(text).text
+ if (!bom) return stripped
+ return BOM + stripped
+}
+
+export const readFile = Effect.fn("Bom.readFile")(function* (fs: AppFileSystem.Interface, filePath: string) {
+ return split(new TextDecoder("utf-8", { ignoreBOM: true }).decode(yield* fs.readFile(filePath)))
+})
+
+export const syncFile = Effect.fn("Bom.syncFile")(function* (
+ fs: AppFileSystem.Interface,
+ filePath: string,
+ bom: boolean,
+) {
+ const current = yield* readFile(fs, filePath)
+ if (current.bom === bom) return current.text
+ yield* fs.writeWithDirs(filePath, join(current.text, bom))
+ return current.text
+})
diff --git a/packages/opencode/test/effect/cross-spawn-spawner.test.ts b/packages/opencode/test/effect/cross-spawn-spawner.test.ts
index 5990635aa211..b4e52529c1de 100644
--- a/packages/opencode/test/effect/cross-spawn-spawner.test.ts
+++ b/packages/opencode/test/effect/cross-spawn-spawner.test.ts
@@ -169,7 +169,9 @@ describe("cross-spawn spawner", () => {
'process.stderr.write("stderr\\n", done)',
].join("\n"),
)
- const [stdout, stderr] = yield* Effect.all([decodeByteStream(handle.stdout), decodeByteStream(handle.stderr)])
+ const [stdout, stderr] = yield* Effect.all([decodeByteStream(handle.stdout), decodeByteStream(handle.stderr)], {
+ concurrency: 2,
+ })
expect(stdout).toBe("stdout")
expect(stderr).toBe("stderr")
}),
diff --git a/packages/opencode/test/format/format.test.ts b/packages/opencode/test/format/format.test.ts
index 5530e195b268..2f6f235aa165 100644
--- a/packages/opencode/test/format/format.test.ts
+++ b/packages/opencode/test/format/format.test.ts
@@ -126,6 +126,24 @@ describe("Format", () => {
it.live("service initializes without error", () => provideTmpdirInstance(() => Format.Service.use(() => Effect.void)))
+ it.live("file() returns false when no formatter runs", () =>
+ provideTmpdirInstance(
+ (dir) =>
+ Effect.gen(function* () {
+ const file = `${dir}/test.txt`
+ yield* Effect.promise(() => Bun.write(file, "x"))
+
+ const formatted = yield* Format.Service.use((fmt) => fmt.file(file))
+ expect(formatted).toBe(false)
+ }),
+ {
+ config: {
+ formatter: false,
+ },
+ },
+ ),
+ )
+
it.live("status() initializes formatter state per directory", () =>
Effect.gen(function* () {
const a = yield* provideTmpdirInstance(() => Format.Service.use((fmt) => fmt.status()), {
@@ -219,7 +237,7 @@ describe("Format", () => {
yield* Format.Service.use((fmt) =>
Effect.gen(function* () {
yield* fmt.init()
- yield* fmt.file(file)
+ expect(yield* fmt.file(file)).toBe(true)
}),
)
@@ -229,11 +247,21 @@ describe("Format", () => {
config: {
formatter: {
first: {
- command: ["sh", "-c", 'sleep 0.05; v=$(cat "$1"); printf \'%sA\' "$v" > "$1"', "sh", "$FILE"],
+ command: [
+ "node",
+ "-e",
+ "const fs = require('fs'); const file = process.argv[1]; fs.writeFileSync(file, fs.readFileSync(file, 'utf8') + 'A')",
+ "$FILE",
+ ],
extensions: [".seq"],
},
second: {
- command: ["sh", "-c", 'v=$(cat "$1"); printf \'%sB\' "$v" > "$1"', "sh", "$FILE"],
+ command: [
+ "node",
+ "-e",
+ "const fs = require('fs'); const file = process.argv[1]; fs.writeFileSync(file, fs.readFileSync(file, 'utf8') + 'B')",
+ "$FILE",
+ ],
extensions: [".seq"],
},
},
diff --git a/packages/opencode/test/provider/transform.test.ts b/packages/opencode/test/provider/transform.test.ts
index 791fcdedc676..db40bc2b1535 100644
--- a/packages/opencode/test/provider/transform.test.ts
+++ b/packages/opencode/test/provider/transform.test.ts
@@ -3148,3 +3148,441 @@ describe("ProviderTransform.variants", () => {
})
})
})
+describe("ProviderTransform.message - preserve redacted_thinking blocks", () => {
+ const anthropicModel = {
+ id: "anthropic/claude-sonnet-4",
+ providerID: "anthropic",
+ api: {
+ id: "claude-sonnet-4-20250514",
+ url: "https://api.anthropic.com",
+ npm: "@ai-sdk/anthropic",
+ },
+ name: "Claude Sonnet 4",
+ capabilities: {
+ temperature: true,
+ reasoning: true,
+ attachment: true,
+ toolcall: true,
+ input: { text: true, audio: false, image: true, video: false, pdf: true },
+ output: { text: true, audio: false, image: false, video: false, pdf: false },
+ interleaved: true,
+ },
+ cost: {
+ input: 0.003,
+ output: 0.015,
+ cache: { read: 0.0003, write: 0.00375 },
+ },
+ limit: {
+ context: 200000,
+ output: 8192,
+ },
+ status: "active",
+ options: {},
+ headers: {},
+ } as any
+
+ test("preserves redacted_thinking blocks (empty text with providerOptions)", () => {
+ const msgs = [
+ {
+ role: "assistant",
+ content: [
+ {
+ type: "reasoning",
+ text: "Let me think about this...",
+ providerOptions: { anthropic: { signature: "sig_abc123" } },
+ },
+ {
+ type: "reasoning",
+ text: "",
+ providerOptions: { anthropic: { redactedData: "opaque_encrypted_data_blob" } },
+ },
+ { type: "text", text: "Here is my answer." },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, anthropicModel, {})
+
+ expect(result).toHaveLength(1)
+ expect(result[0].content).toHaveLength(3)
+ expect(result[0].content[0]).toEqual({
+ type: "reasoning",
+ text: "Let me think about this...",
+ providerOptions: { anthropic: { signature: "sig_abc123" } },
+ })
+ expect(result[0].content[1]).toEqual({
+ type: "reasoning",
+ text: "",
+ providerOptions: { anthropic: { redactedData: "opaque_encrypted_data_blob" } },
+ })
+ expect(result[0].content[2]).toEqual({ type: "text", text: "Here is my answer." })
+ })
+
+ test("still filters empty reasoning parts without providerOptions", () => {
+ const msgs = [
+ {
+ role: "assistant",
+ content: [
+ { type: "reasoning", text: "" },
+ { type: "text", text: "Answer" },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, anthropicModel, {})
+
+ expect(result).toHaveLength(1)
+ expect(result[0].content).toHaveLength(1)
+ expect(result[0].content[0]).toEqual({ type: "text", text: "Answer" })
+ })
+
+ test("preserves multiple redacted_thinking blocks interleaved with thinking blocks", () => {
+ const msgs = [
+ {
+ role: "assistant",
+ content: [
+ {
+ type: "reasoning",
+ text: "First thought",
+ providerOptions: { anthropic: { signature: "sig_1" } },
+ },
+ {
+ type: "reasoning",
+ text: "",
+ providerOptions: { anthropic: { redactedData: "redacted_1" } },
+ },
+ {
+ type: "reasoning",
+ text: "Second thought",
+ providerOptions: { anthropic: { signature: "sig_2" } },
+ },
+ {
+ type: "reasoning",
+ text: "",
+ providerOptions: { anthropic: { redactedData: "redacted_2" } },
+ },
+ { type: "text", text: "Final answer" },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, anthropicModel, {})
+
+ expect(result).toHaveLength(1)
+ expect(result[0].content).toHaveLength(5)
+ expect(result[0].content[1].type).toBe("reasoning")
+ expect(result[0].content[1].text).toBe("")
+ expect(result[0].content[1].providerOptions.anthropic.redactedData).toBe("redacted_1")
+ expect(result[0].content[3].type).toBe("reasoning")
+ expect(result[0].content[3].text).toBe("")
+ expect(result[0].content[3].providerOptions.anthropic.redactedData).toBe("redacted_2")
+ })
+
+ test("preserves redacted_thinking on bedrock provider", () => {
+ const bedrockModel = {
+ ...anthropicModel,
+ id: "amazon-bedrock/anthropic.claude-sonnet-4",
+ providerID: "amazon-bedrock",
+ api: {
+ id: "anthropic.claude-sonnet-4",
+ url: "https://bedrock-runtime.us-east-1.amazonaws.com",
+ npm: "@ai-sdk/amazon-bedrock",
+ },
+ }
+
+ const msgs = [
+ {
+ role: "assistant",
+ content: [
+ {
+ type: "reasoning",
+ text: "",
+ providerOptions: { anthropic: { redactedData: "opaque_data" } },
+ },
+ { type: "text", text: "Answer" },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, bedrockModel, {})
+
+ expect(result).toHaveLength(1)
+ expect(result[0].content).toHaveLength(2)
+ expect(result[0].content[0]).toEqual({
+ type: "reasoning",
+ text: "",
+ providerOptions: { anthropic: { redactedData: "opaque_data" } },
+ })
+ })
+})
+
+describe("ProviderTransform.message - preserve thinking blocks during tool-use reordering", () => {
+ const anthropicModel = {
+ id: "anthropic/claude-sonnet-4",
+ providerID: "anthropic",
+ api: {
+ id: "claude-sonnet-4-20250514",
+ url: "https://api.anthropic.com",
+ npm: "@ai-sdk/anthropic",
+ },
+ name: "Claude Sonnet 4",
+ capabilities: {
+ temperature: true,
+ reasoning: true,
+ attachment: true,
+ toolcall: true,
+ input: { text: true, audio: false, image: true, video: false, pdf: true },
+ output: { text: true, audio: false, image: false, video: false, pdf: false },
+ interleaved: true,
+ },
+ cost: {
+ input: 0.003,
+ output: 0.015,
+ cache: { read: 0.0003, write: 0.00375 },
+ },
+ limit: {
+ context: 200000,
+ output: 8192,
+ },
+ status: "active",
+ options: {},
+ headers: {},
+ } as any
+
+ test("does not split assistant message when reasoning blocks are present with trailing tool calls", () => {
+ const msgs = [
+ {
+ role: "assistant",
+ content: [
+ {
+ type: "reasoning",
+ text: "I need to check the file...",
+ providerOptions: { anthropic: { signature: "sig_abc" } },
+ },
+ { type: "tool-call", toolCallId: "toolu_1", toolName: "read", input: { filePath: "/root" } },
+ { type: "text", text: "Let me check that for you." },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, anthropicModel, {})
+
+ // Should NOT be split — message must remain intact to preserve thinking block positions
+ expect(result).toHaveLength(1)
+ expect(result[0].content).toHaveLength(3)
+ expect(result[0].content[0].type).toBe("reasoning")
+ expect(result[0].content[1].type).toBe("tool-call")
+ expect(result[0].content[2].type).toBe("text")
+ })
+
+ test("does not split when redacted_thinking blocks are present", () => {
+ const msgs = [
+ {
+ role: "assistant",
+ content: [
+ {
+ type: "reasoning",
+ text: "",
+ providerOptions: { anthropic: { redactedData: "opaque_data" } },
+ },
+ { type: "tool-call", toolCallId: "toolu_1", toolName: "bash", input: { command: "ls" } },
+ { type: "text", text: "Done." },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, anthropicModel, {})
+
+ expect(result).toHaveLength(1)
+ expect(result[0].content).toHaveLength(3)
+ })
+
+ test("still splits messages without reasoning blocks", () => {
+ const msgs = [
+ {
+ role: "assistant",
+ content: [
+ { type: "tool-call", toolCallId: "toolu_1", toolName: "read", input: { filePath: "/root" } },
+ { type: "text", text: "I checked the file." },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, anthropicModel, {})
+
+ // Should still split when no reasoning blocks are present
+ expect(result).toHaveLength(2)
+ expect(result[0].content).toEqual([{ type: "text", text: "I checked the file." }])
+ expect(result[1].content).toEqual([
+ { type: "tool-call", toolCallId: "toolu_1", toolName: "read", input: { filePath: "/root" } },
+ ])
+ })
+
+ test("works on vertex anthropic with reasoning blocks", () => {
+ const vertexModel = {
+ ...anthropicModel,
+ providerID: "google-vertex-anthropic",
+ api: {
+ id: "claude-sonnet-4@20250514",
+ url: "https://us-central1-aiplatform.googleapis.com",
+ npm: "@ai-sdk/google-vertex/anthropic",
+ },
+ }
+
+ const msgs = [
+ {
+ role: "assistant",
+ content: [
+ {
+ type: "reasoning",
+ text: "Thinking...",
+ providerOptions: { anthropic: { signature: "sig_xyz" } },
+ },
+ { type: "tool-call", toolCallId: "toolu_1", toolName: "read", input: { filePath: "/tmp" } },
+ { type: "text", text: "Here are the results." },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, vertexModel, {})
+
+ expect(result).toHaveLength(1)
+ expect(result[0].content).toHaveLength(3)
+ })
+})
+
+describe("ProviderTransform.message - cache control skips reasoning blocks", () => {
+ test("cache hint is applied to non-reasoning block when reasoning is last", () => {
+ // This tests providers where cache control is applied at the content-part level
+ // (not message level), e.g. openrouter routing to Anthropic
+ const openrouterModel = {
+ id: "openrouter/anthropic/claude-sonnet-4",
+ providerID: "openrouter",
+ api: {
+ id: "anthropic/claude-sonnet-4",
+ url: "https://openrouter.ai/api/v1",
+ npm: "@openrouter/ai-sdk-provider",
+ },
+ name: "Claude Sonnet 4",
+ capabilities: {
+ temperature: true,
+ reasoning: true,
+ attachment: true,
+ toolcall: true,
+ input: { text: true, audio: false, image: true, video: false, pdf: true },
+ output: { text: true, audio: false, image: false, video: false, pdf: false },
+ interleaved: true,
+ },
+ cost: {
+ input: 0.003,
+ output: 0.015,
+ cache: { read: 0.0003, write: 0.00375 },
+ },
+ limit: {
+ context: 200000,
+ output: 8192,
+ },
+ status: "active",
+ options: {},
+ headers: {},
+ } as any
+
+ const msgs = [
+ {
+ role: "system",
+ content: [{ type: "text", text: "You are a helpful assistant." }],
+ },
+ {
+ role: "user",
+ content: [{ type: "text", text: "Hello" }],
+ },
+ {
+ role: "assistant",
+ content: [
+ { type: "text", text: "I'll help you." },
+ {
+ type: "reasoning",
+ text: "Thinking about next steps...",
+ providerOptions: { anthropic: { signature: "sig_123" } },
+ },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, openrouterModel, {}) as any[]
+
+ // The last message is in the "final" set for caching.
+ // Cache hint should be on the text block, NOT the reasoning block.
+ const assistantMsg = result[result.length - 1]
+ const reasoningPart = assistantMsg.content.find((p: any) => p.type === "reasoning")
+ const textPart = assistantMsg.content.find((p: any) => p.type === "text")
+
+ // Reasoning block should not have cache control added
+ expect(reasoningPart.providerOptions?.anthropic?.cacheControl).toBeUndefined()
+ expect(reasoningPart.providerOptions?.openrouter?.cacheControl).toBeUndefined()
+ // The text block should receive the cache hint instead
+ expect(textPart.providerOptions).toBeDefined()
+ })
+
+ test("cache hint falls back to message level when all content blocks are reasoning", () => {
+ const openrouterModel = {
+ id: "openrouter/anthropic/claude-sonnet-4",
+ providerID: "openrouter",
+ api: {
+ id: "anthropic/claude-sonnet-4",
+ url: "https://openrouter.ai/api/v1",
+ npm: "@openrouter/ai-sdk-provider",
+ },
+ name: "Claude Sonnet 4",
+ capabilities: {
+ temperature: true,
+ reasoning: true,
+ attachment: true,
+ toolcall: true,
+ input: { text: true, audio: false, image: true, video: false, pdf: true },
+ output: { text: true, audio: false, image: false, video: false, pdf: false },
+ interleaved: true,
+ },
+ cost: {
+ input: 0.003,
+ output: 0.015,
+ cache: { read: 0.0003, write: 0.00375 },
+ },
+ limit: {
+ context: 200000,
+ output: 8192,
+ },
+ status: "active",
+ options: {},
+ headers: {},
+ } as any
+
+ const msgs = [
+ {
+ role: "system",
+ content: [{ type: "text", text: "You are a helpful assistant." }],
+ },
+ {
+ role: "assistant",
+ content: [
+ {
+ type: "reasoning",
+ text: "Only reasoning here...",
+ providerOptions: { anthropic: { signature: "sig_only" } },
+ },
+ ],
+ },
+ ] as any[]
+
+ const result = ProviderTransform.message(msgs, openrouterModel, {}) as any[]
+
+ // When no suitable content block is found, cache hint should go to message level
+ const assistantMsg = result[result.length - 1]
+ const reasoningPart = assistantMsg.content[0]
+
+ // Reasoning block must NOT be modified
+ expect(reasoningPart.providerOptions?.openrouter?.cacheControl).toBeUndefined()
+ // Cache falls back to message-level providerOptions
+ expect(assistantMsg.providerOptions).toBeDefined()
+ })
+})
diff --git a/packages/opencode/test/tool/apply_patch.test.ts b/packages/opencode/test/tool/apply_patch.test.ts
index ebfa9a531eec..fa88432136a5 100644
--- a/packages/opencode/test/tool/apply_patch.test.ts
+++ b/packages/opencode/test/tool/apply_patch.test.ts
@@ -195,6 +195,35 @@ describe("tool.apply_patch freeform", () => {
})
})
+ test("does not invent a first-line diff for BOM files", async () => {
+ await using fixture = await tmpdir()
+ const { ctx, calls } = makeCtx()
+
+ await Instance.provide({
+ directory: fixture.path,
+ fn: async () => {
+ const bom = String.fromCharCode(0xfeff)
+ const target = path.join(fixture.path, "example.cs")
+ await fs.writeFile(target, `${bom}using System;\n\nclass Test {}\n`, "utf-8")
+
+ const patchText =
+ "*** Begin Patch\n*** Update File: example.cs\n@@\n class Test {}\n+class Next {}\n*** End Patch"
+
+ await execute({ patchText }, ctx)
+
+ expect(calls.length).toBe(1)
+ const shown = calls[0].metadata.files[0]?.patch ?? ""
+ expect(shown).not.toContain(bom)
+ expect(shown).not.toContain("-using System;")
+ expect(shown).not.toContain("+using System;")
+
+ const content = await fs.readFile(target, "utf-8")
+ expect(content.charCodeAt(0)).toBe(0xfeff)
+ expect(content.slice(1)).toBe("using System;\n\nclass Test {}\nclass Next {}\n")
+ },
+ })
+ })
+
test("inserts lines with insert-only hunk", async () => {
await using fixture = await tmpdir()
const { ctx } = makeCtx()
diff --git a/packages/opencode/test/tool/edit.test.ts b/packages/opencode/test/tool/edit.test.ts
index b5fbc0a67dde..82e1b4a7fd4b 100644
--- a/packages/opencode/test/tool/edit.test.ts
+++ b/packages/opencode/test/tool/edit.test.ts
@@ -96,6 +96,37 @@ describe("tool.edit", () => {
})
})
+ test("preserves BOM when oldString is empty on existing files", async () => {
+ await using tmp = await tmpdir()
+ const filepath = path.join(tmp.path, "existing.cs")
+ const bom = String.fromCharCode(0xfeff)
+ await fs.writeFile(filepath, `${bom}using System;\n`, "utf-8")
+
+ await Instance.provide({
+ directory: tmp.path,
+ fn: async () => {
+ const edit = await resolve()
+ const result = await Effect.runPromise(
+ edit.execute(
+ {
+ filePath: filepath,
+ oldString: "",
+ newString: "using Up;\n",
+ },
+ ctx,
+ ),
+ )
+
+ expect(result.metadata.diff).toContain("-using System;")
+ expect(result.metadata.diff).toContain("+using Up;")
+
+ const content = await fs.readFile(filepath, "utf-8")
+ expect(content.charCodeAt(0)).toBe(0xfeff)
+ expect(content.slice(1)).toBe("using Up;\n")
+ },
+ })
+ })
+
test("creates new file with nested directories", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "nested", "dir", "file.txt")
@@ -183,6 +214,38 @@ describe("tool.edit", () => {
})
})
+ test("replaces the first visible line in BOM files", async () => {
+ await using tmp = await tmpdir()
+ const filepath = path.join(tmp.path, "existing.cs")
+ const bom = String.fromCharCode(0xfeff)
+ await fs.writeFile(filepath, `${bom}using System;\nclass Test {}\n`, "utf-8")
+
+ await Instance.provide({
+ directory: tmp.path,
+ fn: async () => {
+ const edit = await resolve()
+ const result = await Effect.runPromise(
+ edit.execute(
+ {
+ filePath: filepath,
+ oldString: "using System;",
+ newString: "using Up;",
+ },
+ ctx,
+ ),
+ )
+
+ expect(result.metadata.diff).toContain("-using System;")
+ expect(result.metadata.diff).toContain("+using Up;")
+ expect(result.metadata.diff).not.toContain(bom)
+
+ const content = await fs.readFile(filepath, "utf-8")
+ expect(content.charCodeAt(0)).toBe(0xfeff)
+ expect(content.slice(1)).toBe("using Up;\nclass Test {}\n")
+ },
+ })
+ })
+
test("throws error when file does not exist", async () => {
await using tmp = await tmpdir()
const filepath = path.join(tmp.path, "nonexistent.txt")
diff --git a/packages/opencode/test/tool/write.test.ts b/packages/opencode/test/tool/write.test.ts
index 50d3b57527f9..36131f9596a3 100644
--- a/packages/opencode/test/tool/write.test.ts
+++ b/packages/opencode/test/tool/write.test.ts
@@ -114,6 +114,54 @@ describe("tool.write", () => {
),
)
+ it.live("preserves BOM when overwriting existing files", () =>
+ provideTmpdirInstance((dir) =>
+ Effect.gen(function* () {
+ const filepath = path.join(dir, "existing.cs")
+ const bom = String.fromCharCode(0xfeff)
+ yield* Effect.promise(() => fs.writeFile(filepath, `${bom}using System;\n`, "utf-8"))
+
+ yield* run({ filePath: filepath, content: "using Up;\n" })
+
+ const content = yield* Effect.promise(() => fs.readFile(filepath, "utf-8"))
+ expect(content.charCodeAt(0)).toBe(0xfeff)
+ expect(content.slice(1)).toBe("using Up;\n")
+ }),
+ ),
+ )
+
+ it.live("restores BOM after formatter strips it", () =>
+ provideTmpdirInstance(
+ (dir) =>
+ Effect.gen(function* () {
+ const filepath = path.join(dir, "formatted.cs")
+ const bom = String.fromCharCode(0xfeff)
+ yield* Effect.promise(() => fs.writeFile(filepath, `${bom}using System;\n`, "utf-8"))
+
+ yield* run({ filePath: filepath, content: "using Up;\n" })
+
+ const content = yield* Effect.promise(() => fs.readFile(filepath, "utf-8"))
+ expect(content.charCodeAt(0)).toBe(0xfeff)
+ expect(content.slice(1)).toBe("using Up;\n")
+ }),
+ {
+ config: {
+ formatter: {
+ stripbom: {
+ extensions: [".cs"],
+ command: [
+ "node",
+ "-e",
+ "const fs = require('fs'); const file = process.argv[1]; let text = fs.readFileSync(file, 'utf8'); if (text.charCodeAt(0) === 0xfeff) text = text.slice(1); fs.writeFileSync(file, text, 'utf8')",
+ "$FILE",
+ ],
+ },
+ },
+ },
+ },
+ ),
+ )
+
it.live("returns diff in metadata for existing files", () =>
provideTmpdirInstance((dir) =>
Effect.gen(function* () {
diff --git a/packages/ui/src/components/timeline-playground.stories.tsx b/packages/ui/src/components/timeline-playground.stories.tsx
index c071db303b7a..72f5730612c5 100644
--- a/packages/ui/src/components/timeline-playground.stories.tsx
+++ b/packages/ui/src/components/timeline-playground.stories.tsx
@@ -318,7 +318,7 @@ const TOOL_SAMPLES = {
tool: "bash",
input: { command: "bun test --filter session", description: "Run session tests" },
output:
- "bun test v1.3.11\n\n✓ session-turn.test.tsx (3 tests) 45ms\n✓ message-part.test.tsx (7 tests) 120ms\n\nTest Suites: 2 passed, 2 total\nTests: 10 passed, 10 total\nTime: 0.89s",
+ "bun test v1.3.13\n\n✓ session-turn.test.tsx (3 tests) 45ms\n✓ message-part.test.tsx (7 tests) 120ms\n\nTest Suites: 2 passed, 2 total\nTests: 10 passed, 10 total\nTime: 0.89s",
title: "Run session tests",
metadata: { command: "bun test --filter session" },
},