Skip to content

Commit 66941b6

Browse files
authored
Merge branch 'dev' into config/path-resolve
2 parents 310e319 + 46122d9 commit 66941b6

45 files changed

Lines changed: 2504 additions & 53 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.github/workflows/beta.yml

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,15 @@
11
name: beta
22

33
on:
4-
push:
5-
branches: [dev]
6-
pull_request:
7-
types: [opened, synchronize, labeled, unlabeled]
4+
workflow_dispatch:
5+
schedule:
6+
- cron: "0 * * * *"
87

98
jobs:
109
sync:
11-
if: |
12-
github.event_name == 'push' ||
13-
(github.event_name == 'pull_request' &&
14-
contains(github.event.pull_request.labels.*.name, 'contributor'))
1510
runs-on: blacksmith-4vcpu-ubuntu-2404
1611
permissions:
1712
contents: write
18-
pull-requests: write
1913
steps:
2014
- name: Checkout repository
2115
uses: actions/checkout@v4

.github/workflows/test.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,9 @@
11
name: test
22

33
on:
4+
push:
5+
branches:
6+
- dev
47
pull_request:
58
workflow_dispatch:
69
jobs:

packages/opencode/script/build.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,11 +14,11 @@ process.chdir(dir)
1414

1515
import pkg from "../package.json"
1616
import { Script } from "@opencode-ai/script"
17-
17+
const modelsUrl = process.env.OPENCODE_MODELS_URL || "https://models.dev"
1818
// Fetch and generate models.dev snapshot
1919
const modelsData = process.env.MODELS_DEV_API_JSON
2020
? await Bun.file(process.env.MODELS_DEV_API_JSON).text()
21-
: await fetch(`https://models.dev/api.json`).then((x) => x.text())
21+
: await fetch(`${modelsUrl}/api.json`).then((x) => x.text())
2222
await Bun.write(
2323
path.join(dir, "src/provider/models-snapshot.ts"),
2424
`// Auto-generated by build.ts - do not edit\nexport const snapshot = ${modelsData} as const\n`,

packages/opencode/src/cli/cmd/tui/component/prompt/autocomplete.tsx

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -345,8 +345,9 @@ export function Autocomplete(props: {
345345
const results: AutocompleteOption[] = [...command.slashes()]
346346

347347
for (const serverCommand of sync.data.command) {
348+
const label = serverCommand.source === "mcp" ? ":mcp" : serverCommand.source === "skill" ? ":skill" : ""
348349
results.push({
349-
display: "/" + serverCommand.name + (serverCommand.mcp ? " (MCP)" : ""),
350+
display: "/" + serverCommand.name + label,
350351
description: serverCommand.description,
351352
onSelect: () => {
352353
const newText = "/" + serverCommand.name + " "

packages/opencode/src/command/index.ts

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ import { Identifier } from "../id/id"
66
import PROMPT_INITIALIZE from "./template/initialize.txt"
77
import PROMPT_REVIEW from "./template/review.txt"
88
import { MCP } from "../mcp"
9+
import { Skill } from "../skill"
910

1011
export namespace Command {
1112
export const Event = {
@@ -26,7 +27,7 @@ export namespace Command {
2627
description: z.string().optional(),
2728
agent: z.string().optional(),
2829
model: z.string().optional(),
29-
mcp: z.boolean().optional(),
30+
source: z.enum(["command", "mcp", "skill"]).optional(),
3031
// workaround for zod not supporting async functions natively so we use getters
3132
// https://zod.dev/v4/changelog?id=zfunction
3233
template: z.promise(z.string()).or(z.string()),
@@ -94,7 +95,7 @@ export namespace Command {
9495
for (const [name, prompt] of Object.entries(await MCP.prompts())) {
9596
result[name] = {
9697
name,
97-
mcp: true,
98+
source: "mcp",
9899
description: prompt.description,
99100
get template() {
100101
// since a getter can't be async we need to manually return a promise here
@@ -118,6 +119,21 @@ export namespace Command {
118119
}
119120
}
120121

122+
// Add skills as invokable commands
123+
for (const skill of await Skill.all()) {
124+
// Skip if a command with this name already exists
125+
if (result[skill.name]) continue
126+
result[skill.name] = {
127+
name: skill.name,
128+
description: skill.description,
129+
source: "skill",
130+
get template() {
131+
return skill.content
132+
},
133+
hints: [],
134+
}
135+
}
136+
121137
return result
122138
})
123139

packages/opencode/src/env/index.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,9 @@ import { Instance } from "../project/instance"
22

33
export namespace Env {
44
const state = Instance.state(() => {
5-
return process.env as Record<string, string | undefined>
5+
// Create a shallow copy to isolate environment per instance
6+
// Prevents parallel tests from interfering with each other's env vars
7+
return { ...process.env } as Record<string, string | undefined>
68
})
79

810
export function get(key: string) {

packages/opencode/src/file/ripgrep.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -214,8 +214,8 @@ export namespace Ripgrep {
214214
input.signal?.throwIfAborted()
215215

216216
const args = [await filepath(), "--files", "--glob=!.git/*"]
217-
if (input.follow !== false) args.push("--follow")
218-
if (input.hidden !== false) args.push("--hidden")
217+
if (input.follow) args.push("--follow")
218+
if (input.hidden) args.push("--hidden")
219219
if (input.maxDepth !== undefined) args.push(`--max-depth=${input.maxDepth}`)
220220
if (input.glob) {
221221
for (const g of input.glob) {
@@ -381,7 +381,7 @@ export namespace Ripgrep {
381381
follow?: boolean
382382
}) {
383383
const args = [`${await filepath()}`, "--json", "--hidden", "--glob='!.git/*'"]
384-
if (input.follow !== false) args.push("--follow")
384+
if (input.follow) args.push("--follow")
385385

386386
if (input.glob) {
387387
for (const g of input.glob) {

packages/opencode/src/provider/provider.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ import { createVertexAnthropic } from "@ai-sdk/google-vertex/anthropic"
2424
import { createOpenAI } from "@ai-sdk/openai"
2525
import { createOpenAICompatible } from "@ai-sdk/openai-compatible"
2626
import { createOpenRouter, type LanguageModelV2 } from "@openrouter/ai-sdk-provider"
27-
import { createOpenaiCompatible as createGitHubCopilotOpenAICompatible } from "./sdk/openai-compatible/src"
27+
import { createOpenaiCompatible as createGitHubCopilotOpenAICompatible } from "./sdk/copilot"
2828
import { createXai } from "@ai-sdk/xai"
2929
import { createMistral } from "@ai-sdk/mistral"
3030
import { createGroq } from "@ai-sdk/groq"

packages/opencode/src/provider/sdk/openai-compatible/src/README.md renamed to packages/opencode/src/provider/sdk/copilot/README.md

File renamed without changes.
Lines changed: 169 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,169 @@
1+
import {
2+
type LanguageModelV2Prompt,
3+
type SharedV2ProviderMetadata,
4+
UnsupportedFunctionalityError,
5+
} from "@ai-sdk/provider"
6+
import type { OpenAICompatibleChatPrompt } from "./openai-compatible-api-types"
7+
import { convertToBase64 } from "@ai-sdk/provider-utils"
8+
9+
function getOpenAIMetadata(message: { providerOptions?: SharedV2ProviderMetadata }) {
10+
return message?.providerOptions?.copilot ?? {}
11+
}
12+
13+
export function convertToOpenAICompatibleChatMessages(prompt: LanguageModelV2Prompt): OpenAICompatibleChatPrompt {
14+
const messages: OpenAICompatibleChatPrompt = []
15+
for (const { role, content, ...message } of prompt) {
16+
const metadata = getOpenAIMetadata({ ...message })
17+
switch (role) {
18+
case "system": {
19+
messages.push({
20+
role: "system",
21+
content: [
22+
{
23+
type: "text",
24+
text: content,
25+
},
26+
],
27+
...metadata,
28+
})
29+
break
30+
}
31+
32+
case "user": {
33+
if (content.length === 1 && content[0].type === "text") {
34+
messages.push({
35+
role: "user",
36+
content: content[0].text,
37+
...getOpenAIMetadata(content[0]),
38+
})
39+
break
40+
}
41+
42+
messages.push({
43+
role: "user",
44+
content: content.map((part) => {
45+
const partMetadata = getOpenAIMetadata(part)
46+
switch (part.type) {
47+
case "text": {
48+
return { type: "text", text: part.text, ...partMetadata }
49+
}
50+
case "file": {
51+
if (part.mediaType.startsWith("image/")) {
52+
const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType
53+
54+
return {
55+
type: "image_url",
56+
image_url: {
57+
url:
58+
part.data instanceof URL
59+
? part.data.toString()
60+
: `data:${mediaType};base64,${convertToBase64(part.data)}`,
61+
},
62+
...partMetadata,
63+
}
64+
} else {
65+
throw new UnsupportedFunctionalityError({
66+
functionality: `file part media type ${part.mediaType}`,
67+
})
68+
}
69+
}
70+
}
71+
}),
72+
...metadata,
73+
})
74+
75+
break
76+
}
77+
78+
case "assistant": {
79+
let text = ""
80+
let reasoningText: string | undefined
81+
let reasoningOpaque: string | undefined
82+
const toolCalls: Array<{
83+
id: string
84+
type: "function"
85+
function: { name: string; arguments: string }
86+
}> = []
87+
88+
for (const part of content) {
89+
const partMetadata = getOpenAIMetadata(part)
90+
// Check for reasoningOpaque on any part (may be attached to text/tool-call)
91+
const partOpaque = (part.providerOptions as { copilot?: { reasoningOpaque?: string } })?.copilot
92+
?.reasoningOpaque
93+
if (partOpaque && !reasoningOpaque) {
94+
reasoningOpaque = partOpaque
95+
}
96+
97+
switch (part.type) {
98+
case "text": {
99+
text += part.text
100+
break
101+
}
102+
case "reasoning": {
103+
reasoningText = part.text
104+
break
105+
}
106+
case "tool-call": {
107+
toolCalls.push({
108+
id: part.toolCallId,
109+
type: "function",
110+
function: {
111+
name: part.toolName,
112+
arguments: JSON.stringify(part.input),
113+
},
114+
...partMetadata,
115+
})
116+
break
117+
}
118+
}
119+
}
120+
121+
messages.push({
122+
role: "assistant",
123+
content: text || null,
124+
tool_calls: toolCalls.length > 0 ? toolCalls : undefined,
125+
reasoning_text: reasoningText,
126+
reasoning_opaque: reasoningOpaque,
127+
...metadata,
128+
})
129+
130+
break
131+
}
132+
133+
case "tool": {
134+
for (const toolResponse of content) {
135+
const output = toolResponse.output
136+
137+
let contentValue: string
138+
switch (output.type) {
139+
case "text":
140+
case "error-text":
141+
contentValue = output.value
142+
break
143+
case "content":
144+
case "json":
145+
case "error-json":
146+
contentValue = JSON.stringify(output.value)
147+
break
148+
}
149+
150+
const toolResponseMetadata = getOpenAIMetadata(toolResponse)
151+
messages.push({
152+
role: "tool",
153+
tool_call_id: toolResponse.toolCallId,
154+
content: contentValue,
155+
...toolResponseMetadata,
156+
})
157+
}
158+
break
159+
}
160+
161+
default: {
162+
const _exhaustiveCheck: never = role
163+
throw new Error(`Unsupported role: ${_exhaustiveCheck}`)
164+
}
165+
}
166+
}
167+
168+
return messages
169+
}

0 commit comments

Comments
 (0)