diff --git a/bun.lock b/bun.lock
index 325eeb15f5..e211191034 100644
--- a/bun.lock
+++ b/bun.lock
@@ -26,7 +26,7 @@
},
"packages/app": {
"name": "@opencode-ai/app",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -76,7 +76,7 @@
},
"packages/console/app": {
"name": "@opencode-ai/console-app",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@cloudflare/vite-plugin": "1.15.2",
"@ibm/plex": "6.4.1",
@@ -110,7 +110,7 @@
},
"packages/console/core": {
"name": "@opencode-ai/console-core",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@aws-sdk/client-sts": "3.782.0",
"@jsx-email/render": "1.1.1",
@@ -137,7 +137,7 @@
},
"packages/console/function": {
"name": "@opencode-ai/console-function",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@ai-sdk/anthropic": "2.0.0",
"@ai-sdk/openai": "2.0.2",
@@ -161,7 +161,7 @@
},
"packages/console/mail": {
"name": "@opencode-ai/console-mail",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",
@@ -185,7 +185,7 @@
},
"packages/desktop": {
"name": "@opencode-ai/desktop",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@opencode-ai/app": "workspace:*",
"@opencode-ai/ui": "workspace:*",
@@ -218,7 +218,7 @@
},
"packages/desktop-electron": {
"name": "@opencode-ai/desktop-electron",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@opencode-ai/app": "workspace:*",
"@opencode-ai/ui": "workspace:*",
@@ -248,7 +248,7 @@
},
"packages/enterprise": {
"name": "@opencode-ai/enterprise",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@opencode-ai/ui": "workspace:*",
"@opencode-ai/util": "workspace:*",
@@ -277,7 +277,7 @@
},
"packages/function": {
"name": "@opencode-ai/function",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@octokit/auth-app": "8.0.1",
"@octokit/rest": "catalog:",
@@ -293,9 +293,11 @@
},
"packages/opencode": {
"name": "opencode",
- "version": "1.2.18",
+ "version": "1.2.20",
"bin": {
"opencode": "./bin/opencode",
+ "altimate": "./bin/altimate",
+ "altimate-code": "./bin/altimate",
},
"dependencies": {
"@actions/core": "1.11.1",
@@ -373,6 +375,7 @@
"ulid": "catalog:",
"vscode-jsonrpc": "8.2.1",
"web-tree-sitter": "0.25.10",
+ "which": "6.0.1",
"xdg-basedir": "5.1.0",
"yargs": "18.0.0",
"zod": "catalog:",
@@ -395,6 +398,7 @@
"@types/bun": "catalog:",
"@types/mime-types": "3.0.1",
"@types/turndown": "5.0.5",
+ "@types/which": "3.0.4",
"@types/yargs": "17.0.33",
"@typescript/native-preview": "catalog:",
"drizzle-kit": "1.0.0-beta.12-a5629fb",
@@ -407,7 +411,7 @@
},
"packages/plugin": {
"name": "@opencode-ai/plugin",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"zod": "catalog:",
@@ -427,7 +431,7 @@
},
"packages/sdk/js": {
"name": "@opencode-ai/sdk",
- "version": "1.2.18",
+ "version": "1.2.20",
"devDependencies": {
"@hey-api/openapi-ts": "0.90.10",
"@tsconfig/node22": "catalog:",
@@ -438,7 +442,7 @@
},
"packages/slack": {
"name": "@opencode-ai/slack",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@opencode-ai/sdk": "workspace:*",
"@slack/bolt": "^3.17.1",
@@ -473,7 +477,7 @@
},
"packages/ui": {
"name": "@opencode-ai/ui",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@kobalte/core": "catalog:",
"@opencode-ai/sdk": "workspace:*",
@@ -519,7 +523,7 @@
},
"packages/util": {
"name": "@opencode-ai/util",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"zod": "catalog:",
},
@@ -530,7 +534,7 @@
},
"packages/web": {
"name": "@opencode-ai/web",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@astrojs/cloudflare": "12.6.3",
"@astrojs/markdown-remark": "6.3.1",
@@ -2120,6 +2124,8 @@
"@types/whatwg-mimetype": ["@types/whatwg-mimetype@3.0.2", "", {}, "sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA=="],
+ "@types/which": ["@types/which@3.0.4", "", {}, "sha512-liyfuo/106JdlgSchJzXEQCVArk0CvevqPote8F8HgWgJ3dRCcTHgJIsLDuee0kxk/mhbInzIZk3QWSZJ8R+2w=="],
+
"@types/ws": ["@types/ws@8.18.1", "", { "dependencies": { "@types/node": "*" } }, "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg=="],
"@types/yargs": ["@types/yargs@17.0.33", "", { "dependencies": { "@types/yargs-parser": "*" } }, "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA=="],
@@ -3236,7 +3242,7 @@
"isbinaryfile": ["isbinaryfile@5.0.7", "", {}, "sha512-gnWD14Jh3FzS3CPhF0AxNOJ8CxqeblPTADzI38r0wt8ZyQl5edpy75myt08EG2oKvpyiqSqsx+Wkz9vtkbTqYQ=="],
- "isexe": ["isexe@3.1.5", "", {}, "sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w=="],
+ "isexe": ["isexe@4.0.0", "", {}, "sha512-FFUtZMpoZ8RqHS3XeXEmHWLA4thH+ZxCv2lOiPIn1Xc7CxrqhWzNSDzD+/chS/zbYezmiwWLdQC09JdQKmthOw=="],
"isomorphic-ws": ["isomorphic-ws@5.0.0", "", { "peerDependencies": { "ws": "*" } }, "sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw=="],
@@ -4586,7 +4592,7 @@
"when-exit": ["when-exit@2.1.5", "", {}, "sha512-VGkKJ564kzt6Ms1dbgPP/yuIoQCrsFAnRbptpC5wOEsDaNsbCB2bnfnaA8i/vRs5tjUSEOtIuvl9/MyVsvQZCg=="],
- "which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="],
+ "which": ["which@6.0.1", "", { "dependencies": { "isexe": "^4.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg=="],
"which-boxed-primitive": ["which-boxed-primitive@1.1.1", "", { "dependencies": { "is-bigint": "^1.1.0", "is-boolean-object": "^1.2.1", "is-number-object": "^1.1.1", "is-string": "^1.1.1", "is-symbol": "^1.1.1" } }, "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA=="],
@@ -5202,6 +5208,8 @@
"app-builder-lib/minimatch": ["minimatch@10.2.1", "", { "dependencies": { "brace-expansion": "^5.0.2" } }, "sha512-MClCe8IL5nRRmawL6ib/eT4oLyeKMGCghibcDWK+J0hh0Q8kqSdia6BvbRMVk6mPa6WqUa5uR2oxt6C5jd533A=="],
+ "app-builder-lib/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="],
+
"archiver-utils/glob": ["glob@10.5.0", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg=="],
"archiver-utils/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="],
@@ -5388,6 +5396,8 @@
"node-gyp/nopt": ["nopt@8.1.0", "", { "dependencies": { "abbrev": "^3.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A=="],
+ "node-gyp/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="],
+
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"nypm/citty": ["citty@0.2.1", "", {}, "sha512-kEV95lFBhQgtogAPlQfJJ0WGVSokvLr/UEoFPiKKOXF7pl98HfUVUD0ejsuTCld/9xH9vogSywZ5KqHzXrZpqg=="],
@@ -5918,6 +5928,8 @@
"app-builder-lib/@electron/get/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="],
+ "app-builder-lib/which/isexe": ["isexe@3.1.5", "", {}, "sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w=="],
+
"archiver-utils/glob/jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="],
"archiver-utils/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],
@@ -6000,6 +6012,8 @@
"node-gyp/nopt/abbrev": ["abbrev@3.0.1", "", {}, "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="],
+ "node-gyp/which/isexe": ["isexe@3.1.5", "", {}, "sha512-6B3tLtFqtQS4ekarvLVMZ+X+VlvQekbe4taUkf/rhVO3d/h0M2rfARm/pXLcPEsjjMsFgrFgSrhQIxcSVrBz8w=="],
+
"opencode/@ai-sdk/openai/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.20", "", { "dependencies": { "@ai-sdk/provider": "2.0.1", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-iXHVe0apM2zUEzauqJwqmpC37A5rihrStAih5Ks+JE32iTe4LZ58y17UGBjpQQTCRw9YxMeo2UFLxLpBluyvLQ=="],
"opencode/@ai-sdk/openai-compatible/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.20", "", { "dependencies": { "@ai-sdk/provider": "2.0.1", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-iXHVe0apM2zUEzauqJwqmpC37A5rihrStAih5Ks+JE32iTe4LZ58y17UGBjpQQTCRw9YxMeo2UFLxLpBluyvLQ=="],
diff --git a/github/index.ts b/github/index.ts
index da310178a7..1a0a992622 100644
--- a/github/index.ts
+++ b/github/index.ts
@@ -8,6 +8,7 @@ import type { Context as GitHubContext } from "@actions/github/lib/context"
import type { IssueCommentEvent, PullRequestReviewCommentEvent } from "@octokit/webhooks-types"
import { createOpencodeClient } from "@opencode-ai/sdk"
import { spawn } from "node:child_process"
+import { setTimeout as sleep } from "node:timers/promises"
type GitHubAuthor = {
login: string
@@ -281,7 +282,7 @@ async function assertOpencodeConnected() {
connected = true
break
} catch (e) {}
- await Bun.sleep(300)
+ await sleep(300)
} while (retry++ < 30)
if (!connected) {
diff --git a/nix/hashes.json b/nix/hashes.json
index 47e3e240bb..326cc98a66 100644
--- a/nix/hashes.json
+++ b/nix/hashes.json
@@ -1,8 +1,8 @@
{
"nodeModules": {
- "x86_64-linux": "sha256-v83hWzYVg/g4zJiBpGsQ71wTdndPk3BQVZ2mjMApUIQ=",
- "aarch64-linux": "sha256-inpMwkQqwBFP2wL8w/pTOP7q3fg1aOqvE0wgzVd3/B8=",
- "aarch64-darwin": "sha256-r42LGrQWqDyIy62mBSU5Nf3M22dJ3NNo7mjN/1h8d8Y=",
- "x86_64-darwin": "sha256-J6XrrdK5qBK3sQBQOO/B3ZluOnsAf5f65l4q/K1nDTI="
+ "x86_64-linux": "sha256-pBTIT8Pgdm3272YhBjiAZsmj0SSpHTklh6lGc8YcMoE=",
+ "aarch64-linux": "sha256-prt039++d5UZgtldAN6+RVOR557ifIeusiy5XpzN8QU=",
+ "aarch64-darwin": "sha256-Y3f+cXcIGLqz6oyc5fG22t6CLD4wGkvwqO6RNXjFriQ=",
+ "x86_64-darwin": "sha256-BjbBBhQUgGhrlP56skABcrObvutNUZSWnrnPCg1OTKE="
}
}
diff --git a/packages/app/e2e/actions.ts b/packages/app/e2e/actions.ts
index a7ccba6175..fbb13008b2 100644
--- a/packages/app/e2e/actions.ts
+++ b/packages/app/e2e/actions.ts
@@ -197,6 +197,7 @@ export async function createTestProject() {
await fs.writeFile(path.join(root, "README.md"), "# e2e\n")
execSync("git init", { cwd: root, stdio: "ignore" })
+ execSync("git config core.fsmonitor false", { cwd: root, stdio: "ignore" })
execSync("git add -A", { cwd: root, stdio: "ignore" })
execSync('git -c user.name="e2e" -c user.email="e2e@example.com" commit -m "init" --allow-empty', {
cwd: root,
@@ -207,7 +208,10 @@ export async function createTestProject() {
}
export async function cleanupTestProject(directory: string) {
- await fs.rm(directory, { recursive: true, force: true }).catch(() => undefined)
+ try {
+ execSync("git fsmonitor--daemon stop", { cwd: directory, stdio: "ignore" })
+ } catch {}
+ await fs.rm(directory, { recursive: true, force: true, maxRetries: 5, retryDelay: 100 }).catch(() => undefined)
}
export function sessionIDFromUrl(url: string) {
diff --git a/packages/app/package.json b/packages/app/package.json
index 37ccd9b53a..c91a91383d 100644
--- a/packages/app/package.json
+++ b/packages/app/package.json
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/app",
- "version": "1.2.18",
+ "version": "1.2.20",
"description": "",
"type": "module",
"exports": {
diff --git a/packages/app/src/pages/layout.tsx b/packages/app/src/pages/layout.tsx
index 2019ca4e5a..bd0315efbf 100644
--- a/packages/app/src/pages/layout.tsx
+++ b/packages/app/src/pages/layout.tsx
@@ -22,7 +22,7 @@ import { ResizeHandle } from "@opencode-ai/ui/resize-handle"
import { Button } from "@opencode-ai/ui/button"
import { Icon } from "@opencode-ai/ui/icon"
import { IconButton } from "@opencode-ai/ui/icon-button"
-import { Tooltip, TooltipKeybind } from "@opencode-ai/ui/tooltip"
+import { Tooltip } from "@opencode-ai/ui/tooltip"
import { DropdownMenu } from "@opencode-ai/ui/dropdown-menu"
import { Dialog } from "@opencode-ai/ui/dialog"
import { getFilename } from "@opencode-ai/util/path"
@@ -1937,20 +1937,14 @@ export default function Layout(props: ParentProps) {
fallback={
<>
- navigateWithSidebarReset(`/${base64Encode(p.worktree)}/session`)}
>
-
-
+ {language.t("command.session.new")}
+
<>
-
-
-
+
{
return `file://${path}`
},
openTab: (tab) => calls.push(`open:${tab}`),
+ setActive: (tab) => calls.push(`active:${tab}`),
loadFile: (path) => calls.push(`load:${path}`),
})
openReviewFile("src/a.ts")
- expect(calls).toEqual(["show", "load:src/a.ts", "tab:src/a.ts", "open:file://src/a.ts"])
+ expect(calls).toEqual(["show", "load:src/a.ts", "tab:src/a.ts", "open:file://src/a.ts", "active:file://src/a.ts"])
})
})
diff --git a/packages/app/src/pages/session/helpers.ts b/packages/app/src/pages/session/helpers.ts
index 20f1d99a8b..60b26cdf47 100644
--- a/packages/app/src/pages/session/helpers.ts
+++ b/packages/app/src/pages/session/helpers.ts
@@ -24,15 +24,20 @@ export const createOpenReviewFile = (input: {
showAllFiles: () => void
tabForPath: (path: string) => string
openTab: (tab: string) => void
+ setActive: (tab: string) => void
    loadFile: (path: string) => any | Promise<any>
}) => {
return (path: string) => {
batch(() => {
input.showAllFiles()
const maybePromise = input.loadFile(path)
- const openTab = () => input.openTab(input.tabForPath(path))
- if (maybePromise instanceof Promise) maybePromise.then(openTab)
- else openTab()
+ const open = () => {
+ const tab = input.tabForPath(path)
+ input.openTab(tab)
+ input.setActive(tab)
+ }
+ if (maybePromise instanceof Promise) maybePromise.then(open)
+ else open()
})
}
}
diff --git a/packages/app/src/pages/session/message-timeline.tsx b/packages/app/src/pages/session/message-timeline.tsx
index 7a3b72ae4e..f320a2ebbf 100644
--- a/packages/app/src/pages/session/message-timeline.tsx
+++ b/packages/app/src/pages/session/message-timeline.tsx
@@ -1,4 +1,4 @@
-import { For, createEffect, createMemo, on, onCleanup, Show, startTransition, Index, type JSX } from "solid-js"
+import { For, createEffect, createMemo, on, onCleanup, Show, Index, type JSX } from "solid-js"
import { createStore, produce } from "solid-js/store"
import { useNavigate, useParams } from "@solidjs/router"
import { Button } from "@opencode-ai/ui/button"
@@ -160,7 +160,7 @@ function createTimelineStaging(input: TimelineStageInput) {
}
const currentTotal = input.messages().length
count = Math.min(currentTotal, count + input.config.batch)
- startTransition(() => setState("count", count))
+ setState("count", count)
if (count >= currentTotal) {
setState({ completedSession: sessionKey, activeSession: "" })
frame = undefined
diff --git a/packages/console/app/package.json b/packages/console/app/package.json
index 67a7eafa7c..4d20c1b8bc 100644
--- a/packages/console/app/package.json
+++ b/packages/console/app/package.json
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-app",
- "version": "1.2.18",
+ "version": "1.2.20",
"type": "module",
"license": "MIT",
"scripts": {
diff --git a/packages/console/app/src/lib/language.ts b/packages/console/app/src/lib/language.ts
index 54321d2343..5e80179e47 100644
--- a/packages/console/app/src/lib/language.ts
+++ b/packages/console/app/src/lib/language.ts
@@ -108,6 +108,26 @@ const DOCS_SEGMENT = new Set([
"zh-tw",
])
+const DOCS_LOCALE = {
+ ar: "ar",
+ da: "da",
+ de: "de",
+ en: "en",
+ es: "es",
+ fr: "fr",
+ it: "it",
+ ja: "ja",
+ ko: "ko",
+ nb: "no",
+ "pt-br": "br",
+ root: "en",
+ ru: "ru",
+ th: "th",
+ tr: "tr",
+ "zh-cn": "zh",
+ "zh-tw": "zht",
+} as const satisfies Record<string, string>
+
function suffix(pathname: string) {
const index = pathname.search(/[?#]/)
if (index === -1) {
@@ -130,7 +150,12 @@ export function docs(locale: Locale, pathname: string) {
return `${next.path}${next.suffix}`
}
- if (value === "root") return `${next.path}${next.suffix}`
+ if (value === "root") {
+ if (next.path === "/docs/en") return `/docs${next.suffix}`
+ if (next.path === "/docs/en/") return `/docs/${next.suffix}`
+ if (next.path.startsWith("/docs/en/")) return `/docs/${next.path.slice("/docs/en/".length)}${next.suffix}`
+ return `${next.path}${next.suffix}`
+ }
if (next.path === "/docs") return `/docs/${value}${next.suffix}`
if (next.path === "/docs/") return `/docs/${value}/${next.suffix}`
@@ -154,6 +179,15 @@ export function fromPathname(pathname: string) {
return parseLocale(fix(pathname).split("/")[1])
}
+export function fromDocsPathname(pathname: string) {
+ const next = fix(pathname)
+ const value = next.split("/")[2]?.toLowerCase()
+ if (!value) return null
+ if (!next.startsWith("/docs/")) return null
+ if (!(value in DOCS_LOCALE)) return null
+ return DOCS_LOCALE[value as keyof typeof DOCS_LOCALE]
+}
+
export function strip(pathname: string) {
const locale = fromPathname(pathname)
if (!locale) return fix(pathname)
@@ -272,6 +306,9 @@ export function localeFromRequest(request: Request) {
const fromPath = fromPathname(new URL(request.url).pathname)
if (fromPath) return fromPath
+ const fromDocsPath = fromDocsPathname(new URL(request.url).pathname)
+ if (fromDocsPath) return fromDocsPath
+
return (
localeFromCookieHeader(request.headers.get("cookie")) ??
detectFromAcceptLanguage(request.headers.get("accept-language"))
diff --git a/packages/console/app/src/routes/docs/[...path].ts b/packages/console/app/src/routes/docs/[...path].ts
index bbe07f1f07..164bd2872e 100644
--- a/packages/console/app/src/routes/docs/[...path].ts
+++ b/packages/console/app/src/routes/docs/[...path].ts
@@ -1,6 +1,6 @@
import type { APIEvent } from "@solidjs/start/server"
import { Resource } from "@opencode-ai/console-resource"
-import { docs, localeFromRequest, tag } from "~/lib/language"
+import { cookie, docs, localeFromRequest, tag } from "~/lib/language"
async function handler(evt: APIEvent) {
const req = evt.request.clone()
@@ -17,7 +17,9 @@ async function handler(evt: APIEvent) {
headers,
body: req.body,
})
- return response
+ const next = new Response(response.body, response)
+ next.headers.append("set-cookie", cookie(locale))
+ return next
}
export const GET = handler
diff --git a/packages/console/app/src/routes/docs/index.ts b/packages/console/app/src/routes/docs/index.ts
index bbe07f1f07..164bd2872e 100644
--- a/packages/console/app/src/routes/docs/index.ts
+++ b/packages/console/app/src/routes/docs/index.ts
@@ -1,6 +1,6 @@
import type { APIEvent } from "@solidjs/start/server"
import { Resource } from "@opencode-ai/console-resource"
-import { docs, localeFromRequest, tag } from "~/lib/language"
+import { cookie, docs, localeFromRequest, tag } from "~/lib/language"
async function handler(evt: APIEvent) {
const req = evt.request.clone()
@@ -17,7 +17,9 @@ async function handler(evt: APIEvent) {
headers,
body: req.body,
})
- return response
+ const next = new Response(response.body, response)
+ next.headers.append("set-cookie", cookie(locale))
+ return next
}
export const GET = handler
diff --git a/packages/console/app/src/routes/s/[id].ts b/packages/console/app/src/routes/s/[id].ts
index 60f8d8ba87..374fd79ad0 100644
--- a/packages/console/app/src/routes/s/[id].ts
+++ b/packages/console/app/src/routes/s/[id].ts
@@ -1,6 +1,6 @@
import type { APIEvent } from "@solidjs/start/server"
import { Resource } from "@opencode-ai/console-resource"
-import { docs, localeFromRequest, tag } from "~/lib/language"
+import { cookie, docs, localeFromRequest, tag } from "~/lib/language"
async function handler(evt: APIEvent) {
const req = evt.request.clone()
@@ -17,7 +17,9 @@ async function handler(evt: APIEvent) {
headers,
body: req.body,
})
- return response
+ const next = new Response(response.body, response)
+ next.headers.append("set-cookie", cookie(locale))
+ return next
}
export const GET = handler
diff --git a/packages/console/core/package.json b/packages/console/core/package.json
index 37c94aecbb..408e2a7aca 100644
--- a/packages/console/core/package.json
+++ b/packages/console/core/package.json
@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/console-core",
- "version": "1.2.18",
+ "version": "1.2.20",
"private": true,
"type": "module",
"license": "MIT",
diff --git a/packages/console/function/package.json b/packages/console/function/package.json
index bb7caccc58..8df6594d0b 100644
--- a/packages/console/function/package.json
+++ b/packages/console/function/package.json
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-function",
- "version": "1.2.18",
+ "version": "1.2.20",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",
diff --git a/packages/console/mail/package.json b/packages/console/mail/package.json
index 00e7378fe5..591c4bd369 100644
--- a/packages/console/mail/package.json
+++ b/packages/console/mail/package.json
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/console-mail",
- "version": "1.2.18",
+ "version": "1.2.20",
"dependencies": {
"@jsx-email/all": "2.2.3",
"@jsx-email/cli": "1.4.3",
diff --git a/packages/desktop-electron/package.json b/packages/desktop-electron/package.json
index 41791066ea..31321c92a5 100644
--- a/packages/desktop-electron/package.json
+++ b/packages/desktop-electron/package.json
@@ -1,7 +1,7 @@
{
"name": "@opencode-ai/desktop-electron",
"private": true,
- "version": "1.2.18",
+ "version": "1.2.20",
"type": "module",
"license": "MIT",
"homepage": "https://opencode.ai",
diff --git a/packages/desktop/package.json b/packages/desktop/package.json
index 49699ff85e..da4d51bcc7 100644
--- a/packages/desktop/package.json
+++ b/packages/desktop/package.json
@@ -1,7 +1,7 @@
{
"name": "@opencode-ai/desktop",
"private": true,
- "version": "1.2.18",
+ "version": "1.2.20",
"type": "module",
"license": "MIT",
"scripts": {
diff --git a/packages/enterprise/package.json b/packages/enterprise/package.json
index 065015bc50..2531cf34fb 100644
--- a/packages/enterprise/package.json
+++ b/packages/enterprise/package.json
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/enterprise",
- "version": "1.2.18",
+ "version": "1.2.20",
"private": true,
"type": "module",
"license": "MIT",
diff --git a/packages/extensions/zed/extension.toml b/packages/extensions/zed/extension.toml
index efd12836ee..d45bec2301 100644
--- a/packages/extensions/zed/extension.toml
+++ b/packages/extensions/zed/extension.toml
@@ -1,7 +1,7 @@
id = "opencode"
name = "OpenCode"
description = "The open source coding agent."
-version = "1.2.18"
+version = "1.2.20"
schema_version = 1
authors = ["Anomaly"]
repository = "https://github.com/anomalyco/opencode"
@@ -11,26 +11,26 @@ name = "OpenCode"
icon = "./icons/opencode.svg"
[agent_servers.opencode.targets.darwin-aarch64]
-archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.18/opencode-darwin-arm64.zip"
+archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.20/opencode-darwin-arm64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.darwin-x86_64]
-archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.18/opencode-darwin-x64.zip"
+archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.20/opencode-darwin-x64.zip"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-aarch64]
-archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.18/opencode-linux-arm64.tar.gz"
+archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.20/opencode-linux-arm64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.linux-x86_64]
-archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.18/opencode-linux-x64.tar.gz"
+archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.20/opencode-linux-x64.tar.gz"
cmd = "./opencode"
args = ["acp"]
[agent_servers.opencode.targets.windows-x86_64]
-archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.18/opencode-windows-x64.zip"
+archive = "https://github.com/anomalyco/opencode/releases/download/v1.2.20/opencode-windows-x64.zip"
cmd = "./opencode.exe"
args = ["acp"]
diff --git a/packages/function/package.json b/packages/function/package.json
index 162b564ad9..adc3bfcb05 100644
--- a/packages/function/package.json
+++ b/packages/function/package.json
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/function",
- "version": "1.2.18",
+ "version": "1.2.20",
"$schema": "https://json.schemastore.org/package.json",
"private": true,
"type": "module",
diff --git a/packages/opencode/package.json b/packages/opencode/package.json
index 282334beaa..bd5c84f47e 100644
--- a/packages/opencode/package.json
+++ b/packages/opencode/package.json
@@ -1,6 +1,6 @@
{
"$schema": "https://json.schemastore.org/package.json",
- "version": "0.3.0",
+ "version": "1.2.20",
"name": "opencode",
"type": "module",
"license": "MIT",
@@ -38,6 +38,7 @@
"@types/mime-types": "3.0.1",
"@types/turndown": "5.0.5",
"@types/yargs": "17.0.33",
+ "@types/which": "3.0.4",
"@typescript/native-preview": "catalog:",
"drizzle-kit": "1.0.0-beta.12-a5629fb",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
@@ -122,6 +123,7 @@
"ulid": "catalog:",
"vscode-jsonrpc": "8.2.1",
"web-tree-sitter": "0.25.10",
+ "which": "6.0.1",
"xdg-basedir": "5.1.0",
"yargs": "18.0.0",
"zod": "catalog:",
diff --git a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts
index 030b2d7d26..d61d85aed3 100644
--- a/packages/opencode/src/acp/agent.ts
+++ b/packages/opencode/src/acp/agent.ts
@@ -31,6 +31,7 @@ import {
import { Log } from "../util/log"
import { pathToFileURL } from "bun"
import { Filesystem } from "../util/filesystem"
+import { Hash } from "../util/hash"
import { ACPSessionManager } from "./session"
import type { ACPConfig } from "./types"
import { Provider } from "../provider/provider"
@@ -41,7 +42,7 @@ import { Config } from "@/config/config"
import { Todo } from "@/session/todo"
import { z } from "zod"
import { LoadAPIKeyError } from "ai"
-import type { AssistantMessage, Event, OpencodeClient, SessionMessageResponse } from "@opencode-ai/sdk/v2"
+import type { AssistantMessage, Event, OpencodeClient, SessionMessageResponse, ToolPart } from "@opencode-ai/sdk/v2"
import { applyPatch } from "diff"
type ModeOption = { id: string; name: string; description?: string }
@@ -135,6 +136,7 @@ export namespace ACP {
private sessionManager: ACPSessionManager
private eventAbort = new AbortController()
private eventStarted = false
+ private bashSnapshots = new Map()
private permissionQueues = new Map>()
private permissionOptions: PermissionOption[] = [
{ optionId: "once", kind: "allow_once", name: "Allow once" },
@@ -307,6 +309,40 @@ export namespace ACP {
return
case "running":
+ const output = this.bashOutput(part)
+ const content: ToolCallContent[] = []
+ if (output) {
+ const hash = Hash.fast(output)
+ if (part.tool === "bash") {
+ if (this.bashSnapshots.get(part.callID) === hash) {
+ await this.connection
+ .sessionUpdate({
+ sessionId,
+ update: {
+ sessionUpdate: "tool_call_update",
+ toolCallId: part.callID,
+ status: "in_progress",
+ kind: toToolKind(part.tool),
+ title: part.tool,
+ locations: toLocations(part.tool, part.state.input),
+ rawInput: part.state.input,
+ },
+ })
+ .catch((error) => {
+ log.error("failed to send tool in_progress to ACP", { error })
+ })
+ return
+ }
+ this.bashSnapshots.set(part.callID, hash)
+ }
+ content.push({
+ type: "content",
+ content: {
+ type: "text",
+ text: output,
+ },
+ })
+ }
await this.connection
.sessionUpdate({
sessionId,
@@ -1432,6 +1468,14 @@ export namespace ACP {
{ throwOnError: true },
)
}
+
+ private bashOutput(part: ToolPart) {
+ if (part.tool !== "bash") return
+ if (!("metadata" in part.state) || !part.state.metadata || typeof part.state.metadata !== "object") return
+ const output = part.state.metadata["output"]
+ if (typeof output !== "string") return
+ return output
+ }
}
function toToolKind(toolName: string): ToolKind {
diff --git a/packages/opencode/src/cli/cmd/auth.ts b/packages/opencode/src/cli/cmd/auth.ts
index 6865d73daf..c156086c3c 100644
--- a/packages/opencode/src/cli/cmd/auth.ts
+++ b/packages/opencode/src/cli/cmd/auth.ts
@@ -12,6 +12,9 @@ import { Plugin } from "../../plugin"
import { Instance } from "../../project/instance"
import { Telemetry } from "../../telemetry"
import type { Hooks } from "@opencode-ai/plugin"
+import { Process } from "../../util/process"
+import { text } from "node:stream/consumers"
+import { setTimeout as sleep } from "node:timers/promises"
type PluginAuth = NonNullable
@@ -37,7 +40,7 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string):
const method = plugin.auth.methods[index]
// Handle prompts for all auth types
- await Bun.sleep(10)
+ await sleep(10)
const inputs: Record = {}
if (method.prompts) {
for (const prompt of method.prompts) {
diff --git a/packages/opencode/src/cli/cmd/debug/lsp.ts b/packages/opencode/src/cli/cmd/debug/lsp.ts
index d83c4ed8a4..4b8a3e7d45 100644
--- a/packages/opencode/src/cli/cmd/debug/lsp.ts
+++ b/packages/opencode/src/cli/cmd/debug/lsp.ts
@@ -3,6 +3,7 @@ import { bootstrap } from "../../bootstrap"
import { cmd } from "../cmd"
import { Log } from "../../../util/log"
import { EOL } from "os"
+import { setTimeout as sleep } from "node:timers/promises"
export const LSPCommand = cmd({
command: "lsp",
@@ -19,7 +20,7 @@ const DiagnosticsCommand = cmd({
async handler(args) {
await bootstrap(process.cwd(), async () => {
await LSP.touchFile(args.file, true)
- await Bun.sleep(1000)
+ await sleep(1000)
process.stdout.write(JSON.stringify(await LSP.diagnostics(), null, 2) + EOL)
})
},
diff --git a/packages/opencode/src/cli/cmd/github.ts b/packages/opencode/src/cli/cmd/github.ts
index 672e73d49a..2491abc567 100644
--- a/packages/opencode/src/cli/cmd/github.ts
+++ b/packages/opencode/src/cli/cmd/github.ts
@@ -28,6 +28,7 @@ import { Bus } from "../../bus"
import { MessageV2 } from "../../session/message-v2"
import { SessionPrompt } from "@/session/prompt"
import { $ } from "bun"
+import { setTimeout as sleep } from "node:timers/promises"
type GitHubAuthor = {
login: string
@@ -353,7 +354,7 @@ export const GithubInstallCommand = cmd({
}
retries++
- await Bun.sleep(1000)
+ await sleep(1000)
} while (true)
s.stop("Installed GitHub app")
@@ -1372,7 +1373,7 @@ Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"`
} catch (e) {
if (retries > 0) {
console.log(`Retrying after ${delayMs}ms...`)
- await Bun.sleep(delayMs)
+ await sleep(delayMs)
return withRetry(fn, retries - 1, delayMs)
}
throw e
diff --git a/packages/opencode/src/cli/cmd/session.ts b/packages/opencode/src/cli/cmd/session.ts
index 7fb5fda97b..84840392a6 100644
--- a/packages/opencode/src/cli/cmd/session.ts
+++ b/packages/opencode/src/cli/cmd/session.ts
@@ -9,6 +9,7 @@ import { Filesystem } from "../../util/filesystem"
import { Process } from "../../util/process"
import { EOL } from "os"
import path from "path"
+import { which } from "../../util/which"
function pagerCmd(): string[] {
const lessOptions = ["-R", "-S"]
@@ -17,7 +18,7 @@ function pagerCmd(): string[] {
}
// user could have less installed via other options
- const lessOnPath = Bun.which("less")
+ const lessOnPath = which("less")
if (lessOnPath) {
if (Filesystem.stat(lessOnPath)?.size) return [lessOnPath, ...lessOptions]
}
@@ -27,7 +28,7 @@ function pagerCmd(): string[] {
if (Filesystem.stat(less)?.size) return [less, ...lessOptions]
}
- const git = Bun.which("git")
+ const git = which("git")
if (git) {
const less = path.join(git, "..", "..", "usr", "bin", "less.exe")
if (Filesystem.stat(less)?.size) return [less, ...lessOptions]
diff --git a/packages/opencode/src/cli/cmd/tui/util/clipboard.ts b/packages/opencode/src/cli/cmd/tui/util/clipboard.ts
index 1a8197bf4e..412ec654ff 100644
--- a/packages/opencode/src/cli/cmd/tui/util/clipboard.ts
+++ b/packages/opencode/src/cli/cmd/tui/util/clipboard.ts
@@ -6,6 +6,7 @@ import { tmpdir } from "os"
import path from "path"
import { Filesystem } from "../../../../util/filesystem"
import { Process } from "../../../../util/process"
+import { which } from "../../../../util/which"
/**
* Writes text to clipboard via OSC 52 escape sequence.
@@ -76,7 +77,7 @@ export namespace Clipboard {
const getCopyMethod = lazy(() => {
const os = platform()
- if (os === "darwin" && Bun.which("osascript")) {
+ if (os === "darwin" && which("osascript")) {
console.log("clipboard: using osascript")
return async (text: string) => {
const escaped = text.replace(/\\/g, "\\\\").replace(/"/g, '\\"')
@@ -85,7 +86,7 @@ export namespace Clipboard {
}
if (os === "linux") {
- if (process.env["WAYLAND_DISPLAY"] && Bun.which("wl-copy")) {
+ if (process.env["WAYLAND_DISPLAY"] && which("wl-copy")) {
console.log("clipboard: using wl-copy")
return async (text: string) => {
const proc = Process.spawn(["wl-copy"], { stdin: "pipe", stdout: "ignore", stderr: "ignore" })
@@ -95,7 +96,7 @@ export namespace Clipboard {
await proc.exited.catch(() => {})
}
}
- if (Bun.which("xclip")) {
+ if (which("xclip")) {
console.log("clipboard: using xclip")
return async (text: string) => {
const proc = Process.spawn(["xclip", "-selection", "clipboard"], {
@@ -109,7 +110,7 @@ export namespace Clipboard {
await proc.exited.catch(() => {})
}
}
- if (Bun.which("xsel")) {
+ if (which("xsel")) {
console.log("clipboard: using xsel")
return async (text: string) => {
const proc = Process.spawn(["xsel", "--clipboard", "--input"], {
diff --git a/packages/opencode/src/cli/cmd/tui/worker.ts b/packages/opencode/src/cli/cmd/tui/worker.ts
index 7d729bc786..590dafcdcd 100644
--- a/packages/opencode/src/cli/cmd/tui/worker.ts
+++ b/packages/opencode/src/cli/cmd/tui/worker.ts
@@ -11,6 +11,7 @@ import { createOpencodeClient, type Event } from "@opencode-ai/sdk/v2"
import type { BunWebSocketData } from "hono/bun"
import { Flag } from "@/flag/flag"
import { Telemetry } from "@/telemetry"
+import { setTimeout as sleep } from "node:timers/promises"
await Log.init({
print: process.argv.includes("--print-logs"),
@@ -79,7 +80,7 @@ const startEventStream = (directory: string) => {
).catch(() => undefined)
if (!events) {
- await Bun.sleep(250)
+ await sleep(250)
continue
}
@@ -88,7 +89,7 @@ const startEventStream = (directory: string) => {
}
if (!signal.aborted) {
- await Bun.sleep(250)
+ await sleep(250)
}
}
})().catch((error) => {
diff --git a/packages/opencode/src/cli/ui.ts b/packages/opencode/src/cli/ui.ts
index f242a77f6c..39396997c6 100644
--- a/packages/opencode/src/cli/ui.ts
+++ b/packages/opencode/src/cli/ui.ts
@@ -25,12 +25,12 @@ export namespace UI {
export function println(...message: string[]) {
print(...message)
- Bun.stderr.write(EOL)
+ process.stderr.write(EOL)
}
export function print(...message: string[]) {
blank = false
- Bun.stderr.write(message.join(" "))
+ process.stderr.write(message.join(" "))
}
let blank = false
@@ -44,7 +44,7 @@ export namespace UI {
const result: string[] = []
const reset = "\x1b[0m"
const left = {
- fg: Bun.color("gray", "ansi") ?? "",
+ fg: "\x1b[90m",
shadow: "\x1b[38;5;235m",
bg: "\x1b[48;5;235m",
}
diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts
index 50b0baf735..7b96af9903 100644
--- a/packages/opencode/src/config/config.ts
+++ b/packages/opencode/src/config/config.ts
@@ -1242,7 +1242,7 @@ export namespace Config {
if (!parsed.data.$schema && isFile) {
parsed.data.$schema = "https://opencode.ai/config.json"
const updated = original.replace(/^\s*\{/, '{\n "$schema": "https://opencode.ai/config.json",')
- await Bun.write(options.path, updated).catch(() => {})
+ await Filesystem.write(options.path, updated).catch(() => {})
}
const data = parsed.data
if (data.plugin && isFile) {
diff --git a/packages/opencode/src/config/migrate-tui-config.ts b/packages/opencode/src/config/migrate-tui-config.ts
index b426e4fbd1..dbe33ffb42 100644
--- a/packages/opencode/src/config/migrate-tui-config.ts
+++ b/packages/opencode/src/config/migrate-tui-config.ts
@@ -70,7 +70,7 @@ export async function migrateTuiConfig(input: MigrateInput) {
if (extracted.keybinds !== undefined) payload.keybinds = extracted.keybinds
if (tui) Object.assign(payload, tui)
- const wrote = await Bun.write(target, JSON.stringify(payload, null, 2))
+ const wrote = await Filesystem.write(target, JSON.stringify(payload, null, 2))
.then(() => true)
.catch((error) => {
log.warn("failed to write tui migration target", { from: file, to: target, error })
@@ -104,7 +104,7 @@ async function backupAndStripLegacy(file: string, source: string) {
const hasBackup = await Filesystem.exists(backup)
const backed = hasBackup
? true
- : await Bun.write(backup, source)
+ : await Filesystem.write(backup, source)
.then(() => true)
.catch((error) => {
log.warn("failed to backup source config during tui migration", { path: file, backup, error })
@@ -123,7 +123,7 @@ async function backupAndStripLegacy(file: string, source: string) {
return applyEdits(acc, edits)
}, source)
- return Bun.write(file, text)
+ return Filesystem.write(file, text)
.then(() => {
log.info("stripped tui keys from server config", { path: file, backup })
return true
diff --git a/packages/opencode/src/file/index.ts b/packages/opencode/src/file/index.ts
index b7daddc5fb..01f07c9afa 100644
--- a/packages/opencode/src/file/index.ts
+++ b/packages/opencode/src/file/index.ts
@@ -418,7 +418,7 @@ export namespace File {
const project = Instance.project
if (project.vcs !== "git") return []
- const diffOutput = await $`git -c core.quotepath=false diff --numstat HEAD`
+ const diffOutput = await $`git -c core.fsmonitor=false -c core.quotepath=false diff --numstat HEAD`
.cwd(Instance.directory)
.quiet()
.nothrow()
@@ -439,11 +439,12 @@ export namespace File {
}
}
- const untrackedOutput = await $`git -c core.quotepath=false ls-files --others --exclude-standard`
- .cwd(Instance.directory)
- .quiet()
- .nothrow()
- .text()
+ const untrackedOutput =
+ await $`git -c core.fsmonitor=false -c core.quotepath=false ls-files --others --exclude-standard`
+ .cwd(Instance.directory)
+ .quiet()
+ .nothrow()
+ .text()
if (untrackedOutput.trim()) {
const untrackedFiles = untrackedOutput.trim().split("\n")
@@ -464,11 +465,12 @@ export namespace File {
}
// Get deleted files
- const deletedOutput = await $`git -c core.quotepath=false diff --name-only --diff-filter=D HEAD`
- .cwd(Instance.directory)
- .quiet()
- .nothrow()
- .text()
+ const deletedOutput =
+ await $`git -c core.fsmonitor=false -c core.quotepath=false diff --name-only --diff-filter=D HEAD`
+ .cwd(Instance.directory)
+ .quiet()
+ .nothrow()
+ .text()
if (deletedOutput.trim()) {
const deletedFiles = deletedOutput.trim().split("\n")
@@ -539,8 +541,14 @@ export namespace File {
const content = (await Filesystem.readText(full).catch(() => "")).trim()
if (project.vcs === "git") {
- let diff = await $`git diff ${file}`.cwd(Instance.directory).quiet().nothrow().text()
- if (!diff.trim()) diff = await $`git diff --staged ${file}`.cwd(Instance.directory).quiet().nothrow().text()
+ let diff = await $`git -c core.fsmonitor=false diff ${file}`.cwd(Instance.directory).quiet().nothrow().text()
+ if (!diff.trim()) {
+ diff = await $`git -c core.fsmonitor=false diff --staged ${file}`
+ .cwd(Instance.directory)
+ .quiet()
+ .nothrow()
+ .text()
+ }
if (diff.trim()) {
const original = await $`git show HEAD:${file}`.cwd(Instance.directory).quiet().nothrow().text()
const patch = structuredPatch(file, file, original, content, "old", "new", {
diff --git a/packages/opencode/src/file/ripgrep.ts b/packages/opencode/src/file/ripgrep.ts
index 9c4e9cf028..09fef453c9 100644
--- a/packages/opencode/src/file/ripgrep.ts
+++ b/packages/opencode/src/file/ripgrep.ts
@@ -8,6 +8,7 @@ import { lazy } from "../util/lazy"
import { $ } from "bun"
import { Filesystem } from "../util/filesystem"
import { Process } from "../util/process"
+import { which } from "../util/which"
import { text } from "node:stream/consumers"
import { ZipReader, BlobReader, BlobWriter } from "@zip.js/zip.js"
@@ -126,7 +127,7 @@ export namespace Ripgrep {
)
const state = lazy(async () => {
- const system = Bun.which("rg")
+ const system = which("rg")
if (system) {
const stat = await fs.stat(system).catch(() => undefined)
if (stat?.isFile()) return { filepath: system }
diff --git a/packages/opencode/src/format/formatter.ts b/packages/opencode/src/format/formatter.ts
index 19b9e2cbe9..9e96b2305c 100644
--- a/packages/opencode/src/format/formatter.ts
+++ b/packages/opencode/src/format/formatter.ts
@@ -3,6 +3,7 @@ import { BunProc } from "../bun"
import { Instance } from "../project/instance"
import { Filesystem } from "../util/filesystem"
import { Process } from "../util/process"
+import { which } from "../util/which"
import { Flag } from "@/flag/flag"
export interface Info {
@@ -18,7 +19,7 @@ export const gofmt: Info = {
command: ["gofmt", "-w", "$FILE"],
extensions: [".go"],
async enabled() {
- return Bun.which("gofmt") !== null
+ return which("gofmt") !== null
},
}
@@ -27,7 +28,7 @@ export const mix: Info = {
command: ["mix", "format", "$FILE"],
extensions: [".ex", ".exs", ".eex", ".heex", ".leex", ".neex", ".sface"],
async enabled() {
- return Bun.which("mix") !== null
+ return which("mix") !== null
},
}
@@ -152,7 +153,7 @@ export const zig: Info = {
command: ["zig", "fmt", "$FILE"],
extensions: [".zig", ".zon"],
async enabled() {
- return Bun.which("zig") !== null
+ return which("zig") !== null
},
}
@@ -171,7 +172,7 @@ export const ktlint: Info = {
command: ["ktlint", "-F", "$FILE"],
extensions: [".kt", ".kts"],
async enabled() {
- return Bun.which("ktlint") !== null
+ return which("ktlint") !== null
},
}
@@ -180,7 +181,7 @@ export const ruff: Info = {
command: ["ruff", "format", "$FILE"],
extensions: [".py", ".pyi"],
async enabled() {
- if (!Bun.which("ruff")) return false
+ if (!which("ruff")) return false
const configs = ["pyproject.toml", "ruff.toml", ".ruff.toml"]
for (const config of configs) {
const found = await Filesystem.findUp(config, Instance.directory, Instance.worktree)
@@ -210,7 +211,7 @@ export const rlang: Info = {
command: ["air", "format", "$FILE"],
extensions: [".R"],
async enabled() {
- const airPath = Bun.which("air")
+ const airPath = which("air")
if (airPath == null) return false
try {
@@ -239,7 +240,7 @@ export const uvformat: Info = {
extensions: [".py", ".pyi"],
async enabled() {
if (await ruff.enabled()) return false
- if (Bun.which("uv") !== null) {
+ if (which("uv") !== null) {
const proc = Process.spawn(["uv", "format", "--help"], { stderr: "pipe", stdout: "pipe" })
const code = await proc.exited
return code === 0
@@ -253,7 +254,7 @@ export const rubocop: Info = {
command: ["rubocop", "--autocorrect", "$FILE"],
extensions: [".rb", ".rake", ".gemspec", ".ru"],
async enabled() {
- return Bun.which("rubocop") !== null
+ return which("rubocop") !== null
},
}
@@ -262,7 +263,7 @@ export const standardrb: Info = {
command: ["standardrb", "--fix", "$FILE"],
extensions: [".rb", ".rake", ".gemspec", ".ru"],
async enabled() {
- return Bun.which("standardrb") !== null
+ return which("standardrb") !== null
},
}
@@ -271,7 +272,7 @@ export const htmlbeautifier: Info = {
command: ["htmlbeautifier", "$FILE"],
extensions: [".erb", ".html.erb"],
async enabled() {
- return Bun.which("htmlbeautifier") !== null
+ return which("htmlbeautifier") !== null
},
}
@@ -280,7 +281,7 @@ export const dart: Info = {
command: ["dart", "format", "$FILE"],
extensions: [".dart"],
async enabled() {
- return Bun.which("dart") !== null
+ return which("dart") !== null
},
}
@@ -289,7 +290,7 @@ export const ocamlformat: Info = {
command: ["ocamlformat", "-i", "$FILE"],
extensions: [".ml", ".mli"],
async enabled() {
- if (!Bun.which("ocamlformat")) return false
+ if (!which("ocamlformat")) return false
const items = await Filesystem.findUp(".ocamlformat", Instance.directory, Instance.worktree)
return items.length > 0
},
@@ -300,7 +301,7 @@ export const terraform: Info = {
command: ["terraform", "fmt", "$FILE"],
extensions: [".tf", ".tfvars"],
async enabled() {
- return Bun.which("terraform") !== null
+ return which("terraform") !== null
},
}
@@ -309,7 +310,7 @@ export const latexindent: Info = {
command: ["latexindent", "-w", "-s", "$FILE"],
extensions: [".tex"],
async enabled() {
- return Bun.which("latexindent") !== null
+ return which("latexindent") !== null
},
}
@@ -318,7 +319,7 @@ export const gleam: Info = {
command: ["gleam", "format", "$FILE"],
extensions: [".gleam"],
async enabled() {
- return Bun.which("gleam") !== null
+ return which("gleam") !== null
},
}
@@ -327,7 +328,7 @@ export const shfmt: Info = {
command: ["shfmt", "-w", "$FILE"],
extensions: [".sh", ".bash"],
async enabled() {
- return Bun.which("shfmt") !== null
+ return which("shfmt") !== null
},
}
@@ -336,7 +337,7 @@ export const nixfmt: Info = {
command: ["nixfmt", "$FILE"],
extensions: [".nix"],
async enabled() {
- return Bun.which("nixfmt") !== null
+ return which("nixfmt") !== null
},
}
@@ -345,7 +346,7 @@ export const rustfmt: Info = {
command: ["rustfmt", "$FILE"],
extensions: [".rs"],
async enabled() {
- return Bun.which("rustfmt") !== null
+ return which("rustfmt") !== null
},
}
@@ -372,7 +373,7 @@ export const ormolu: Info = {
command: ["ormolu", "-i", "$FILE"],
extensions: [".hs"],
async enabled() {
- return Bun.which("ormolu") !== null
+ return which("ormolu") !== null
},
}
@@ -381,7 +382,7 @@ export const cljfmt: Info = {
command: ["cljfmt", "fix", "--quiet", "$FILE"],
extensions: [".clj", ".cljs", ".cljc", ".edn"],
async enabled() {
- return Bun.which("cljfmt") !== null
+ return which("cljfmt") !== null
},
}
@@ -390,6 +391,6 @@ export const dfmt: Info = {
command: ["dfmt", "-i", "$FILE"],
extensions: [".d"],
async enabled() {
- return Bun.which("dfmt") !== null
+ return which("dfmt") !== null
},
}
diff --git a/packages/opencode/src/lsp/server.ts b/packages/opencode/src/lsp/server.ts
index afd297a5ed..e09fbc97fe 100644
--- a/packages/opencode/src/lsp/server.ts
+++ b/packages/opencode/src/lsp/server.ts
@@ -12,6 +12,7 @@ import { Instance } from "../project/instance"
import { Flag } from "../flag/flag"
import { Archive } from "../util/archive"
import { Process } from "../util/process"
+import { which } from "../util/which"
export namespace LSPServer {
const log = Log.create({ service: "lsp.server" })
@@ -75,7 +76,7 @@ export namespace LSPServer {
},
extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs"],
async spawn(root) {
- const deno = Bun.which("deno")
+ const deno = which("deno")
if (!deno) {
log.info("deno not found, please install deno first")
return
@@ -122,7 +123,7 @@ export namespace LSPServer {
extensions: [".vue"],
root: NearestRoot(["package-lock.json", "bun.lockb", "bun.lock", "pnpm-lock.yaml", "yarn.lock"]),
async spawn(root) {
- let binary = Bun.which("vue-language-server")
+ let binary = which("vue-language-server")
const args: string[] = []
if (!binary) {
const js = path.join(
@@ -260,7 +261,7 @@ export namespace LSPServer {
let lintBin = await resolveBin(lintTarget)
if (!lintBin) {
- const found = Bun.which("oxlint")
+ const found = which("oxlint")
if (found) lintBin = found
}
@@ -281,7 +282,7 @@ export namespace LSPServer {
let serverBin = await resolveBin(serverTarget)
if (!serverBin) {
- const found = Bun.which("oxc_language_server")
+ const found = which("oxc_language_server")
if (found) serverBin = found
}
if (serverBin) {
@@ -332,7 +333,7 @@ export namespace LSPServer {
let bin: string | undefined
if (await Filesystem.exists(localBin)) bin = localBin
if (!bin) {
- const found = Bun.which("biome")
+ const found = which("biome")
if (found) bin = found
}
@@ -368,11 +369,11 @@ export namespace LSPServer {
},
extensions: [".go"],
async spawn(root) {
- let bin = Bun.which("gopls", {
+ let bin = which("gopls", {
PATH: process.env["PATH"] + path.delimiter + Global.Path.bin,
})
if (!bin) {
- if (!Bun.which("go")) return
+ if (!which("go")) return
if (Flag.OPENCODE_DISABLE_LSP_DOWNLOAD) return
log.info("installing gopls")
@@ -405,12 +406,12 @@ export namespace LSPServer {
root: NearestRoot(["Gemfile"]),
extensions: [".rb", ".rake", ".gemspec", ".ru"],
async spawn(root) {
- let bin = Bun.which("rubocop", {
+ let bin = which("rubocop", {
PATH: process.env["PATH"] + path.delimiter + Global.Path.bin,
})
if (!bin) {
- const ruby = Bun.which("ruby")
- const gem = Bun.which("gem")
+ const ruby = which("ruby")
+ const gem = which("gem")
if (!ruby || !gem) {
log.info("Ruby not found, please install Ruby first")
return
@@ -457,7 +458,7 @@ export namespace LSPServer {
return undefined
}
- let binary = Bun.which("ty")
+ let binary = which("ty")
const initialization: Record = {}
@@ -509,7 +510,7 @@ export namespace LSPServer {
extensions: [".py", ".pyi"],
root: NearestRoot(["pyproject.toml", "setup.py", "setup.cfg", "requirements.txt", "Pipfile", "pyrightconfig.json"]),
async spawn(root) {
- let binary = Bun.which("pyright-langserver")
+ let binary = which("pyright-langserver")
const args = []
if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "pyright", "dist", "pyright-langserver.js")
@@ -563,7 +564,7 @@ export namespace LSPServer {
extensions: [".ex", ".exs"],
root: NearestRoot(["mix.exs", "mix.lock"]),
async spawn(root) {
- let binary = Bun.which("elixir-ls")
+ let binary = which("elixir-ls")
if (!binary) {
const elixirLsPath = path.join(Global.Path.bin, "elixir-ls")
binary = path.join(
@@ -574,7 +575,7 @@ export namespace LSPServer {
)
if (!(await Filesystem.exists(binary))) {
- const elixir = Bun.which("elixir")
+ const elixir = which("elixir")
if (!elixir) {
log.error("elixir is required to run elixir-ls")
return
@@ -625,12 +626,12 @@ export namespace LSPServer {
extensions: [".zig", ".zon"],
root: NearestRoot(["build.zig"]),
async spawn(root) {
- let bin = Bun.which("zls", {
+ let bin = which("zls", {
PATH: process.env["PATH"] + path.delimiter + Global.Path.bin,
})
if (!bin) {
- const zig = Bun.which("zig")
+ const zig = which("zig")
if (!zig) {
log.error("Zig is required to use zls. Please install Zig first.")
return
@@ -737,11 +738,11 @@ export namespace LSPServer {
root: NearestRoot([".slnx", ".sln", ".csproj", "global.json"]),
extensions: [".cs"],
async spawn(root) {
- let bin = Bun.which("csharp-ls", {
+ let bin = which("csharp-ls", {
PATH: process.env["PATH"] + path.delimiter + Global.Path.bin,
})
if (!bin) {
- if (!Bun.which("dotnet")) {
+ if (!which("dotnet")) {
log.error(".NET SDK is required to install csharp-ls")
return
}
@@ -776,11 +777,11 @@ export namespace LSPServer {
root: NearestRoot([".slnx", ".sln", ".fsproj", "global.json"]),
extensions: [".fs", ".fsi", ".fsx", ".fsscript"],
async spawn(root) {
- let bin = Bun.which("fsautocomplete", {
+ let bin = which("fsautocomplete", {
PATH: process.env["PATH"] + path.delimiter + Global.Path.bin,
})
if (!bin) {
- if (!Bun.which("dotnet")) {
+ if (!which("dotnet")) {
log.error(".NET SDK is required to install fsautocomplete")
return
}
@@ -817,7 +818,7 @@ export namespace LSPServer {
async spawn(root) {
// Check if sourcekit-lsp is available in the PATH
// This is installed with the Swift toolchain
- const sourcekit = Bun.which("sourcekit-lsp")
+ const sourcekit = which("sourcekit-lsp")
if (sourcekit) {
return {
process: spawn(sourcekit, {
@@ -828,7 +829,7 @@ export namespace LSPServer {
// If sourcekit-lsp not found, check if xcrun is available
// This is specific to macOS where sourcekit-lsp is typically installed with Xcode
- if (!Bun.which("xcrun")) return
+ if (!which("xcrun")) return
const lspLoc = await $`xcrun --find sourcekit-lsp`.quiet().nothrow()
@@ -877,7 +878,7 @@ export namespace LSPServer {
},
extensions: [".rs"],
async spawn(root) {
- const bin = Bun.which("rust-analyzer")
+ const bin = which("rust-analyzer")
if (!bin) {
log.info("rust-analyzer not found in path, please install it")
return
@@ -896,7 +897,7 @@ export namespace LSPServer {
extensions: [".c", ".cpp", ".cc", ".cxx", ".c++", ".h", ".hpp", ".hh", ".hxx", ".h++"],
async spawn(root) {
const args = ["--background-index", "--clang-tidy"]
- const fromPath = Bun.which("clangd")
+ const fromPath = which("clangd")
if (fromPath) {
return {
process: spawn(fromPath, args, {
@@ -1041,7 +1042,7 @@ export namespace LSPServer {
extensions: [".svelte"],
root: NearestRoot(["package-lock.json", "bun.lockb", "bun.lock", "pnpm-lock.yaml", "yarn.lock"]),
async spawn(root) {
- let binary = Bun.which("svelteserver")
+ let binary = which("svelteserver")
const args: string[] = []
if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "svelte-language-server", "bin", "server.js")
@@ -1088,7 +1089,7 @@ export namespace LSPServer {
}
const tsdk = path.dirname(tsserver)
- let binary = Bun.which("astro-ls")
+ let binary = which("astro-ls")
const args: string[] = []
if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "@astrojs", "language-server", "bin", "nodeServer.js")
@@ -1132,7 +1133,7 @@ export namespace LSPServer {
root: NearestRoot(["pom.xml", "build.gradle", "build.gradle.kts", ".project", ".classpath"]),
extensions: [".java"],
async spawn(root) {
- const java = Bun.which("java")
+ const java = which("java")
if (!java) {
log.error("Java 21 or newer is required to run the JDTLS. Please install it first.")
return
@@ -1324,7 +1325,7 @@ export namespace LSPServer {
extensions: [".yaml", ".yml"],
root: NearestRoot(["package-lock.json", "bun.lockb", "bun.lock", "pnpm-lock.yaml", "yarn.lock"]),
async spawn(root) {
- let binary = Bun.which("yaml-language-server")
+ let binary = which("yaml-language-server")
const args: string[] = []
if (!binary) {
const js = path.join(
@@ -1380,7 +1381,7 @@ export namespace LSPServer {
]),
extensions: [".lua"],
async spawn(root) {
- let bin = Bun.which("lua-language-server", {
+ let bin = which("lua-language-server", {
PATH: process.env["PATH"] + path.delimiter + Global.Path.bin,
})
@@ -1512,7 +1513,7 @@ export namespace LSPServer {
extensions: [".php"],
root: NearestRoot(["composer.json", "composer.lock", ".php-version"]),
async spawn(root) {
- let binary = Bun.which("intelephense")
+ let binary = which("intelephense")
const args: string[] = []
if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "intelephense", "lib", "intelephense.js")
@@ -1556,7 +1557,7 @@ export namespace LSPServer {
extensions: [".prisma"],
root: NearestRoot(["schema.prisma", "prisma/schema.prisma", "prisma"], ["package.json"]),
async spawn(root) {
- const prisma = Bun.which("prisma")
+ const prisma = which("prisma")
if (!prisma) {
log.info("prisma not found, please install prisma")
return
@@ -1574,7 +1575,7 @@ export namespace LSPServer {
extensions: [".dart"],
root: NearestRoot(["pubspec.yaml", "analysis_options.yaml"]),
async spawn(root) {
- const dart = Bun.which("dart")
+ const dart = which("dart")
if (!dart) {
log.info("dart not found, please install dart first")
return
@@ -1592,7 +1593,7 @@ export namespace LSPServer {
extensions: [".ml", ".mli"],
root: NearestRoot(["dune-project", "dune-workspace", ".merlin", "opam"]),
async spawn(root) {
- const bin = Bun.which("ocamllsp")
+ const bin = which("ocamllsp")
if (!bin) {
log.info("ocamllsp not found, please install ocaml-lsp-server")
return
@@ -1609,7 +1610,7 @@ export namespace LSPServer {
extensions: [".sh", ".bash", ".zsh", ".ksh"],
root: async () => Instance.directory,
async spawn(root) {
- let binary = Bun.which("bash-language-server")
+ let binary = which("bash-language-server")
const args: string[] = []
if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "bash-language-server", "out", "cli.js")
@@ -1648,7 +1649,7 @@ export namespace LSPServer {
extensions: [".tf", ".tfvars"],
root: NearestRoot([".terraform.lock.hcl", "terraform.tfstate", "*.tf"]),
async spawn(root) {
- let bin = Bun.which("terraform-ls", {
+ let bin = which("terraform-ls", {
PATH: process.env["PATH"] + path.delimiter + Global.Path.bin,
})
@@ -1731,7 +1732,7 @@ export namespace LSPServer {
extensions: [".tex", ".bib"],
root: NearestRoot([".latexmkrc", "latexmkrc", ".texlabroot", "texlabroot"]),
async spawn(root) {
- let bin = Bun.which("texlab", {
+ let bin = which("texlab", {
PATH: process.env["PATH"] + path.delimiter + Global.Path.bin,
})
@@ -1821,7 +1822,7 @@ export namespace LSPServer {
extensions: [".dockerfile", "Dockerfile"],
root: async () => Instance.directory,
async spawn(root) {
- let binary = Bun.which("docker-langserver")
+ let binary = which("docker-langserver")
const args: string[] = []
if (!binary) {
const js = path.join(Global.Path.bin, "node_modules", "dockerfile-language-server-nodejs", "lib", "server.js")
@@ -1860,7 +1861,7 @@ export namespace LSPServer {
extensions: [".gleam"],
root: NearestRoot(["gleam.toml"]),
async spawn(root) {
- const gleam = Bun.which("gleam")
+ const gleam = which("gleam")
if (!gleam) {
log.info("gleam not found, please install gleam first")
return
@@ -1878,9 +1879,9 @@ export namespace LSPServer {
extensions: [".clj", ".cljs", ".cljc", ".edn"],
root: NearestRoot(["deps.edn", "project.clj", "shadow-cljs.edn", "bb.edn", "build.boot"]),
async spawn(root) {
- let bin = Bun.which("clojure-lsp")
+ let bin = which("clojure-lsp")
if (!bin && process.platform === "win32") {
- bin = Bun.which("clojure-lsp.exe")
+ bin = which("clojure-lsp.exe")
}
if (!bin) {
log.info("clojure-lsp not found, please install clojure-lsp first")
@@ -1909,7 +1910,7 @@ export namespace LSPServer {
return Instance.directory
},
async spawn(root) {
- const nixd = Bun.which("nixd")
+ const nixd = which("nixd")
if (!nixd) {
log.info("nixd not found, please install nixd first")
return
@@ -1930,7 +1931,7 @@ export namespace LSPServer {
extensions: [".typ", ".typc"],
root: NearestRoot(["typst.toml"]),
async spawn(root) {
- let bin = Bun.which("tinymist", {
+ let bin = which("tinymist", {
PATH: process.env["PATH"] + path.delimiter + Global.Path.bin,
})
@@ -2024,7 +2025,7 @@ export namespace LSPServer {
extensions: [".hs", ".lhs"],
root: NearestRoot(["stack.yaml", "cabal.project", "hie.yaml", "*.cabal"]),
async spawn(root) {
- const bin = Bun.which("haskell-language-server-wrapper")
+ const bin = which("haskell-language-server-wrapper")
if (!bin) {
log.info("haskell-language-server-wrapper not found, please install haskell-language-server")
return
@@ -2042,7 +2043,7 @@ export namespace LSPServer {
extensions: [".jl"],
root: NearestRoot(["Project.toml", "Manifest.toml", "*.jl"]),
async spawn(root) {
- const julia = Bun.which("julia")
+ const julia = which("julia")
if (!julia) {
log.info("julia not found, please install julia first (https://julialang.org/downloads/)")
return
diff --git a/packages/opencode/src/mcp/oauth-callback.ts b/packages/opencode/src/mcp/oauth-callback.ts
index bb3b56f2e9..db8e621d6c 100644
--- a/packages/opencode/src/mcp/oauth-callback.ts
+++ b/packages/opencode/src/mcp/oauth-callback.ts
@@ -1,3 +1,4 @@
+import { createConnection } from "net"
import { Log } from "../util/log"
import { OAUTH_CALLBACK_PORT, OAUTH_CALLBACK_PATH } from "./oauth-provider"
@@ -160,21 +161,12 @@ export namespace McpOAuthCallback {
export async function isPortInUse(): Promise {
return new Promise((resolve) => {
- Bun.connect({
- hostname: "127.0.0.1",
- port: OAUTH_CALLBACK_PORT,
- socket: {
- open(socket) {
- socket.end()
- resolve(true)
- },
- error() {
- resolve(false)
- },
- data() {},
- close() {},
- },
- }).catch(() => {
+ const socket = createConnection(OAUTH_CALLBACK_PORT, "127.0.0.1")
+ socket.on("connect", () => {
+ socket.destroy()
+ resolve(true)
+ })
+ socket.on("error", () => {
resolve(false)
})
})
diff --git a/packages/opencode/src/plugin/codex.ts b/packages/opencode/src/plugin/codex.ts
index 483e3f1277..d3bedc30ce 100644
--- a/packages/opencode/src/plugin/codex.ts
+++ b/packages/opencode/src/plugin/codex.ts
@@ -4,6 +4,7 @@ import { Installation } from "../installation"
import { Auth, OAUTH_DUMMY_KEY } from "../auth"
import os from "os"
import { ProviderTransform } from "@/provider/transform"
+import { setTimeout as sleep } from "node:timers/promises"
const log = Log.create({ service: "plugin.codex" })
@@ -361,6 +362,7 @@ export async function CodexAuthPlugin(input: PluginInput): Promise {
"gpt-5.1-codex-max",
"gpt-5.1-codex-mini",
"gpt-5.2",
+ "gpt-5.4",
"gpt-5.2-codex",
"gpt-5.3-codex",
"gpt-5.1-codex",
@@ -602,7 +604,7 @@ export async function CodexAuthPlugin(input: PluginInput): Promise {
return { type: "failed" as const }
}
- await Bun.sleep(interval + OAUTH_POLLING_SAFETY_MARGIN_MS)
+ await sleep(interval + OAUTH_POLLING_SAFETY_MARGIN_MS)
}
},
}
diff --git a/packages/opencode/src/plugin/copilot.ts b/packages/opencode/src/plugin/copilot.ts
index 39ea0d00d2..3945c63ce2 100644
--- a/packages/opencode/src/plugin/copilot.ts
+++ b/packages/opencode/src/plugin/copilot.ts
@@ -1,6 +1,7 @@
import type { Hooks, PluginInput } from "@opencode-ai/plugin"
import { Installation } from "@/installation"
import { iife } from "@/util/iife"
+import { setTimeout as sleep } from "node:timers/promises"
const CLIENT_ID = "Ov23li8tweQw6odWQebz"
// Add a small safety buffer when polling to avoid hitting the server
@@ -270,7 +271,7 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise {
}
if (data.error === "authorization_pending") {
- await Bun.sleep(deviceData.interval * 1000 + OAUTH_POLLING_SAFETY_MARGIN_MS)
+ await sleep(deviceData.interval * 1000 + OAUTH_POLLING_SAFETY_MARGIN_MS)
continue
}
@@ -286,13 +287,13 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise {
newInterval = serverInterval * 1000
}
- await Bun.sleep(newInterval + OAUTH_POLLING_SAFETY_MARGIN_MS)
+ await sleep(newInterval + OAUTH_POLLING_SAFETY_MARGIN_MS)
continue
}
if (data.error) return { type: "failed" as const }
- await Bun.sleep(deviceData.interval * 1000 + OAUTH_POLLING_SAFETY_MARGIN_MS)
+ await sleep(deviceData.interval * 1000 + OAUTH_POLLING_SAFETY_MARGIN_MS)
continue
}
},
diff --git a/packages/opencode/src/project/project.ts b/packages/opencode/src/project/project.ts
index 7d86b88143..aee8a26481 100644
--- a/packages/opencode/src/project/project.ts
+++ b/packages/opencode/src/project/project.ts
@@ -14,6 +14,7 @@ import { GlobalBus } from "@/bus/global"
import { existsSync } from "fs"
import { git } from "../util/git"
import { Glob } from "../util/glob"
+import { which } from "../util/which"
export namespace Project {
const log = Log.create({ service: "project" })
@@ -97,7 +98,7 @@ export namespace Project {
if (dotgit) {
let sandbox = path.dirname(dotgit)
- const gitBinary = Bun.which("git")
+ const gitBinary = which("git")
// cached id calculation
let id = await Filesystem.readText(path.join(dotgit, "altimate"))
diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts
index be027a84b4..a839e4ad6c 100644
--- a/packages/opencode/src/provider/provider.ts
+++ b/packages/opencode/src/provider/provider.ts
@@ -6,9 +6,10 @@ import { mapValues, mergeDeep, omit, pickBy, sortBy } from "remeda"
import { NoSuchModelError, type Provider as SDK } from "ai"
import { Log } from "../util/log"
import { BunProc } from "../bun"
+import { Hash } from "../util/hash"
import { Plugin } from "../plugin"
-import { ModelsDev } from "./models"
import { NamedError } from "@opencode-ai/util/error"
+import { ModelsDev } from "./models"
import { Auth } from "../auth"
import { Env } from "../env"
import { Instance } from "../project/instance"
@@ -774,7 +775,7 @@ export namespace Provider {
const modelLoaders: {
[providerID: string]: CustomModelLoader
} = {}
- const sdk = new Map()
+ const sdk = new Map()
log.info("init")
@@ -1064,7 +1065,7 @@ export namespace Provider {
...model.headers,
}
- const key = Bun.hash.xxHash32(JSON.stringify({ providerID: model.providerID, npm: model.api.npm, options }))
+ const key = Hash.fast(JSON.stringify({ providerID: model.providerID, npm: model.api.npm, options }))
const existing = s.sdk.get(key)
if (existing) return existing
diff --git a/packages/opencode/src/shell/shell.ts b/packages/opencode/src/shell/shell.ts
index e7b7cdb3e4..60ae46f5ee 100644
--- a/packages/opencode/src/shell/shell.ts
+++ b/packages/opencode/src/shell/shell.ts
@@ -1,8 +1,10 @@
import { Flag } from "@/flag/flag"
import { lazy } from "@/util/lazy"
import { Filesystem } from "@/util/filesystem"
+import { which } from "@/util/which"
import path from "path"
import { spawn, type ChildProcess } from "child_process"
+import { setTimeout as sleep } from "node:timers/promises"
const SIGKILL_TIMEOUT_MS = 200
@@ -22,13 +24,13 @@ export namespace Shell {
try {
process.kill(-pid, "SIGTERM")
- await Bun.sleep(SIGKILL_TIMEOUT_MS)
+ await sleep(SIGKILL_TIMEOUT_MS)
if (!opts?.exited?.()) {
process.kill(-pid, "SIGKILL")
}
} catch (_e) {
proc.kill("SIGTERM")
- await Bun.sleep(SIGKILL_TIMEOUT_MS)
+ await sleep(SIGKILL_TIMEOUT_MS)
if (!opts?.exited?.()) {
proc.kill("SIGKILL")
}
@@ -39,7 +41,7 @@ export namespace Shell {
function fallback() {
if (process.platform === "win32") {
if (Flag.OPENCODE_GIT_BASH_PATH) return Flag.OPENCODE_GIT_BASH_PATH
- const git = Bun.which("git")
+ const git = which("git")
if (git) {
// git.exe is typically at: C:\Program Files\Git\cmd\git.exe
// bash.exe is at: C:\Program Files\Git\bin\bash.exe
@@ -49,7 +51,7 @@ export namespace Shell {
return process.env.COMSPEC || "cmd.exe"
}
if (process.platform === "darwin") return "/bin/zsh"
- const bash = Bun.which("bash")
+ const bash = which("bash")
if (bash) return bash
return "/bin/sh"
}
diff --git a/packages/opencode/src/snapshot/index.ts b/packages/opencode/src/snapshot/index.ts
index cf254b4cef..1acbdba092 100644
--- a/packages/opencode/src/snapshot/index.ts
+++ b/packages/opencode/src/snapshot/index.ts
@@ -1,6 +1,7 @@
import { $ } from "bun"
import path from "path"
import fs from "fs/promises"
+import { Filesystem } from "../util/filesystem"
import { Log } from "../util/log"
import { Flag } from "../flag/flag"
import { Global } from "../global"
@@ -271,13 +272,12 @@ export namespace Snapshot {
const target = path.join(git, "info", "exclude")
await fs.mkdir(path.join(git, "info"), { recursive: true })
if (!file) {
- await Bun.write(target, "")
+ await Filesystem.write(target, "")
return
}
- const text = await Bun.file(file)
- .text()
- .catch(() => "")
- await Bun.write(target, text)
+ const text = await Filesystem.readText(file).catch(() => "")
+
+ await Filesystem.write(target, text)
}
async function excludes() {
diff --git a/packages/opencode/src/util/hash.ts b/packages/opencode/src/util/hash.ts
new file mode 100644
index 0000000000..680e0f40bc
--- /dev/null
+++ b/packages/opencode/src/util/hash.ts
@@ -0,0 +1,7 @@
+import { createHash } from "crypto"
+
+export namespace Hash {
+ export function fast(input: string | Buffer): string {
+ return createHash("sha1").update(input).digest("hex")
+ }
+}
diff --git a/packages/opencode/src/util/which.ts b/packages/opencode/src/util/which.ts
new file mode 100644
index 0000000000..78e651e8e8
--- /dev/null
+++ b/packages/opencode/src/util/which.ts
@@ -0,0 +1,10 @@
+import whichPkg from "which"
+
+export function which(cmd: string, env?: NodeJS.ProcessEnv) {
+ const result = whichPkg.sync(cmd, {
+ nothrow: true,
+ path: env?.PATH,
+ pathExt: env?.PATHEXT,
+ })
+ return typeof result === "string" ? result : null
+}
diff --git a/packages/opencode/src/worktree/index.ts b/packages/opencode/src/worktree/index.ts
index 2267322494..15efcea386 100644
--- a/packages/opencode/src/worktree/index.ts
+++ b/packages/opencode/src/worktree/index.ts
@@ -474,6 +474,11 @@ export namespace Worktree {
throw new RemoveFailedError({ message: message || "Failed to remove git worktree directory" })
})
+ const stop = async (target: string) => {
+ if (!(await exists(target))) return
+ await $`git fsmonitor--daemon stop`.quiet().nothrow().cwd(target)
+ }
+
const list = await $`git worktree list --porcelain`.quiet().nothrow().cwd(Instance.worktree)
if (list.exitCode !== 0) {
throw new RemoveFailedError({ message: errorText(list) || "Failed to read git worktrees" })
@@ -484,11 +489,13 @@ export namespace Worktree {
if (!entry?.path) {
const directoryExists = await exists(directory)
if (directoryExists) {
+ await stop(directory)
await clean(directory)
}
return true
}
+ await stop(entry.path)
const removed = await $`git worktree remove --force ${entry.path}`.quiet().nothrow().cwd(Instance.worktree)
if (removed.exitCode !== 0) {
const next = await $`git worktree list --porcelain`.quiet().nothrow().cwd(Instance.worktree)
@@ -637,7 +644,7 @@ export namespace Worktree {
throw new ResetFailedError({ message: errorText(subClean) || "Failed to clean submodules" })
}
- const status = await $`git status --porcelain=v1`.quiet().nothrow().cwd(worktreePath)
+ const status = await $`git -c core.fsmonitor=false status --porcelain=v1`.quiet().nothrow().cwd(worktreePath)
if (status.exitCode !== 0) {
throw new ResetFailedError({ message: errorText(status) || "Failed to read git status" })
}
diff --git a/packages/opencode/test/file/fsmonitor.test.ts b/packages/opencode/test/file/fsmonitor.test.ts
new file mode 100644
index 0000000000..8cdde014db
--- /dev/null
+++ b/packages/opencode/test/file/fsmonitor.test.ts
@@ -0,0 +1,62 @@
+import { $ } from "bun"
+import { describe, expect, test } from "bun:test"
+import fs from "fs/promises"
+import path from "path"
+import { File } from "../../src/file"
+import { Instance } from "../../src/project/instance"
+import { tmpdir } from "../fixture/fixture"
+
+const wintest = process.platform === "win32" ? test : test.skip
+
+describe("file fsmonitor", () => {
+ wintest("status does not start fsmonitor for readonly git checks", async () => {
+ await using tmp = await tmpdir({ git: true })
+ const target = path.join(tmp.path, "tracked.txt")
+
+ await fs.writeFile(target, "base\n")
+ await $`git add tracked.txt`.cwd(tmp.path).quiet()
+ await $`git commit -m init`.cwd(tmp.path).quiet()
+ await $`git config core.fsmonitor true`.cwd(tmp.path).quiet()
+ await $`git fsmonitor--daemon stop`.cwd(tmp.path).quiet().nothrow()
+ await fs.writeFile(target, "next\n")
+ await fs.writeFile(path.join(tmp.path, "new.txt"), "new\n")
+
+ const before = await $`git fsmonitor--daemon status`.cwd(tmp.path).quiet().nothrow()
+ expect(before.exitCode).not.toBe(0)
+
+ await Instance.provide({
+ directory: tmp.path,
+ fn: async () => {
+ await File.status()
+ },
+ })
+
+ const after = await $`git fsmonitor--daemon status`.cwd(tmp.path).quiet().nothrow()
+ expect(after.exitCode).not.toBe(0)
+ })
+
+ wintest("read does not start fsmonitor for git diffs", async () => {
+ await using tmp = await tmpdir({ git: true })
+ const target = path.join(tmp.path, "tracked.txt")
+
+ await fs.writeFile(target, "base\n")
+ await $`git add tracked.txt`.cwd(tmp.path).quiet()
+ await $`git commit -m init`.cwd(tmp.path).quiet()
+ await $`git config core.fsmonitor true`.cwd(tmp.path).quiet()
+ await $`git fsmonitor--daemon stop`.cwd(tmp.path).quiet().nothrow()
+ await fs.writeFile(target, "next\n")
+
+ const before = await $`git fsmonitor--daemon status`.cwd(tmp.path).quiet().nothrow()
+ expect(before.exitCode).not.toBe(0)
+
+ await Instance.provide({
+ directory: tmp.path,
+ fn: async () => {
+ await File.read("tracked.txt")
+ },
+ })
+
+ const after = await $`git fsmonitor--daemon status`.cwd(tmp.path).quiet().nothrow()
+ expect(after.exitCode).not.toBe(0)
+ })
+})
diff --git a/packages/opencode/test/fixture/fixture.test.ts b/packages/opencode/test/fixture/fixture.test.ts
new file mode 100644
index 0000000000..153276a283
--- /dev/null
+++ b/packages/opencode/test/fixture/fixture.test.ts
@@ -0,0 +1,26 @@
+import { $ } from "bun"
+import { describe, expect, test } from "bun:test"
+import fs from "fs/promises"
+import { tmpdir } from "./fixture"
+
+describe("tmpdir", () => {
+ test("disables fsmonitor for git fixtures", async () => {
+ await using tmp = await tmpdir({ git: true })
+
+ const value = (await $`git config core.fsmonitor`.cwd(tmp.path).quiet().text()).trim()
+ expect(value).toBe("false")
+ })
+
+ test("removes directories on dispose", async () => {
+ const tmp = await tmpdir({ git: true })
+ const dir = tmp.path
+
+ await tmp[Symbol.asyncDispose]()
+
+ const exists = await fs
+ .stat(dir)
+ .then(() => true)
+ .catch(() => false)
+ expect(exists).toBe(false)
+ })
+})
diff --git a/packages/opencode/test/fixture/fixture.ts b/packages/opencode/test/fixture/fixture.ts
index ed8c5e344a..63f93bcafe 100644
--- a/packages/opencode/test/fixture/fixture.ts
+++ b/packages/opencode/test/fixture/fixture.ts
@@ -9,6 +9,27 @@ function sanitizePath(p: string): string {
return p.replace(/\0/g, "")
}
+function exists(dir: string) {
+ return fs
+ .stat(dir)
+ .then(() => true)
+ .catch(() => false)
+}
+
+function clean(dir: string) {
+ return fs.rm(dir, {
+ recursive: true,
+ force: true,
+ maxRetries: 5,
+ retryDelay: 100,
+ })
+}
+
+async function stop(dir: string) {
+ if (!(await exists(dir))) return
+ await $`git fsmonitor--daemon stop`.cwd(dir).quiet().nothrow()
+}
+
type TmpDirOptions = {
git?: boolean
config?: Partial
@@ -20,6 +41,7 @@ export async function tmpdir(options?: TmpDirOptions) {
await fs.mkdir(dirpath, { recursive: true })
if (options?.git) {
await $`git init`.cwd(dirpath).quiet()
+ await $`git config core.fsmonitor false`.cwd(dirpath).quiet()
await $`git commit --allow-empty -m "root commit ${dirpath}"`.cwd(dirpath).quiet()
}
if (options?.config) {
@@ -31,12 +53,16 @@ export async function tmpdir(options?: TmpDirOptions) {
}),
)
}
- const extra = await options?.init?.(dirpath)
const realpath = sanitizePath(await fs.realpath(dirpath))
+ const extra = await options?.init?.(realpath)
const result = {
[Symbol.asyncDispose]: async () => {
- await options?.dispose?.(dirpath)
- // await fs.rm(dirpath, { recursive: true, force: true })
+ try {
+ await options?.dispose?.(realpath)
+ } finally {
+ if (options?.git) await stop(realpath).catch(() => undefined)
+ await clean(realpath).catch(() => undefined)
+ }
},
path: realpath,
extra: extra as T,
diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts
index 41028633e8..caac3bb0de 100644
--- a/packages/opencode/test/preload.ts
+++ b/packages/opencode/test/preload.ts
@@ -3,6 +3,7 @@
import os from "os"
import path from "path"
import fs from "fs/promises"
+import { setTimeout as sleep } from "node:timers/promises"
import { afterAll } from "bun:test"
// Set XDG env vars FIRST, before any src/ imports
@@ -15,7 +16,7 @@ afterAll(async () => {
typeof error === "object" && error !== null && "code" in error && error.code === "EBUSY"
const rm = async (left: number): Promise => {
Bun.gc(true)
- await Bun.sleep(100)
+ await sleep(100)
return fs.rm(dir, { recursive: true, force: true }).catch((error) => {
if (!busy(error)) throw error
if (left <= 1) throw error
diff --git a/packages/opencode/test/project/worktree-remove.test.ts b/packages/opencode/test/project/worktree-remove.test.ts
index e17a5392bc..a6b5bb7c34 100644
--- a/packages/opencode/test/project/worktree-remove.test.ts
+++ b/packages/opencode/test/project/worktree-remove.test.ts
@@ -7,6 +7,8 @@ import { Worktree } from "../../src/worktree"
import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture"
+const wintest = process.platform === "win32" ? test : test.skip
+
describe("Worktree.remove", () => {
test("continues when git remove exits non-zero after detaching", async () => {
await using tmp = await tmpdir({ git: true })
@@ -62,4 +64,33 @@ describe("Worktree.remove", () => {
const ref = await $`git show-ref --verify --quiet refs/heads/${branch}`.cwd(root).quiet().nothrow()
expect(ref.exitCode).not.toBe(0)
})
+
+ wintest("stops fsmonitor before removing a worktree", async () => {
+ await using tmp = await tmpdir({ git: true })
+ const root = tmp.path
+ const name = `remove-fsmonitor-${Date.now().toString(36)}`
+ const branch = `opencode/${name}`
+ const dir = path.join(root, "..", name)
+
+ await $`git worktree add --no-checkout -b ${branch} ${dir}`.cwd(root).quiet()
+ await $`git reset --hard`.cwd(dir).quiet()
+ await $`git config core.fsmonitor true`.cwd(dir).quiet()
+ await $`git fsmonitor--daemon stop`.cwd(dir).quiet().nothrow()
+ await Bun.write(path.join(dir, "tracked.txt"), "next\n")
+ await $`git diff`.cwd(dir).quiet()
+
+ const before = await $`git fsmonitor--daemon status`.cwd(dir).quiet().nothrow()
+ expect(before.exitCode).toBe(0)
+
+ const ok = await Instance.provide({
+ directory: root,
+ fn: () => Worktree.remove({ directory: dir }),
+ })
+
+ expect(ok).toBe(true)
+ expect(await Filesystem.exists(dir)).toBe(false)
+
+ const ref = await $`git show-ref --verify --quiet refs/heads/${branch}`.cwd(root).quiet().nothrow()
+ expect(ref.exitCode).not.toBe(0)
+ })
})
diff --git a/packages/opencode/test/pty/pty-output-isolation.test.ts b/packages/opencode/test/pty/pty-output-isolation.test.ts
index 44858a0ed2..ec1bbd4690 100644
--- a/packages/opencode/test/pty/pty-output-isolation.test.ts
+++ b/packages/opencode/test/pty/pty-output-isolation.test.ts
@@ -2,6 +2,7 @@ import { describe, expect, test } from "bun:test"
import { Instance } from "../../src/project/instance"
import { Pty } from "../../src/pty"
import { tmpdir } from "../fixture/fixture"
+import { setTimeout as sleep } from "node:timers/promises"
describe("pty", () => {
test("does not leak output when websocket objects are reused", async () => {
@@ -43,7 +44,7 @@ describe("pty", () => {
// Output from a must never show up in b.
Pty.write(a.id, "AAA\n")
- await Bun.sleep(100)
+ await sleep(100)
expect(outB.join("")).not.toContain("AAA")
} finally {
@@ -88,7 +89,7 @@ describe("pty", () => {
}
Pty.write(a.id, "AAA\n")
- await Bun.sleep(100)
+ await sleep(100)
expect(outB.join("")).not.toContain("AAA")
} finally {
@@ -128,7 +129,7 @@ describe("pty", () => {
ctx.connId = 2
Pty.write(a.id, "AAA\n")
- await Bun.sleep(100)
+ await sleep(100)
expect(out.join("")).toContain("AAA")
} finally {
diff --git a/packages/opencode/test/session/retry.test.ts b/packages/opencode/test/session/retry.test.ts
index 6768e72d95..eba4a99505 100644
--- a/packages/opencode/test/session/retry.test.ts
+++ b/packages/opencode/test/session/retry.test.ts
@@ -1,6 +1,7 @@
import { describe, expect, test } from "bun:test"
import type { NamedError } from "@opencode-ai/util/error"
import { APICallError } from "ai"
+import { setTimeout as sleep } from "node:timers/promises"
import { SessionRetry } from "../../src/session/retry"
import { MessageV2 } from "../../src/session/message-v2"
@@ -135,7 +136,7 @@ describe("session.message-v2.fromError", () => {
new ReadableStream({
async pull(controller) {
controller.enqueue("Hello,")
- await Bun.sleep(10000)
+ await sleep(10000)
controller.enqueue(" World!")
controller.close()
},
diff --git a/packages/opencode/test/util/which.test.ts b/packages/opencode/test/util/which.test.ts
new file mode 100644
index 0000000000..323173b181
--- /dev/null
+++ b/packages/opencode/test/util/which.test.ts
@@ -0,0 +1,82 @@
+import { describe, expect, test } from "bun:test"
+import fs from "fs/promises"
+import path from "path"
+import { which } from "../../src/util/which"
+import { tmpdir } from "../fixture/fixture"
+
+async function cmd(dir: string, name: string, exec = true) {
+ const ext = process.platform === "win32" ? ".cmd" : ""
+ const file = path.join(dir, name + ext)
+ const body = process.platform === "win32" ? "@echo off\r\n" : "#!/bin/sh\n"
+ await fs.writeFile(file, body)
+ if (process.platform !== "win32") {
+ await fs.chmod(file, exec ? 0o755 : 0o644)
+ }
+ return file
+}
+
+function env(PATH: string): NodeJS.ProcessEnv {
+ return {
+ PATH,
+ PATHEXT: process.env["PATHEXT"],
+ }
+}
+
+function same(a: string | null, b: string) {
+ if (process.platform === "win32") {
+ expect(a?.toLowerCase()).toBe(b.toLowerCase())
+ return
+ }
+
+ expect(a).toBe(b)
+}
+
+describe("util.which", () => {
+ test("returns null when command is missing", () => {
+ expect(which("opencode-missing-command-for-test")).toBeNull()
+ })
+
+ test("finds a command from PATH override", async () => {
+ await using tmp = await tmpdir()
+ const bin = path.join(tmp.path, "bin")
+ await fs.mkdir(bin)
+ const file = await cmd(bin, "tool")
+
+ same(which("tool", env(bin)), file)
+ })
+
+ test("uses first PATH match", async () => {
+ await using tmp = await tmpdir()
+ const a = path.join(tmp.path, "a")
+ const b = path.join(tmp.path, "b")
+ await fs.mkdir(a)
+ await fs.mkdir(b)
+ const first = await cmd(a, "dupe")
+ await cmd(b, "dupe")
+
+ same(which("dupe", env([a, b].join(path.delimiter))), first)
+ })
+
+ test("returns null for non-executable file on unix", async () => {
+ if (process.platform === "win32") return
+
+ await using tmp = await tmpdir()
+ const bin = path.join(tmp.path, "bin")
+ await fs.mkdir(bin)
+ await cmd(bin, "noexec", false)
+
+ expect(which("noexec", env(bin))).toBeNull()
+ })
+
+ test("uses PATHEXT on windows", async () => {
+ if (process.platform !== "win32") return
+
+ await using tmp = await tmpdir()
+ const bin = path.join(tmp.path, "bin")
+ await fs.mkdir(bin)
+ const file = path.join(bin, "pathext.CMD")
+ await fs.writeFile(file, "@echo off\r\n")
+
+ expect(which("pathext", { PATH: bin, PATHEXT: ".CMD" })).toBe(file)
+ })
+})
diff --git a/packages/plugin/package.json b/packages/plugin/package.json
index e57e83d051..7419f4ecca 100644
--- a/packages/plugin/package.json
+++ b/packages/plugin/package.json
@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/plugin",
- "version": "1.2.18",
+ "version": "1.2.20",
"type": "module",
"license": "MIT",
"scripts": {
diff --git a/packages/sdk/js/package.json b/packages/sdk/js/package.json
index 22993464e3..ced7f6c05f 100644
--- a/packages/sdk/js/package.json
+++ b/packages/sdk/js/package.json
@@ -1,7 +1,7 @@
{
"$schema": "https://json.schemastore.org/package.json",
"name": "@opencode-ai/sdk",
- "version": "1.2.18",
+ "version": "1.2.20",
"type": "module",
"license": "MIT",
"scripts": {
diff --git a/packages/slack/package.json b/packages/slack/package.json
index 1407c0e4a9..fbdf202fdf 100644
--- a/packages/slack/package.json
+++ b/packages/slack/package.json
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/slack",
- "version": "1.2.18",
+ "version": "1.2.20",
"type": "module",
"license": "MIT",
"scripts": {
diff --git a/packages/ui/package.json b/packages/ui/package.json
index 0adc4c57f1..db585bb008 100644
--- a/packages/ui/package.json
+++ b/packages/ui/package.json
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/ui",
- "version": "1.2.18",
+ "version": "1.2.20",
"type": "module",
"license": "MIT",
"exports": {
diff --git a/packages/util/package.json b/packages/util/package.json
index 87cf20591e..b6ee1c3bf2 100644
--- a/packages/util/package.json
+++ b/packages/util/package.json
@@ -1,6 +1,6 @@
{
"name": "@opencode-ai/util",
- "version": "1.2.18",
+ "version": "1.2.20",
"private": true,
"type": "module",
"license": "MIT",
diff --git a/packages/web/package.json b/packages/web/package.json
index 4e7ac7cd8b..e386f581f5 100644
--- a/packages/web/package.json
+++ b/packages/web/package.json
@@ -2,7 +2,7 @@
"name": "@opencode-ai/web",
"type": "module",
"license": "MIT",
- "version": "1.2.18",
+ "version": "1.2.20",
"scripts": {
"dev": "astro dev",
"dev:remote": "VITE_API_URL=https://api.opencode.ai astro dev",
diff --git a/packages/web/src/content/docs/ar/zen.mdx b/packages/web/src/content/docs/ar/zen.mdx
index 2810dea7dd..e155748fbf 100644
--- a/packages/web/src/content/docs/ar/zen.mdx
+++ b/packages/web/src/content/docs/ar/zen.mdx
@@ -59,6 +59,7 @@ OpenCode Zen هو بوابة للذكاء الاصطناعي تتيح لك ال
| النموذج | معرّف النموذج | نقطة النهاية | حزمة AI SDK |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -141,6 +142,7 @@ https://opencode.ai/zen/v1/models
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -184,6 +186,19 @@ https://opencode.ai/zen/v1/models
---
+### نماذج مهملة
+
+| النموذج | تاريخ الإيقاف |
+| ---------------- | ------------- |
+| Qwen3 Coder 480B | 6 فبراير 2026 |
+| Kimi K2 Thinking | 6 مارس 2026 |
+| Kimi K2 | 6 مارس 2026 |
+| MiniMax M2.1 | 15 مارس 2026 |
+| GLM 4.7 | 15 مارس 2026 |
+| GLM 4.6 | 15 مارس 2026 |
+
+---
+
## الخصوصية
تتم استضافة جميع نماذجنا في الولايات المتحدة. يلتزم مزوّدونا بسياسة عدم الاحتفاظ بالبيانات (zero-retention) ولا يستخدمون بياناتك لتدريب النماذج، مع الاستثناءات التالية:
diff --git a/packages/web/src/content/docs/bs/zen.mdx b/packages/web/src/content/docs/bs/zen.mdx
index ad428884d3..8da6697d09 100644
--- a/packages/web/src/content/docs/bs/zen.mdx
+++ b/packages/web/src/content/docs/bs/zen.mdx
@@ -55,6 +55,7 @@ Nasim modelima mozete pristupiti i preko sljedecih API endpointa.
| Model | Model ID | Endpoint | AI SDK Package |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -136,6 +137,7 @@ Podrzavamo pay-as-you-go model. Ispod su cijene **po 1M tokena**.
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -178,6 +180,19 @@ Na primjer, ako postavite mjesecni limit na $20, Zen nece potrositi vise od $20
---
+### Zastarjeli modeli
+
+| Model | Datum ukidanja |
+| ---------------- | -------------- |
+| Qwen3 Coder 480B | 6. feb. 2026. |
+| Kimi K2 Thinking | 6. mart 2026. |
+| Kimi K2 | 6. mart 2026. |
+| MiniMax M2.1 | 15. mart 2026. |
+| GLM 4.7 | 15. mart 2026. |
+| GLM 4.6 | 15. mart 2026. |
+
+---
+
## Privatnost
Svi nasi modeli su hostovani u SAD-u. Provajderi prate zero-retention politiku i ne koriste vase podatke za treniranje modela, uz sljedece izuzetke:
diff --git a/packages/web/src/content/docs/da/zen.mdx b/packages/web/src/content/docs/da/zen.mdx
index e99c626c57..dee93e3bea 100644
--- a/packages/web/src/content/docs/da/zen.mdx
+++ b/packages/web/src/content/docs/da/zen.mdx
@@ -64,6 +64,7 @@ Du kan også få adgang til vores modeller gennem følgende API-endpoints.
| Model | Model ID | Endpoint | AI SDK Pakke |
| ------------------- | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -147,6 +148,7 @@ Vi støtter en pay-as-you-go-model. Nedenfor er priserne **per 1 million tokens*
| Gemini 3 Pro (≤ 200K tokens) | $2,00 | $12,00 | $0,20 | - |
| Gemini 3 Pro (> 200K tokens) | $4,00 | $18,00 | $0,40 | - |
| Gemini 3 Flash | $0,50 | $3,00 | $0,05 | - |
+| GPT 5.4 | $2,50 | $15,00 | $0,25 | - |
| GPT 5.3 Codex | $1,75 | $14,00 | $0,175 | - |
| GPT 5.2 | $1,75 | $14,00 | $0,175 | - |
| GPT 5.2 Codex | $1,75 | $14,00 | $0,175 | - |
@@ -192,6 +194,19 @@ at opkræve dig mere end $20, hvis din saldo går under $5.
---
+### Udfasede modeller
+
+| Model | Udfasningsdato |
+| ---------------- | -------------- |
+| Qwen3 Coder 480B | 6. feb. 2026   |
+| Kimi K2 Thinking | 6. marts 2026  |
+| Kimi K2 | 6. marts 2026 |
+| MiniMax M2.1 | 15. marts 2026 |
+| GLM 4.7 | 15. marts 2026 |
+| GLM 4.6 | 15. marts 2026 |
+
+---
+
## Privatliv
Alle vores modeller er hostet i USA. Vores udbydere følger en nul-opbevaringspolitik og bruger ikke dine data til modeltræning, med følgende undtagelser:
diff --git a/packages/web/src/content/docs/de/zen.mdx b/packages/web/src/content/docs/de/zen.mdx
index 7545b10deb..e5661ad569 100644
--- a/packages/web/src/content/docs/de/zen.mdx
+++ b/packages/web/src/content/docs/de/zen.mdx
@@ -57,6 +57,7 @@ Du kannst unsere Modelle auch ueber die folgenden API-Endpunkte aufrufen.
| Model | Model ID | Endpoint | AI SDK Package |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -114,12 +115,12 @@ Unten siehst du die Preise **pro 1 Mio. Tokens**.
| --------------------------------- | ------ | ------ | ----------- | ------------ |
| Big Pickle | Free | Free | Free | - |
| MiniMax M2.5 Free | Free | Free | Free | - |
-| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | - |
+| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | $0.375 |
| MiniMax M2.1 | $0.30 | $1.20 | $0.10 | - |
| GLM 5 | $1.00 | $3.20 | $0.20 | - |
| GLM 4.7 | $0.60 | $2.20 | $0.10 | - |
| GLM 4.6 | $0.60 | $2.20 | $0.10 | - |
-| Kimi K2.5 | $0.60 | $3.00 | $0.08 | - |
+| Kimi K2.5 | $0.60 | $3.00 | $0.10 | - |
| Kimi K2 Thinking | $0.40 | $2.50 | - | - |
| Kimi K2 | $0.40 | $2.50 | - | - |
| Qwen3 Coder 480B | $0.45 | $1.50 | - | - |
@@ -140,6 +141,7 @@ Unten siehst du die Preise **pro 1 Mio. Tokens**.
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -184,6 +186,19 @@ Mit aktiviertem Auto-Reload kann die Abrechnung dennoch darueber liegen, falls d
---
+### Veraltete Modelle
+
+| Model | Datum der Abschaltung |
+| ---------------- | --------------------- |
+| Qwen3 Coder 480B | 6. Feb. 2026 |
+| Kimi K2 Thinking | 6. Maerz 2026 |
+| Kimi K2 | 6. Maerz 2026 |
+| MiniMax M2.1 | 15. Maerz 2026 |
+| GLM 4.7 | 15. Maerz 2026 |
+| GLM 4.6 | 15. Maerz 2026 |
+
+---
+
## Datenschutz
Alle Modelle werden in den USA gehostet.
diff --git a/packages/web/src/content/docs/es/zen.mdx b/packages/web/src/content/docs/es/zen.mdx
index 94838902a5..9848eb100a 100644
--- a/packages/web/src/content/docs/es/zen.mdx
+++ b/packages/web/src/content/docs/es/zen.mdx
@@ -62,6 +62,7 @@ También puede acceder a nuestros modelos a través de los siguientes puntos fin
| Modelo | Model ID | Endpoint | AI SDK package |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -145,6 +146,7 @@ Apoyamos un modelo de pago por uso. A continuación se muestran los precios **po
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0,20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0,40 | - |
| Gemini 3 Flash | $0,50 | $3.00 | $0,05 | - |
+| GPT 5.4 | $2,50 | $15,00 | $0,25 | - |
| GPT 5.3 Codex | $1,75 | $14.00 | $0,175 | - |
| GPT 5.2 | $1,75 | $14.00 | $0,175 | - |
| GPT 5.2 Codex | $1,75 | $14.00 | $0,175 | - |
@@ -190,6 +192,19 @@ cobrarle más de $20 si su saldo es inferior a $5.
---
+### Modelos obsoletos
+
+| Modelo | Fecha de retiro |
+| ---------------- | ------------------- |
+| Qwen3 Coder 480B | 6 de feb. de 2026 |
+| Kimi K2 Thinking | 6 de marzo de 2026 |
+| Kimi K2 | 6 de marzo de 2026 |
+| MiniMax M2.1 | 15 de marzo de 2026 |
+| GLM 4.7 | 15 de marzo de 2026 |
+| GLM 4.6 | 15 de marzo de 2026 |
+
+---
+
## Privacidad
Todos nuestros modelos están alojados en los EE. UU. Nuestros proveedores siguen una política de retención cero y no utilizan sus datos para la capacitación de modelos, con las siguientes excepciones:
diff --git a/packages/web/src/content/docs/fr/zen.mdx b/packages/web/src/content/docs/fr/zen.mdx
index e40b1be77e..7310922aea 100644
--- a/packages/web/src/content/docs/fr/zen.mdx
+++ b/packages/web/src/content/docs/fr/zen.mdx
@@ -55,6 +55,7 @@ Vous pouvez également accéder à nos modèles via les points de terminaison AP
| Modèle | ID du modèle | Point de terminaison | Package SDK IA |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -136,6 +137,7 @@ Nous soutenons un modèle de paiement à l'utilisation. Vous trouverez ci-dessou
| Gemini 3 Pro (≤ 200K jetons) | 2,00 $ | 12,00 $ | 0,20 $ | - |
| Gemini 3 Pro (> 200K jetons) | 4,00 $ | 18,00 $ | 0,40 $ | - |
| Gemini 3 Flash | 0,50 $ | 3,00 $ | 0,05 $ | - |
+| GPT 5.4 | 2,50 $ | 15,00 $ | 0,25 $ | - |
| GPT 5.3 Codex | 1,75 $ | 14,00 $ | 0,175 $ | - |
| GPT 5.2 | 1,75 $ | 14,00 $ | 0,175 $ | - |
| GPT 5.2 Codex | 1,75 $ | 14,00 $ | 0,175 $ | - |
@@ -178,6 +180,19 @@ Par exemple, disons que vous définissez une limite d'utilisation mensuelle à 2
---
+### Modèles obsolètes
+
+| Modèle | Date de dépréciation |
+| ---------------- | -------------------- |
+| Qwen3 Coder 480B | 6 février 2026 |
+| Kimi K2 Thinking | 6 mars 2026 |
+| Kimi K2 | 6 mars 2026 |
+| MiniMax M2.1 | 15 mars 2026 |
+| GLM 4.7 | 15 mars 2026 |
+| GLM 4.6 | 15 mars 2026 |
+
+---
+
## Confidentialité
Tous nos modèles sont hébergés aux États-Unis. Nos fournisseurs suivent une politique de rétention zéro et n'utilisent pas vos données pour la formation de modèles, avec les exceptions suivantes :
diff --git a/packages/web/src/content/docs/it/zen.mdx b/packages/web/src/content/docs/it/zen.mdx
index db0434db50..3c892f0d48 100644
--- a/packages/web/src/content/docs/it/zen.mdx
+++ b/packages/web/src/content/docs/it/zen.mdx
@@ -55,6 +55,7 @@ Puoi anche accedere ai nostri modelli tramite i seguenti endpoint API.
| Modello | ID modello | Endpoint | Pacchetto AI SDK |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -136,6 +137,7 @@ Supportiamo un modello pay-as-you-go. Qui sotto trovi i prezzi **per 1M token**.
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -178,6 +180,19 @@ Per esempio, se imposti un limite mensile a $20, Zen non usera piu di $20 in un
---
+### Modelli deprecati
+
+| Modello | Data di deprecazione |
+| ---------------- | -------------------- |
+| Qwen3 Coder 480B | 6 feb 2026 |
+| Kimi K2 Thinking | 6 mar 2026 |
+| Kimi K2 | 6 mar 2026 |
+| MiniMax M2.1 | 15 mar 2026 |
+| GLM 4.7 | 15 mar 2026 |
+| GLM 4.6 | 15 mar 2026 |
+
+---
+
## Privacy
Tutti i nostri modelli sono ospitati negli US. I nostri provider seguono una policy di zero-retention e non usano i tuoi dati per training dei modelli, con le seguenti eccezioni:
diff --git a/packages/web/src/content/docs/ja/zen.mdx b/packages/web/src/content/docs/ja/zen.mdx
index c7121fb3b7..7a380aa9fb 100644
--- a/packages/web/src/content/docs/ja/zen.mdx
+++ b/packages/web/src/content/docs/ja/zen.mdx
@@ -54,6 +54,7 @@ OpenCode Zen は、OpenCode の他のプロバイダーと同様に機能しま
| Model | Model ID | Endpoint | AI SDK Package |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -137,6 +138,7 @@ https://opencode.ai/zen/v1/models
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -179,6 +181,19 @@ https://opencode.ai/zen/v1/models
---
+### 非推奨モデル
+
+| Model | Deprecation date |
+| ---------------- | ---------------- |
+| Qwen3 Coder 480B | 2026年2月6日 |
+| Kimi K2 Thinking | 2026年3月6日 |
+| Kimi K2 | 2026年3月6日 |
+| MiniMax M2.1 | 2026年3月15日 |
+| GLM 4.7 | 2026年3月15日 |
+| GLM 4.6 | 2026年3月15日 |
+
+---
+
## プライバシー
すべてのモデルは米国でホストされています。当社のプロバイダーはゼロ保持ポリシーに従い、次の例外を除いて、モデルのトレーニングにデータを使用しません。
diff --git a/packages/web/src/content/docs/ko/zen.mdx b/packages/web/src/content/docs/ko/zen.mdx
index ae598cee18..5c2b9644ff 100644
--- a/packages/web/src/content/docs/ko/zen.mdx
+++ b/packages/web/src/content/docs/ko/zen.mdx
@@ -55,6 +55,7 @@ OpenCode Zen은 OpenCode의 다른 제공자와 동일한 방식으로 작동합
| 모델 | 모델 ID | 엔드포인트 | AI SDK 패키지 |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -111,12 +112,12 @@ https://opencode.ai/zen/v1/models
| --------------------------------- | ------ | ------ | --------- | --------- |
| Big Pickle | Free | Free | Free | - |
| MiniMax M2.5 Free | Free | Free | Free | - |
-| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | - |
+| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | $0.375 |
| MiniMax M2.1 | $0.30 | $1.20 | $0.10 | - |
| GLM 5 | $1.00 | $3.20 | $0.20 | - |
| GLM 4.7 | $0.60 | $2.20 | $0.10 | - |
| GLM 4.6 | $0.60 | $2.20 | $0.10 | - |
-| Kimi K2.5 | $0.60 | $3.00 | $0.08 | - |
+| Kimi K2.5 | $0.60 | $3.00 | $0.10 | - |
| Kimi K2 Thinking | $0.40 | $2.50 | - | - |
| Kimi K2 | $0.40 | $2.50 | - | - |
| Qwen3 Coder 480B | $0.45 | $1.50 | - | - |
@@ -137,6 +138,7 @@ https://opencode.ai/zen/v1/models
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -180,6 +182,19 @@ https://opencode.ai/zen/v1/models
---
+### 지원 중단 모델
+
+| 모델 | 지원 중단일 |
+| ---------------- | --------------- |
+| Qwen3 Coder 480B | 2026년 2월 6일 |
+| Kimi K2 Thinking | 2026년 3월 6일 |
+| Kimi K2 | 2026년 3월 6일 |
+| MiniMax M2.1 | 2026년 3월 15일 |
+| GLM 4.7 | 2026년 3월 15일 |
+| GLM 4.6 | 2026년 3월 15일 |
+
+---
+
## 개인정보 보호
당사의 모든 모델은 미국에서 호스팅됩니다. 당사 제공자는 데이터 무보존(zero-retention) 정책을 따르며, 아래의 예외를 제외하고는 귀하의 데이터를 모델 학습에 사용하지 않습니다.
diff --git a/packages/web/src/content/docs/nb/zen.mdx b/packages/web/src/content/docs/nb/zen.mdx
index 51399615e5..71dd0e9eaf 100644
--- a/packages/web/src/content/docs/nb/zen.mdx
+++ b/packages/web/src/content/docs/nb/zen.mdx
@@ -64,6 +64,7 @@ Du kan også få tilgang til modellene våre gjennom følgende API-endepunkter.
| Modell | Modell ID | Endepunkt | AI SDK Pakke |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -121,7 +122,7 @@ Vi støtter en pay-as-you-go-modell. Nedenfor er prisene **per 1 million tokens*
| --------------------------------- | ------- | ------ | ------------- | --------------- |
| Big Pickle | Gratis | Gratis | Gratis | - |
| MiniMax M2.5 Free | Gratis | Gratis | Gratis | - |
-| MiniMax M2.5 | $0,30 | $1,20 | $0,06 | - |
+| MiniMax M2.5 | $0,30 | $1,20 | $0,06 | $0,375 |
| MiniMax M2.1 | $0,30 | $1,20 | $0,10 | - |
| GLM 5 | $1,00 | $3,20 | $0,20 | - |
| GLM 4.7 | $0,60 | $2,20 | $0,10 | - |
@@ -147,6 +148,7 @@ Vi støtter en pay-as-you-go-modell. Nedenfor er prisene **per 1 million tokens*
| Gemini 3 Pro (≤ 200K tokens) | $2,00 | $12,00 | $0,20 | - |
| Gemini 3 Pro (> 200K tokens) | $4,00 | $18,00 | $0,40 | - |
| Gemini 3 Flash | $0,50 | $3,00 | $0,05 | - |
+| GPT 5.4 | $2,50 | $15,00 | $0,25 | - |
| GPT 5.3 Codex | $1,75 | $14,00 | $0,175 | - |
| GPT 5.2 | $1,75 | $14,00 | $0,175 | - |
| GPT 5.2 Codex | $1,75 | $14,00 | $0,175 | - |
@@ -192,6 +194,19 @@ belaster deg mer enn $20 hvis saldoen din går under $5.
---
+### Utfasede modeller
+
+| Modell | Utfasingsdato |
+| ---------------- | ------------- |
+| Qwen3 Coder 480B | 6. feb. 2026 |
+| Kimi K2 Thinking | 6. mars 2026 |
+| Kimi K2 | 6. mars 2026 |
+| MiniMax M2.1 | 15. mars 2026 |
+| GLM 4.7 | 15. mars 2026 |
+| GLM 4.6 | 15. mars 2026 |
+
+---
+
## Personvern
Alle våre modeller er hostet i USA. Leverandørene våre følger retningslinjer om ingen datalagring og bruker ikke dataene dine til modellopplæring, med følgende unntak:
diff --git a/packages/web/src/content/docs/pl/zen.mdx b/packages/web/src/content/docs/pl/zen.mdx
index dbb75489cb..ddb7d2ff15 100644
--- a/packages/web/src/content/docs/pl/zen.mdx
+++ b/packages/web/src/content/docs/pl/zen.mdx
@@ -1,21 +1,21 @@
---
title: Zen
-description: Wyselekcjonowana lista modeli dostarczonych przez opencode.
+description: Wyselekcjonowana lista modeli dostarczonych przez OpenCode.
---
import config from "../../../../config.mjs"
export const console = config.console
export const email = `mailto:${config.email}`
-OpenCode Zen to lista przetestowanych i zweryfikowanych modeli udostępniona przez zespół opencode.
+OpenCode Zen to lista przetestowanych i zweryfikowanych modeli udostępniona przez zespół OpenCode.
:::note
-OpenCode Zen is currently in beta.
+OpenCode Zen jest obecnie w wersji beta.
:::
-Zen działa jak każdy inny dostawca opencode. Logujesz się do OpenCode Zen i dostajesz
-Twój klucz API. Jest **całkowicie opcjonalny** i nie musisz go używać, aby z niego korzystać
-opencode.
+Zen działa jak każdy inny dostawca w OpenCode. Logujesz się do OpenCode Zen i otrzymujesz
+swój klucz API. Jest to **całkowicie opcjonalne** i nie musisz tego używać, aby korzystać z
+OpenCode.
---
@@ -23,23 +23,23 @@ opencode.
Istnieje ogromna liczba modeli, ale tylko kilka z nich
działa dobrze jako agenci kodujący. Dodatkowo większość dostawców jest
-skonfigurowana bardzo różnie; więc otrzymujesz zupełnie inną wydajność i jakość.
+skonfigurowana bardzo różnie, więc otrzymujesz bardzo różną wydajność i jakość.
:::tip
-Przetestowaliśmy wybraną grupę modeli i dostawców, którzy dobrze współpracują z opencode.
+Przetestowaliśmy wybraną grupę modeli i dostawców, którzy dobrze współpracują z OpenCode.
:::
-Jeśli więc używasz modelu za pośrednictwem czegoś takiego jak OpenRouter, nigdy nie będzie to możliwe
+Jeśli więc używasz modelu za pośrednictwem czegoś takiego jak OpenRouter, nigdy nie możesz być
pewien, czy otrzymujesz najlepszą wersję modelu, jaki chcesz.
Aby to naprawić, zrobiliśmy kilka rzeczy:
-1. Przetestowaliśmy wybraną grupę modeli i rozmawialiśmy z ich zespołami o tym, jak to zrobić
- najlepiej je uruchom.
+1. Przetestowaliśmy wybraną grupę modeli i rozmawialiśmy z ich zespołami o tym, jak
+ najlepiej je uruchamiać.
2. Następnie współpracowaliśmy z kilkoma dostawcami, aby upewnić się, że są one obsługiwane
- correctly.
-3. Na koniec porównaliśmy kombinację modelu/dostawcy i otrzymaliśmy wynik
- z listą, którą z przyjemnością polecamy.
+ poprawnie.
+3. Na koniec sprawdziliśmy wydajność kombinacji modelu/dostawcy i stworzyliśmy
+ listę, którą z czystym sumieniem polecamy.
OpenCode Zen to brama AI, która zapewnia dostęp do tych modeli.
@@ -47,14 +47,14 @@ OpenCode Zen to brama AI, która zapewnia dostęp do tych modeli.
## Jak to działa
-OpenCode Zen działa jak każdy inny dostawca opencode.
+OpenCode Zen działa jak każdy inny dostawca w OpenCode.
-1. Logujesz się do **OpenCode Zen**, dodajesz swoje rozliczenia
- szczegóły i skopiuj klucz API.
+1. Logujesz się do **OpenCode Zen**, dodajesz dane rozliczeniowe
+ i kopiujesz swój klucz API.
2. Uruchamiasz polecenie `/connect` w TUI, wybierasz OpenCode Zen i wklejasz klucz API.
3. Uruchom `/models` w TUI, aby zobaczyć listę zalecanych przez nas modeli.
-Opłata jest pobierana za każde żądanie i możesz dodać kredyty do swojego konta.
+Opłata jest pobierana za każde żądanie i możesz dodać środki do swojego konta.
---
@@ -64,6 +64,7 @@ Dostęp do naszych modeli można również uzyskać za pośrednictwem następuj
| Model | Identyfikator modelu | Punkt końcowy | Pakiet SDK AI |
| ------------------ | -------------------- | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -97,9 +98,9 @@ Dostęp do naszych modeli można również uzyskać za pośrednictwem następuj
| Qwen3 Coder 480B | qwen3-coder | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
| Big Pickle | big-pickle | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-[Identyfikator modelu](/docs/config/#models) w konfiguracji opencode
-używa formatu `opencode/`. Na przykład w przypadku Kodeksu GPT 5.2 zrobiłbyś to
-użyj `opencode/gpt-5.2-codex` w swojej konfiguracji.
+[Identyfikator modelu](/docs/config/#models) w konfiguracji OpenCode
+używa formatu `opencode/`. Na przykład w przypadku GPT 5.2 Codex użyłbyś
+`opencode/gpt-5.2-codex` w swojej konfiguracji.
---
@@ -121,12 +122,12 @@ Wspieramy model pay-as-you-go. Poniżej znajdują się ceny **za 1M tokenów**.
| --------------------------------- | ------- | ------- | --------------------------- | -------------------------- |
| Big Pickle | Free | Free | Free | - |
| MiniMax M2.5 Free | Free | Free | Free | - |
-| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | - |
+| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | $0.375 |
| MiniMax M2.1 | $0.30 | $1.20 | $0.10 | - |
| GLM 5 | $1.00 | $3.20 | $0.20 | - |
| GLM 4.7 | $0.60 | $2.20 | $0.10 | - |
| GLM 4.6 | $0.60 | $2.20 | $0.10 | - |
-| Kimi K2.5 | $0.60 | $3.00 | $0.08 | - |
+| Kimi K2.5 | $0.60 | $3.00 | $0.10 | - |
| Kimi K2 Thinking | $0.40 | $2.50 | - | - |
| Kimi K2 | $0.40 | $2.50 | - | - |
| Qwen3 Coder 480B | $0.45 | $1.50 | - | - |
@@ -147,6 +148,7 @@ Wspieramy model pay-as-you-go. Poniżej znajdują się ceny **za 1M tokenów**.
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -158,10 +160,10 @@ Wspieramy model pay-as-you-go. Poniżej znajdują się ceny **za 1M tokenów**.
| GPT 5 Codex | $1.07 | $8.50 | $0.107 | - |
| GPT 5 Nano | Free | Free | Free | - |
-Możesz zauważyć _Claude Haiku 3.5_ w swojej historii użytkowania. To jest [model niskokosztowy](/docs/config/#models), który służy do generowania tytułów sesji.
+Możesz zauważyć _Claude Haiku 3.5_ w swojej historii użytkowania. Jest to [tani model](/docs/config/#models), który jest używany do generowania tytułów Twoich sesji.
:::note
-Opłaty za karty kredytowe są przenoszone na koszt (4,4% + 0,30 USD za transakcję); nie pobieramy żadnych dodatkowych opłat.
+Opłaty za karty kredytowe są przenoszone po kosztach (4,4% + 0,30 USD za transakcję); nie pobieramy nic poza tym.
:::
Darmowe modele:
@@ -177,18 +179,31 @@ Darmowe modele:
Jeśli Twoje saldo spadnie poniżej 5 USD, Zen automatycznie doładuje 20 USD.
-Możesz zmienić kwotę automatycznego doładowania. Możesz także całkowicie wyłączyć automatyczne przeładowywanie.
+Możesz zmienić kwotę automatycznego doładowania. Możesz także całkowicie wyłączyć automatyczne doładowanie.
---
### Limity miesięczne
-Możesz także ustawić miesięczny limit wykorzystania dla całego obszaru roboczego i dla każdego z nich
-członek Twojego zespołu.
+Możesz także ustawić miesięczny limit użytkowania dla całego obszaru roboczego i dla każdego
+członka Twojego zespołu.
-Załóżmy na przykład, że ustawiłeś miesięczny limit użytkowania na 20 USD, Zen nie będzie z niego korzystał
-ponad 20 dolarów miesięcznie. Ale jeśli masz włączone automatyczne przeładowywanie, Zen może się skończyć
-obciąży Cię kwotą wyższą niż 20 USD, jeśli saldo spadnie poniżej 5 USD.
+Na przykład, jeśli ustawisz miesięczny limit użytkowania na 20 USD, Zen nie zużyje
+więcej niż 20 USD w miesiącu. Ale jeśli masz włączone automatyczne doładowanie, Zen może
+obciążyć Cię kwotą wyższą niż 20 USD, jeśli saldo spadnie poniżej 5 USD.
+
+---
+
+### Przestarzałe modele
+
+| Model | Data wycofania |
+| ---------------- | -------------- |
+| Qwen3 Coder 480B | 6 lutego 2026 |
+| Kimi K2 Thinking | 6 marca 2026 |
+| Kimi K2 | 6 marca 2026 |
+| MiniMax M2.1 | 15 marca 2026 |
+| GLM 4.7 | 15 marca 2026 |
+| GLM 4.6 | 15 marca 2026 |
---
@@ -198,22 +213,22 @@ Wszystkie nasze modele są hostowane w USA. Nasi dostawcy przestrzegają polityk
- Big Pickle: W okresie bezpłatnym zebrane dane mogą zostać wykorzystane do udoskonalenia modelu.
- MiniMax M2.5 Free: W okresie bezpłatnym zebrane dane mogą zostać wykorzystane do udoskonalenia modelu.
-- Interfejsy API OpenAI: żądania są przechowywane przez 30 dni zgodnie z [Zasadami dotyczącymi danych OpenAI](https://platform.openai.com/docs/guides/your-data).
-- Interfejsy API Anthropic: żądania są przechowywane przez 30 dni zgodnie z [Zasadami dotyczącymi danych firmy Anthropic](https://docs.anthropic.com/en/docs/claude-code/data-usage).
+- API OpenAI: Żądania są przechowywane przez 30 dni zgodnie z [Zasadami dotyczącymi danych OpenAI](https://platform.openai.com/docs/guides/your-data).
+- API Anthropic: Żądania są przechowywane przez 30 dni zgodnie z [Zasadami dotyczącymi danych Anthropic](https://docs.anthropic.com/en/docs/claude-code/data-usage).
---
## Dla zespołów
-Zen świetnie sprawdza się także w zespołach. Możesz zapraszać członków zespołu, przypisywać role, zarządzać
+Zen działa świetnie także dla zespołów. Możesz zapraszać członków zespołu, przypisywać role, dobierać
modele, z których korzysta Twój zespół i nie tylko.
:::note
Obszary robocze są obecnie bezpłatne dla zespołów w ramach wersji beta.
:::
-Zarządzanie obszarem roboczym jest obecnie bezpłatne dla zespołów w ramach wersji beta. Będziemy
-wkrótce udostępnimy więcej szczegółów na temat cen.
+Zarządzanie obszarem roboczym jest obecnie bezpłatne dla zespołów w ramach wersji beta.
+Wkrótce udostępnimy więcej szczegółów na temat cen.
---
@@ -221,8 +236,8 @@ wkrótce udostępnimy więcej szczegółów na temat cen.
Możesz zapraszać członków zespołu do swojego obszaru roboczego i przypisywać role:
-- **Administrator**: Zarządzaj modelami, członkami, kluczami API i rozliczeniami
-- **Członek**: Zarządzaj tylko własnymi kluczami API
+- **Admin**: Zarządzanie modelami, członkami, kluczami API i rozliczeniami
+- **Członek**: Zarządzanie tylko własnymi kluczami API
Administratorzy mogą także ustawić miesięczne limity wydatków dla każdego członka, aby utrzymać koszty pod kontrolą.
@@ -233,7 +248,7 @@ Administratorzy mogą także ustawić miesięczne limity wydatków dla każdego
Administratorzy mogą włączać i wyłączać określone modele w obszarze roboczym. Żądania skierowane do wyłączonego modelu zwrócą błąd.
Jest to przydatne w przypadkach, gdy chcesz wyłączyć korzystanie z modelu, który
-collects data.
+zbiera dane.
---
@@ -253,6 +268,6 @@ i chcesz go używać zamiast tego, który zapewnia Zen.
Stworzyliśmy OpenCode Zen, aby:
1. **Testować** (Benchmark) najlepsze modele/dostawców dla agentów kodujących.
-2. Miej dostęp do opcji **najwyższej jakości**, a nie obniżaj wydajności ani nie kieruj się do tańszych dostawców.
-3. Przekaż wszelkie **obniżki cen**, sprzedając po kosztach; więc jedyną marżą jest pokrycie naszych opłat manipulacyjnych.
-4. Nie **nie blokuj**, umożliwiając używanie go z dowolnym innym agentem kodującym. I zawsze pozwalaj na korzystanie z opencode dowolnego innego dostawcy.
+2. Mieć dostęp do opcji **najwyższej jakości**, a nie obniżać wydajności ani nie kierować do tańszych dostawców.
+3. Przekazywać wszelkie **obniżki cen**, sprzedając po kosztach; więc jedyną marżą jest pokrycie naszych opłat manipulacyjnych.
+4. Nie **mieć blokady** (no lock-in), umożliwiając używanie go z dowolnym innym agentem kodującym. I zawsze pozwalać na korzystanie z dowolnego innego dostawcy w OpenCode.
diff --git a/packages/web/src/content/docs/pt-br/zen.mdx b/packages/web/src/content/docs/pt-br/zen.mdx
index ba029fb7fc..1ed92cbd78 100644
--- a/packages/web/src/content/docs/pt-br/zen.mdx
+++ b/packages/web/src/content/docs/pt-br/zen.mdx
@@ -55,6 +55,7 @@ Você também pode acessar nossos modelos através dos seguintes endpoints da AP
| Modelo | ID do Modelo | Endpoint | Pacote AI SDK |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -136,6 +137,7 @@ Nós suportamos um modelo de pagamento conforme o uso. Abaixo estão os preços
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -178,6 +180,19 @@ Por exemplo, digamos que você defina um limite de uso mensal de $20, o Zen não
---
+### Modelos obsoletos
+
+| Modelo | Data de descontinuação |
+| ---------------- | ---------------------- |
+| Qwen3 Coder 480B | 6 de fev. de 2026 |
+| Kimi K2 Thinking | 6 de mar. de 2026 |
+| Kimi K2 | 6 de mar. de 2026 |
+| MiniMax M2.1 | 15 de mar. de 2026 |
+| GLM 4.7 | 15 de mar. de 2026 |
+| GLM 4.6 | 15 de mar. de 2026 |
+
+---
+
## Privacidade
Todos os nossos modelos estão hospedados nos EUA. Nossos provedores seguem uma política de zero retenção e não usam seus dados para treinamento de modelos, com as seguintes exceções:
diff --git a/packages/web/src/content/docs/ru/zen.mdx b/packages/web/src/content/docs/ru/zen.mdx
index 078d1a3819..dff843d034 100644
--- a/packages/web/src/content/docs/ru/zen.mdx
+++ b/packages/web/src/content/docs/ru/zen.mdx
@@ -63,6 +63,7 @@ OpenCode Zen работает так же, как и любой другой п
| Модель | Идентификатор модели | Конечная точка | Пакет AI SDK |
| ------------------ | -------------------- | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -146,6 +147,7 @@ https://opencode.ai/zen/v1/models
| Gemini 3 Pro (≤ 200 тыс. токенов) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200 тыс. токенов) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -191,6 +193,19 @@ https://opencode.ai/zen/v1/models
---
+### Устаревшие модели
+
+| Модель | Дата отключения |
+| ---------------- | ---------------- |
+| Qwen3 Coder 480B | 6 февр. 2026 г. |
+| Kimi K2 Thinking | 6 марта 2026 г. |
+| Kimi K2 | 6 марта 2026 г. |
+| MiniMax M2.1 | 15 марта 2026 г. |
+| GLM 4.7 | 15 марта 2026 г. |
+| GLM 4.6 | 15 марта 2026 г. |
+
+---
+
## Конфиденциальность
Все наши модели размещены в США. Наши поставщики придерживаются политики нулевого хранения и не используют ваши данные для обучения моделей, за следующими исключениями:
diff --git a/packages/web/src/content/docs/th/zen.mdx b/packages/web/src/content/docs/th/zen.mdx
index 7b9f172756..36b2090807 100644
--- a/packages/web/src/content/docs/th/zen.mdx
+++ b/packages/web/src/content/docs/th/zen.mdx
@@ -64,6 +64,7 @@ OpenCode Zen ทำงานเหมือนกับผู้ให้บร
| Model | Model ID | Endpoint | แพ็คเกจ AI SDK |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -121,12 +122,12 @@ https://opencode.ai/zen/v1/models
| --------------------------------- | ---------- | -------- | ------- | ---------- |
| Big Pickle | ฟรี | ฟรี | ฟรี | - |
| MiniMax M2.5 Free | ฟรี | ฟรี | ฟรี | - |
-| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | - |
+| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | $0.375 |
| MiniMax M2.1 | $0.30 | $1.20 | $0.10 | - |
| GLM 5 | $1.00 | $3.20 | $0.20 | - |
| GLM 4.7 | $0.60 | $2.20 | $0.10 | - |
| GLM 4.6 | $0.60 | $2.20 | $0.10 | - |
-| Kimi K2.5 | $0.60 | $3.00 | $0.08 | - |
+| Kimi K2.5 | $0.60 | $3.00 | $0.10 | - |
| Kimi K2 Thinking | $0.40 | $2.50 | - | - |
| Kimi K2 | $0.40 | $2.50 | - | - |
| Qwen3 Coder 480B | $0.45 | $1.50 | - | - |
@@ -147,6 +148,7 @@ https://opencode.ai/zen/v1/models
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -192,11 +194,24 @@ https://opencode.ai/zen/v1/models
---
+### โมเดลที่เลิกใช้แล้ว
+
+| Model | วันที่เลิกใช้ |
+| ---------------- | ------------- |
+| Qwen3 Coder 480B | 6 ก.พ. 2026 |
+| Kimi K2 Thinking | 6 มี.ค. 2026 |
+| Kimi K2 | 6 มี.ค. 2026 |
+| MiniMax M2.1 | 15 มี.ค. 2026 |
+| GLM 4.7 | 15 มี.ค. 2026 |
+| GLM 4.6 | 15 มี.ค. 2026 |
+
+---
+
## ความเป็นส่วนตัว
โมเดลทั้งหมดของเราโฮสต์ในสหรัฐอเมริกา ผู้ให้บริการของเราปฏิบัติตามนโยบายการเก็บรักษาเป็นศูนย์ และไม่ใช้ข้อมูลของคุณสำหรับการฝึกโมเดล โดยมีข้อยกเว้นต่อไปนี้:
-- Big Pickle: ในช่วงระยะเวลาว่าง ข้อมูลที่รวบรวมอาจนำไปใช้ในการปรับปรุงโมเดลได้
+- Big Pickle: ในช่วงระยะเวลาฟรี ข้อมูลที่รวบรวมอาจนำไปใช้ในการปรับปรุงโมเดลได้
- MiniMax M2.5 Free: ในช่วงระยะเวลาฟรี ข้อมูลที่รวบรวมอาจนำไปใช้ในการปรับปรุงโมเดล
- OpenAI API: คำขอจะถูกเก็บไว้เป็นเวลา 30 วันตาม [นโยบายข้อมูลของ OpenAI](https://platform.openai.com/docs/guides/your-data)
- Anthropic API: คำขอจะถูกเก็บไว้เป็นเวลา 30 วันตาม [นโยบายข้อมูลของ Anthropic](https://docs.anthropic.com/en/docs/claude-code/data-usage)
diff --git a/packages/web/src/content/docs/tr/zen.mdx b/packages/web/src/content/docs/tr/zen.mdx
index 9582a7b7dc..2b79bb9625 100644
--- a/packages/web/src/content/docs/tr/zen.mdx
+++ b/packages/web/src/content/docs/tr/zen.mdx
@@ -55,6 +55,7 @@ Modellerimize aşağıdaki API uç noktaları aracılığıyla da erişebilirsin
| Model | Model ID | Endpoint | AI SDK Package |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -136,6 +137,7 @@ Kullandıkça öde modelini destekliyoruz. Aşağıda **1 milyon token başına*
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -178,6 +180,19 @@ Ayrıca tüm çalışma alanı ve ekibinizin her üyesi için aylık kullanım l
---
+### Kullanımdan kaldırılan modeller
+
+| Model | Kullanımdan kaldırılma tarihi |
+| ---------------- | ----------------------------- |
+| Qwen3 Coder 480B | 6 Şub 2026 |
+| Kimi K2 Thinking | 6 Mar 2026 |
+| Kimi K2 | 6 Mar 2026 |
+| MiniMax M2.1 | 15 Mar 2026 |
+| GLM 4.7 | 15 Mar 2026 |
+| GLM 4.6 | 15 Mar 2026 |
+
+---
+
## Gizlilik
Tüm modellerimiz ABD'de barındırılmaktadır. Sağlayıcılarımız sıfır saklama politikasını izler ve aşağıdaki istisnalar dışında verilerinizi model eğitimi için kullanmaz:
diff --git a/packages/web/src/content/docs/zen.mdx b/packages/web/src/content/docs/zen.mdx
index 5ed2125cb1..330f90014d 100644
--- a/packages/web/src/content/docs/zen.mdx
+++ b/packages/web/src/content/docs/zen.mdx
@@ -62,44 +62,47 @@ You are charged per request and you can add credits to your account.
You can also access our models through the following API endpoints.
-| Model | Model ID | Endpoint | AI SDK Package |
-| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
-| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| GPT 5.1 | gpt-5.1 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| GPT 5.1 Codex | gpt-5.1-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| GPT 5.1 Codex Max | gpt-5.1-codex-max | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| GPT 5.1 Codex Mini | gpt-5.1-codex-mini | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
-| Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
-| Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
-| Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
-| Claude Sonnet 4.6 | claude-sonnet-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
-| Claude Sonnet 4.5 | claude-sonnet-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
-| Claude Sonnet 4 | claude-sonnet-4 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
-| Claude Haiku 4.5 | claude-haiku-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
-| Claude Haiku 3.5 | claude-3-5-haiku | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
-| Gemini 3.1 Pro | gemini-3.1-pro | `https://opencode.ai/zen/v1/models/gemini-3.1-pro` | `@ai-sdk/google` |
-| Gemini 3 Pro | gemini-3-pro | `https://opencode.ai/zen/v1/models/gemini-3-pro` | `@ai-sdk/google` |
-| Gemini 3 Flash | gemini-3-flash | `https://opencode.ai/zen/v1/models/gemini-3-flash` | `@ai-sdk/google` |
-| MiniMax M2.5 | minimax-m2.5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| MiniMax M2.5 Free | minimax-m2.5-free | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| MiniMax M2.1 | minimax-m2.1 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| GLM 5 | glm-5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| GLM 4.7 | glm-4.7 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| GLM 4.6 | glm-4.6 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| Kimi K2.5 | kimi-k2.5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| Kimi K2 Thinking | kimi-k2-thinking | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| Kimi K2 | kimi-k2 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| Qwen3 Coder 480B | qwen3-coder | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
-| Big Pickle | big-pickle | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| Model | Model ID | Endpoint | AI SDK Package |
+| ------------------- | ------------------- | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 Pro | gpt-5.4-pro | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5.3 Codex Spark | gpt-5.3-codex-spark | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5.1 | gpt-5.1 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5.1 Codex | gpt-5.1-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5.1 Codex Max | gpt-5.1-codex-max | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5.1 Codex Mini | gpt-5.1-codex-mini | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5 | gpt-5 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5 Codex | gpt-5-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| GPT 5 Nano | gpt-5-nano | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
+| Claude Opus 4.6 | claude-opus-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
+| Claude Opus 4.5 | claude-opus-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
+| Claude Opus 4.1 | claude-opus-4-1 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
+| Claude Sonnet 4.6 | claude-sonnet-4-6 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
+| Claude Sonnet 4.5 | claude-sonnet-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
+| Claude Sonnet 4 | claude-sonnet-4 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
+| Claude Haiku 4.5 | claude-haiku-4-5 | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
+| Claude Haiku 3.5 | claude-3-5-haiku | `https://opencode.ai/zen/v1/messages` | `@ai-sdk/anthropic` |
+| Gemini 3.1 Pro | gemini-3.1-pro | `https://opencode.ai/zen/v1/models/gemini-3.1-pro` | `@ai-sdk/google` |
+| Gemini 3 Pro | gemini-3-pro | `https://opencode.ai/zen/v1/models/gemini-3-pro` | `@ai-sdk/google` |
+| Gemini 3 Flash | gemini-3-flash | `https://opencode.ai/zen/v1/models/gemini-3-flash` | `@ai-sdk/google` |
+| MiniMax M2.5 | minimax-m2.5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| MiniMax M2.5 Free | minimax-m2.5-free | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| MiniMax M2.1 | minimax-m2.1 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| GLM 5 | glm-5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| GLM 4.7 | glm-4.7 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| GLM 4.6 | glm-4.6 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| Kimi K2.5 | kimi-k2.5 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| Kimi K2 Thinking | kimi-k2-thinking | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| Kimi K2 | kimi-k2 | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| Qwen3 Coder 480B | qwen3-coder | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
+| Big Pickle | big-pickle | `https://opencode.ai/zen/v1/chat/completions` | `@ai-sdk/openai-compatible` |
The [model id](/docs/config/#models) in your OpenCode config
-uses the format `opencode/<model-id>`. For example, for GPT 5.2 Codex, you would
-use `opencode/gpt-5.2-codex` in your config.
+uses the format `opencode/<model-id>`. For example, for GPT 5.3 Codex, you would
+use `opencode/gpt-5.3-codex` in your config.
---
@@ -117,46 +120,49 @@ https://opencode.ai/zen/v1/models
We support a pay-as-you-go model. Below are the prices **per 1M tokens**.
-| Model | Input | Output | Cached Read | Cached Write |
-| --------------------------------- | ------ | ------ | ----------- | ------------ |
-| Big Pickle | Free | Free | Free | - |
-| MiniMax M2.5 Free | Free | Free | Free | - |
-| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | $0.375 |
-| MiniMax M2.1 | $0.30 | $1.20 | $0.10 | - |
-| GLM 5 | $1.00 | $3.20 | $0.20 | - |
-| GLM 4.7 | $0.60 | $2.20 | $0.10 | - |
-| GLM 4.6 | $0.60 | $2.20 | $0.10 | - |
-| Kimi K2.5 | $0.60 | $3.00 | $0.10 | - |
-| Kimi K2 Thinking | $0.40 | $2.50 | - | - |
-| Kimi K2 | $0.40 | $2.50 | - | - |
-| Qwen3 Coder 480B | $0.45 | $1.50 | - | - |
-| Claude Opus 4.6 (≤ 200K tokens) | $5.00 | $25.00 | $0.50 | $6.25 |
-| Claude Opus 4.6 (> 200K tokens) | $10.00 | $37.50 | $1.00 | $12.50 |
-| Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 |
-| Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 |
-| Claude Sonnet 4.6 (≤ 200K tokens) | $3.00 | $15.00 | $0.30 | $3.75 |
-| Claude Sonnet 4.6 (> 200K tokens) | $6.00 | $22.50 | $0.60 | $7.50 |
-| Claude Sonnet 4.5 (≤ 200K tokens) | $3.00 | $15.00 | $0.30 | $3.75 |
-| Claude Sonnet 4.5 (> 200K tokens) | $6.00 | $22.50 | $0.60 | $7.50 |
-| Claude Sonnet 4 (≤ 200K tokens) | $3.00 | $15.00 | $0.30 | $3.75 |
-| Claude Sonnet 4 (> 200K tokens) | $6.00 | $22.50 | $0.60 | $7.50 |
-| Claude Haiku 4.5 | $1.00 | $5.00 | $0.10 | $1.25 |
-| Claude Haiku 3.5 | $0.80 | $4.00 | $0.08 | $1.00 |
-| Gemini 3.1 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
-| Gemini 3.1 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
-| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
-| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
-| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
-| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
-| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
-| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
-| GPT 5.1 | $1.07 | $8.50 | $0.107 | - |
-| GPT 5.1 Codex | $1.07 | $8.50 | $0.107 | - |
-| GPT 5.1 Codex Max | $1.25 | $10.00 | $0.125 | - |
-| GPT 5.1 Codex Mini | $0.25 | $2.00 | $0.025 | - |
-| GPT 5 | $1.07 | $8.50 | $0.107 | - |
-| GPT 5 Codex | $1.07 | $8.50 | $0.107 | - |
-| GPT 5 Nano | Free | Free | Free | - |
+| Model | Input | Output | Cached Read | Cached Write |
+| --------------------------------- | ------ | ------- | ----------- | ------------ |
+| Big Pickle | Free | Free | Free | - |
+| MiniMax M2.5 Free | Free | Free | Free | - |
+| MiniMax M2.5 | $0.30 | $1.20 | $0.06 | $0.375 |
+| MiniMax M2.1 | $0.30 | $1.20 | $0.10 | - |
+| GLM 5 | $1.00 | $3.20 | $0.20 | - |
+| GLM 4.7 | $0.60 | $2.20 | $0.10 | - |
+| GLM 4.6 | $0.60 | $2.20 | $0.10 | - |
+| Kimi K2.5 | $0.60 | $3.00 | $0.10 | - |
+| Kimi K2 Thinking | $0.40 | $2.50 | - | - |
+| Kimi K2 | $0.40 | $2.50 | - | - |
+| Qwen3 Coder 480B | $0.45 | $1.50 | - | - |
+| Claude Opus 4.6 (≤ 200K tokens) | $5.00 | $25.00 | $0.50 | $6.25 |
+| Claude Opus 4.6 (> 200K tokens) | $10.00 | $37.50 | $1.00 | $12.50 |
+| Claude Opus 4.5 | $5.00 | $25.00 | $0.50 | $6.25 |
+| Claude Opus 4.1 | $15.00 | $75.00 | $1.50 | $18.75 |
+| Claude Sonnet 4.6 (≤ 200K tokens) | $3.00 | $15.00 | $0.30 | $3.75 |
+| Claude Sonnet 4.6 (> 200K tokens) | $6.00 | $22.50 | $0.60 | $7.50 |
+| Claude Sonnet 4.5 (≤ 200K tokens) | $3.00 | $15.00 | $0.30 | $3.75 |
+| Claude Sonnet 4.5 (> 200K tokens) | $6.00 | $22.50 | $0.60 | $7.50 |
+| Claude Sonnet 4 (≤ 200K tokens) | $3.00 | $15.00 | $0.30 | $3.75 |
+| Claude Sonnet 4 (> 200K tokens) | $6.00 | $22.50 | $0.60 | $7.50 |
+| Claude Haiku 4.5 | $1.00 | $5.00 | $0.10 | $1.25 |
+| Claude Haiku 3.5 | $0.80 | $4.00 | $0.08 | $1.00 |
+| Gemini 3.1 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
+| Gemini 3.1 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
+| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
+| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
+| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 Pro | $30.00 | $180.00 | $30.00 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
+| GPT 5.3 Codex Spark | $1.75 | $14.00 | $0.175 | - |
+| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
+| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
+| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
+| GPT 5.1 | $1.07 | $8.50 | $0.107 | - |
+| GPT 5.1 Codex | $1.07 | $8.50 | $0.107 | - |
+| GPT 5.1 Codex Max | $1.25 | $10.00 | $0.125 | - |
+| GPT 5.1 Codex Mini | $0.25 | $2.00 | $0.025 | - |
+| GPT 5 | $1.07 | $8.50 | $0.107 | - |
+| GPT 5 Codex | $1.07 | $8.50 | $0.107 | - |
+| GPT 5 Nano | Free | Free | Free | - |
You might notice _Claude Haiku 3.5_ in your usage history. This is a [low cost model](/docs/config/#models) that's used to generate the titles of your sessions.
diff --git a/packages/web/src/content/docs/zh-cn/zen.mdx b/packages/web/src/content/docs/zh-cn/zen.mdx
index 0c6c6b9d95..098fb5e35b 100644
--- a/packages/web/src/content/docs/zh-cn/zen.mdx
+++ b/packages/web/src/content/docs/zh-cn/zen.mdx
@@ -55,6 +55,7 @@ OpenCode Zen 的工作方式与 OpenCode 中的任何其他提供商相同。
| 模型 | 模型 ID | 端点 | AI SDK 包 |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -136,6 +137,7 @@ https://opencode.ai/zen/v1/models
| Gemini 3 Pro (≤ 200K tokens) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K tokens) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -178,6 +180,19 @@ https://opencode.ai/zen/v1/models
---
+### 已弃用模型
+
+| 模型 | 弃用日期 |
+| ---------------- | ------------------ |
+| Qwen3 Coder 480B | 2026 年 2 月 6 日 |
+| Kimi K2 Thinking | 2026 年 3 月 6 日 |
+| Kimi K2 | 2026 年 3 月 6 日 |
+| MiniMax M2.1 | 2026 年 3 月 15 日 |
+| GLM 4.7 | 2026 年 3 月 15 日 |
+| GLM 4.6 | 2026 年 3 月 15 日 |
+
+---
+
## 隐私
我们所有的模型都托管在美国。我们的提供商遵循零保留政策,不会将你的数据用于模型训练,但以下情况除外:
diff --git a/packages/web/src/content/docs/zh-tw/zen.mdx b/packages/web/src/content/docs/zh-tw/zen.mdx
index c38188280b..c0ef9d03bd 100644
--- a/packages/web/src/content/docs/zh-tw/zen.mdx
+++ b/packages/web/src/content/docs/zh-tw/zen.mdx
@@ -55,6 +55,7 @@ OpenCode Zen 的工作方式與 OpenCode 中的任何其他供應商相同。
| 模型 | 模型 ID | 端點 | AI SDK 套件 |
| ------------------ | ------------------ | -------------------------------------------------- | --------------------------- |
+| GPT 5.4 | gpt-5.4 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.3 Codex | gpt-5.3-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 | gpt-5.2 | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
| GPT 5.2 Codex | gpt-5.2-codex | `https://opencode.ai/zen/v1/responses` | `@ai-sdk/openai` |
@@ -136,6 +137,7 @@ https://opencode.ai/zen/v1/models
| Gemini 3 Pro (≤ 200K Token) | $2.00 | $12.00 | $0.20 | - |
| Gemini 3 Pro (> 200K Token) | $4.00 | $18.00 | $0.40 | - |
| Gemini 3 Flash | $0.50 | $3.00 | $0.05 | - |
+| GPT 5.4 | $2.50 | $15.00 | $0.25 | - |
| GPT 5.3 Codex | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 | $1.75 | $14.00 | $0.175 | - |
| GPT 5.2 Codex | $1.75 | $14.00 | $0.175 | - |
@@ -178,6 +180,19 @@ https://opencode.ai/zen/v1/models
---
+### 已棄用的模型
+
+| 模型 | 棄用日期 |
+| ---------------- | ------------------ |
+| Qwen3 Coder 480B | 2026 年 2 月 6 日 |
+| Kimi K2 Thinking | 2026 年 3 月 6 日 |
+| Kimi K2 | 2026 年 3 月 6 日 |
+| MiniMax M2.1 | 2026 年 3 月 15 日 |
+| GLM 4.7 | 2026 年 3 月 15 日 |
+| GLM 4.6 | 2026 年 3 月 15 日 |
+
+---
+
## 隱私
我們所有的模型都託管在美國。我們的供應商遵循零保留政策,不會將你的資料用於模型訓練,但以下情況除外:
diff --git a/script/upstream/merge.ts b/script/upstream/merge.ts
index c9c0c36d53..1e9a804ef5 100644
--- a/script/upstream/merge.ts
+++ b/script/upstream/merge.ts
@@ -13,17 +13,18 @@
* 2. Create merge branch
* 3. Start git merge (expect conflicts)
* 4. Resolve keepOurs files (our custom code)
- * 5. Resolve skipFiles (unused upstream packages)
+ * 5. Resolve skipFiles (accept upstream for packages we don't modify)
* 6. Resolve lock files (accept ours, regenerate later)
* 7. Report remaining conflicts for manual resolution
- * 8. After manual resolution: regenerate lock file
+ * 8. After manual resolution: regenerate lock file, build, test
*/
import { parseArgs } from "util"
+import { execSync } from "child_process"
import { git, gitSafe, tagExists, currentBranch, hasUncommittedChanges, conflictedFiles } from "./utils/git"
import { loadConfig, repoRoot } from "./utils/config"
import { resolveKeepOurs } from "./transforms/keep-ours"
-import { resolveSkipFiles, cleanSkippedPackages } from "./transforms/skip-files"
+import { resolveSkipFiles } from "./transforms/skip-files"
import { resolveLockFiles, regenerateLockFile } from "./transforms/lock-files"
const { values: args } = parseArgs({
@@ -87,7 +88,7 @@ async function main() {
if (mergeResult !== null) {
console.log(" Merge completed without conflicts!")
- await postMerge(config)
+ await postMerge()
return
}
@@ -100,9 +101,9 @@ async function main() {
for (const f of keepOursResult.resolved) console.log(` ✓ ${f}`)
// Step 5: Resolve skipFiles
- console.log("\nStep 5: Resolving skipFiles (unused packages)...")
+ console.log("\nStep 5: Resolving skipFiles (accept upstream)...")
const skipResult = resolveSkipFiles()
- console.log(` Resolved ${skipResult.resolved.length} files (removed)`)
+ console.log(` Resolved ${skipResult.resolved.length} files (accepted upstream)`)
// Step 6: Resolve lock files
console.log("\nStep 6: Resolving lock files...")
@@ -114,7 +115,7 @@ async function main() {
if (remaining.length === 0) {
console.log("\nAll conflicts resolved automatically!")
git("commit --no-edit")
- await postMerge(config)
+ await postMerge()
} else {
console.log(`\nStep 7: ${remaining.length} files need manual resolution:`)
for (const f of remaining) {
@@ -128,7 +129,6 @@ async function main() {
}
async function continueAfterManualResolution() {
- const config = loadConfig()
const remaining = conflictedFiles()
if (remaining.length > 0) {
@@ -139,28 +139,69 @@ async function continueAfterManualResolution() {
console.log("All conflicts resolved. Continuing merge...")
git("commit --no-edit")
- await postMerge(config)
+ await postMerge()
}
-async function postMerge(config: ReturnType<typeof loadConfig>) {
- // Clean up skipped packages that might have been added by upstream
- console.log("\nPost-merge: Cleaning skipped packages...")
- const cleaned = cleanSkippedPackages()
- if (cleaned.length > 0) {
- console.log(` Removed ${cleaned.length} skipped directories`)
- git('commit -m "chore: remove unused upstream packages after merge"')
- }
+async function postMerge() {
+ const root = repoRoot()
+ const pkgDir = `${root}/packages/opencode`
- // Regenerate lock file
- console.log("\nPost-merge: Regenerating lock file...")
+ // Step 8: Regenerate lock file
+ console.log("\nStep 8: Regenerating lock file...")
regenerateLockFile()
git('commit -m "chore: regenerate bun.lock after upstream merge"')
- console.log("\n✅ Merge complete!")
- console.log("Next steps:")
- console.log(" 1. bun run build")
- console.log(" 2. bun test")
- console.log(" 3. Review changes: git log --oneline HEAD~5..HEAD")
+ // Step 9: Build
+ console.log("\nStep 9: Building...")
+ try {
+ execSync("bun run build", { cwd: pkgDir, stdio: "inherit" })
+ console.log(" ✓ Build passed")
+ } catch {
+ console.error("\n ✗ Build failed!")
+ console.error(" Fix build errors, then:")
+ console.error(" git add -A && git commit -m 'fix: resolve build errors after upstream merge'")
+ console.error(" bun run script/upstream/merge.ts --continue")
+ process.exit(1)
+ }
+
+ // Step 10: Test
+ console.log("\nStep 10: Running tests...")
+ try {
+ const testResult = execSync("bun test 2>&1", { cwd: pkgDir, encoding: "utf-8" })
+ // Extract summary line
+ const summary = testResult.split("\n").find((l) => l.includes("pass") && l.includes("fail"))
+ if (summary) console.log(` ${summary.trim()}`)
+ console.log(" ✓ Tests passed")
+ } catch (e: any) {
+ // Tests may have failures — extract summary to show
+ const output = e.stdout || e.stderr || ""
+ const summary = output.split("\n").find((l: string) => l.includes("pass") && l.includes("fail"))
+ if (summary) {
+ console.log(` ${summary.trim()}`)
+ }
+ console.warn(" ⚠ Some tests failed — review output above")
+ console.warn(" If failures are pre-existing (same as main), this is OK")
+ }
+
+ // Step 11: Typecheck
+ console.log("\nStep 11: Running typecheck...")
+ try {
+ execSync("bun run typecheck", { cwd: pkgDir, stdio: "inherit" })
+ console.log(" ✓ Typecheck passed")
+ } catch {
+ console.warn(" ⚠ Typecheck has errors — review output above")
+ console.warn(" If errors are pre-existing (same as main), this is OK")
+ }
+
+ console.log("\n═══════════════════════════════════════════════")
+ console.log(" MERGE COMPLETE")
+ console.log("═══════════════════════════════════════════════")
+ console.log("\nReview:")
+ console.log(` git log --oneline HEAD~5..HEAD`)
+ console.log(` git diff main --stat`)
+ console.log("\nWhen ready:")
+ console.log(` git push -u origin $(git branch --show-current)`)
+ console.log(` gh pr create --base main`)
}
async function reportOnly(version: string, config: ReturnType<typeof loadConfig>) {
@@ -194,7 +235,7 @@ async function reportOnly(version: string, config: ReturnType<typeof loadConfig>
}
console.log(`\nKeepOurs (auto-resolved): ${keepOurs.length}`)
- console.log(`SkipFiles (auto-removed): ${skipFiles.length}`)
+ console.log(`SkipFiles (accept upstream): ${skipFiles.length}`)
console.log(`Safe updates (no conflict): ${safeUpdates.length}`)
console.log(`Potential conflicts (manual review): ${potentialConflicts.length}`)
diff --git a/script/upstream/transforms/skip-files.ts b/script/upstream/transforms/skip-files.ts
index 193aca13cf..a38ba71eea 100644
--- a/script/upstream/transforms/skip-files.ts
+++ b/script/upstream/transforms/skip-files.ts
@@ -3,8 +3,8 @@ import { git, conflictedFiles } from "../utils/git"
import { loadConfig } from "../utils/config"
/**
- * For conflicted files matching skipFiles patterns, resolve by removing them.
- * These are upstream packages we don't use (e.g., packages/app/**, packages/desktop/**).
+ * For conflicted files matching skipFiles patterns, resolve by accepting upstream's version.
+ * These are upstream packages we don't modify — we keep them to avoid merge friction.
*/
export function resolveSkipFiles(): { resolved: string[]; skipped: string[] } {
const config = loadConfig()
@@ -15,7 +15,9 @@ export function resolveSkipFiles(): { resolved: string[]; skipped: string[] } {
for (const file of conflicts) {
const shouldSkip = config.skipFiles.some((pattern) => minimatch(file, pattern))
if (shouldSkip) {
- git(`rm --force "${file}"`)
+ // Accept upstream's version — we don't modify these files
+ git(`checkout --theirs "${file}"`)
+ git(`add "${file}"`)
resolved.push(file)
} else {
skipped.push(file)
@@ -24,25 +26,3 @@ export function resolveSkipFiles(): { resolved: string[]; skipped: string[] } {
return { resolved, skipped }
}
-
-/**
- * After merge, remove any new files from skipped packages that upstream added.
- */
-export function cleanSkippedPackages(): string[] {
- const config = loadConfig()
- const cleaned: string[] = []
-
- for (const pattern of config.skipFiles) {
- // Only clean directory-level patterns
- if (!pattern.endsWith("/**")) continue
- const dir = pattern.replace("/**", "")
- try {
- git(`rm -rf "${dir}"`)
- cleaned.push(dir)
- } catch {
- // Directory doesn't exist, skip
- }
- }
-
- return cleaned
-}
diff --git a/script/upstream/utils/git.ts b/script/upstream/utils/git.ts
index 60e354f1e7..6160817e6d 100644
--- a/script/upstream/utils/git.ts
+++ b/script/upstream/utils/git.ts
@@ -32,7 +32,8 @@ export function currentBranch(): string {
}
export function hasUncommittedChanges(): boolean {
- return git("status --porcelain").length > 0
+ // Only check tracked files — untracked files shouldn't block merges
+ return git("status --porcelain -uno").length > 0
}
export function conflictedFiles(): string[] {
diff --git a/sdks/vscode/package.json b/sdks/vscode/package.json
index 64307f053d..f900f1099c 100644
--- a/sdks/vscode/package.json
+++ b/sdks/vscode/package.json
@@ -2,7 +2,7 @@
"name": "opencode",
"displayName": "opencode",
"description": "opencode for VS Code",
- "version": "1.2.18",
+ "version": "1.2.20",
"publisher": "sst-dev",
"repository": {
"type": "git",