diff --git a/.github/actions/setup-bun/action.yml b/.github/actions/setup-bun/action.yml index 6c632f7e07..f53f20fcdb 100644 --- a/.github/actions/setup-bun/action.yml +++ b/.github/actions/setup-bun/action.yml @@ -3,14 +3,6 @@ description: "Setup Bun with caching and install dependencies" runs: using: "composite" steps: - - name: Cache Bun dependencies - uses: actions/cache@v4 - with: - path: ~/.bun/install/cache - key: ${{ runner.os }}-bun-${{ hashFiles('**/bun.lockb') }} - restore-keys: | - ${{ runner.os }}-bun- - - name: Get baseline download URL id: bun-url shell: bash @@ -31,6 +23,23 @@ runs: bun-version-file: ${{ !steps.bun-url.outputs.url && 'package.json' || '' }} bun-download-url: ${{ steps.bun-url.outputs.url }} + - name: Get cache directory + id: cache + shell: bash + run: echo "dir=$(bun pm cache)" >> "$GITHUB_OUTPUT" + + - name: Cache Bun dependencies + uses: actions/cache@v4 + with: + path: ${{ steps.cache.outputs.dir }} + key: ${{ runner.os }}-bun-${{ hashFiles('**/bun.lock') }} + restore-keys: | + ${{ runner.os }}-bun- + + - name: Install setuptools for distutils compatibility + run: python3 -m pip install setuptools || pip install setuptools || true + shell: bash + - name: Install dependencies run: bun install shell: bash diff --git a/.github/workflows/beta.yml b/.github/workflows/beta.yml index 46d8fd0dbe..2e61dc1529 100644 --- a/.github/workflows/beta.yml +++ b/.github/workflows/beta.yml @@ -1,9 +1,11 @@ name: beta +# Disabled — upstream beta branch syncing workflow, not needed for our fork. +# To re-enable, uncomment the triggers below. 
on:
   workflow_dispatch:
-  schedule:
-    - cron: "0 * * * *"
+  # schedule:
+  #   - cron: "0 * * * *"
 
 jobs:
   sync:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 266512718d..03ae8b434e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -42,6 +42,39 @@ jobs:
         run: bun test
         working-directory: packages/opencode
 
+  marker-guard:
+    name: Marker Guard
+    runs-on: ubuntu-latest
+    if: github.event_name == 'pull_request'
+    timeout-minutes: 5
+    steps:
+      - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
+        with:
+          fetch-depth: 0
+
+      - uses: oven-sh/setup-bun@ecf28ddc73e819eb6fa29df6b34ef8921c743461 # v2
+        with:
+          bun-version: "1.3.10"
+
+      - name: Add upstream remote
+        run: |
+          git remote add upstream https://github.com/anomalyco/opencode.git || true
+          git fetch upstream --quiet
+
+      - name: Install merge tooling deps
+        run: bun install
+        working-directory: script/upstream
+
+      - name: Check for missing altimate_change markers
+        run: |
+          # Skip strict marker enforcement for upstream merge PRs — all changes come from upstream
+          if [[ "${{ github.head_ref }}" == merge-upstream-* ]] || [[ "${{ github.head_ref }}" == upstream/merge-* ]]; then
+            echo "Upstream merge PR detected — running marker check in non-strict mode"
+            bun run script/upstream/analyze.ts --markers --base ${{ github.event.pull_request.base.ref }}
+          else
+            bun run script/upstream/analyze.ts --markers --base ${{ github.event.pull_request.base.ref }} --strict
+          fi
+
   lint:
     name: Lint
     runs-on: ubuntu-latest
diff --git a/.github/workflows/opencode.yml b/.github/workflows/opencode.yml
index da17f70a9a..d913d7270b 100644
--- a/.github/workflows/opencode.yml
+++ b/.github/workflows/opencode.yml
@@ -9,10 +9,10 @@ on:
 jobs:
   altimate-code:
     if: |
-      contains(github.event.comment.body, ' /oc') ||
-      startsWith(github.event.comment.body, '/oc') ||
-      contains(github.event.comment.body, ' /opencode') ||
-      startsWith(github.event.comment.body, '/opencode')
+      
contains(github.event.comment.body, ' /altimate') || + startsWith(github.event.comment.body, '/altimate') || + contains(github.event.comment.body, ' /ac') || + startsWith(github.event.comment.body, '/ac') runs-on: blacksmith-4vcpu-ubuntu-2404 permissions: id-token: write @@ -31,4 +31,4 @@ jobs: OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }} OPENCODE_PERMISSION: '{"bash": "deny"}' with: - model: opencode/claude-opus-4-5 + model: anthropic/claude-opus-4-5 diff --git a/.gitignore b/.gitignore index bf78c046d4..4d1a0cdf96 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ .DS_Store node_modules +__pycache__ .worktrees .sst .env @@ -17,7 +18,7 @@ ts-dist /result refs Session.vim -opencode.json +/opencode.json a.out target .scripts diff --git a/.opencode/.gitignore b/.opencode/.gitignore index 00bfdfda29..03445edaf2 100644 --- a/.opencode/.gitignore +++ b/.opencode/.gitignore @@ -1,3 +1,4 @@ plans/ bun.lock package.json +package-lock.json diff --git a/.opencode/agent/docs.md b/.opencode/agent/docs.md deleted file mode 100644 index 21cfc6a16e..0000000000 --- a/.opencode/agent/docs.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -description: ALWAYS use this when writing docs -color: "#38A3EE" ---- - -You are an expert technical documentation writer - -You are not verbose - -Use a relaxed and friendly tone - -The title of the page should be a word or a 2-3 word phrase - -The description should be one short line, should not start with "The", should -avoid repeating the title of the page, should be 5-10 words long - -Chunks of text should not be more than 2 sentences long - -Each section is separated by a divider of 3 dashes - -The section titles are short with only the first letter of the word capitalized - -The section titles are in the imperative mood - -The section titles should not repeat the term used in the page title, for -example, if the page title is "Models", avoid using a section title like "Add -new models". 
This might be unavoidable in some cases, but try to avoid it. - -Check out the /packages/web/src/content/docs/docs/index.mdx as an example. - -For JS or TS code snippets remove trailing semicolons and any trailing commas -that might not be needed. - -If you are making a commit prefix the commit message with `docs:` diff --git a/.opencode/agent/duplicate-pr.md b/.opencode/agent/duplicate-pr.md deleted file mode 100644 index c9c932ef79..0000000000 --- a/.opencode/agent/duplicate-pr.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -mode: primary -hidden: true -model: opencode/claude-haiku-4-5 -color: "#E67E22" -tools: - "*": false - "github-pr-search": true ---- - -You are a duplicate PR detection agent. When a PR is opened, your job is to search for potentially duplicate or related open PRs. - -Use the github-pr-search tool to search for PRs that might be addressing the same issue or feature. - -IMPORTANT: The input will contain a line `CURRENT_PR_NUMBER: NNNN`. This is the current PR number, you should not mark that the current PR as a duplicate of itself. - -Search using keywords from the PR title and description. Try multiple searches with different relevant terms. - -If you find potential duplicates: - -- List them with their titles and URLs -- Briefly explain why they might be related - -If no duplicates are found, say so clearly. BUT ONLY SAY "No duplicate PRs found" (don't say anything else if no dups) - -Keep your response concise and actionable. diff --git a/.opencode/agent/translator.md b/.opencode/agent/translator.md deleted file mode 100644 index 263afbe9b5..0000000000 --- a/.opencode/agent/translator.md +++ /dev/null @@ -1,900 +0,0 @@ ---- -description: Translate content for a specified locale while preserving technical terms -mode: subagent -model: opencode/gemini-3-pro ---- - -You are a professional translator and localization specialist. - -Translate the user's content into the requested target locale (language + region, e.g. fr-FR, de-DE). 
- -Requirements: - -- Preserve meaning, intent, tone, and formatting (including Markdown/MDX structure). -- Preserve all technical terms and artifacts exactly: product/company names, API names, identifiers, code, commands/flags, file paths, URLs, versions, error messages, config keys/values, and anything inside inline code or code blocks. -- Also preserve every term listed in the Do-Not-Translate glossary below. -- Also apply locale-specific guidance from `.opencode/glossary/.md` when available (for example, `zh-cn.md`). -- Do not modify fenced code blocks. -- Output ONLY the translation (no commentary). - -If the target locale is missing, ask the user to provide it. -If no locale-specific glossary exists, use the global glossary only. - ---- - -# Locale-Specific Glossaries - -When a locale glossary exists, use it to: - -- Apply preferred wording for recurring UI/docs terms in that locale -- Preserve locale-specific do-not-translate terms and casing decisions -- Prefer natural phrasing over literal translation when the locale file calls it out -- If the repo uses a locale alias slug, apply that file too (for example, `pt-BR` maps to `br.md` in this repo) - -Locale guidance does not override code/command preservation rules or the global Do-Not-Translate glossary below. - ---- - -# Do-Not-Translate Terms (OpenCode Docs) - -Generated from: `packages/web/src/content/docs/*.mdx` (default English docs) -Generated on: 2026-02-10 - -Use this as a translation QA checklist / glossary. Preserve listed terms exactly (spelling, casing, punctuation). - -General rules (verbatim, even if not listed below): - -- Anything inside inline code (single backticks) or fenced code blocks (triple backticks) -- MDX/JS code in docs: `import ... 
from "..."`, component tags, identifiers -- CLI commands, flags, config keys/values, file paths, URLs/domains, and env vars - -## Proper nouns and product names - -Additional (not reliably captured via link text): - -```text -Astro -Bun -Chocolatey -Cursor -Docker -Git -GitHub Actions -GitLab CI -GNOME Terminal -Homebrew -Mise -Neovim -Node.js -npm -Obsidian -opencode -opencode-ai -Paru -pnpm -ripgrep -Scoop -SST -Starlight -Visual Studio Code -VS Code -VSCodium -Windsurf -Windows Terminal -Yarn -Zellij -Zed -anomalyco -``` - -Extracted from link labels in the English docs (review and prune as desired): - -```text -@openspoon/subtask2 -302.AI console -ACP progress report -Agent Client Protocol -Agent Skills -Agentic -AGENTS.md -AI SDK -Alacritty -Anthropic -Anthropic's Data Policies -Atom One -Avante.nvim -Ayu -Azure AI Foundry -Azure portal -Baseten -built-in GITHUB_TOKEN -Bun.$ -Catppuccin -Cerebras console -ChatGPT Plus or Pro -Cloudflare dashboard -CodeCompanion.nvim -CodeNomad -Configuring Adapters: Environment Variables -Context7 MCP server -Cortecs console -Deep Infra dashboard -DeepSeek console -Duo Agent Platform -Everforest -Fireworks AI console -Firmware dashboard -Ghostty -GitLab CLI agents docs -GitLab docs -GitLab User Settings > Access Tokens -Granular Rules (Object Syntax) -Grep by Vercel -Groq console -Gruvbox -Helicone -Helicone documentation -Helicone Header Directory -Helicone's Model Directory -Hugging Face Inference Providers -Hugging Face settings -install WSL -IO.NET console -JetBrains IDE -Kanagawa -Kitty -MiniMax API Console -Models.dev -Moonshot AI console -Nebius Token Factory console -Nord -OAuth -Ollama integration docs -OpenAI's Data Policies -OpenChamber -OpenCode -OpenCode config -OpenCode Config -OpenCode TUI with the opencode theme -OpenCode Web - Active Session -OpenCode Web - New Session -OpenCode Web - See Servers -OpenCode Zen -OpenCode-Obsidian -OpenRouter dashboard -OpenWork -OVHcloud panel -Pro+ subscription -SAP BTP 
Cockpit -Scaleway Console IAM settings -Scaleway Generative APIs -SDK documentation -Sentry MCP server -shell API -Together AI console -Tokyonight -Unified Billing -Venice AI console -Vercel dashboard -WezTerm -Windows Subsystem for Linux (WSL) -WSL -WSL (Windows Subsystem for Linux) -WSL extension -xAI console -Z.AI API console -Zed -ZenMux dashboard -Zod -``` - -## Acronyms and initialisms - -```text -ACP -AGENTS -AI -AI21 -ANSI -API -AST -AWS -BTP -CD -CDN -CI -CLI -CMD -CORS -DEBUG -EKS -ERROR -FAQ -GLM -GNOME -GPT -HTML -HTTP -HTTPS -IAM -ID -IDE -INFO -IO -IP -IRSA -JS -JSON -JSONC -K2 -LLM -LM -LSP -M2 -MCP -MR -NET -NPM -NTLM -OIDC -OS -PAT -PATH -PHP -PR -PTY -README -RFC -RPC -SAP -SDK -SKILL -SSE -SSO -TS -TTY -TUI -UI -URL -US -UX -VCS -VPC -VPN -VS -WARN -WSL -X11 -YAML -``` - -## Code identifiers used in prose (CamelCase, mixedCase) - -```text -apiKey -AppleScript -AssistantMessage -baseURL -BurntSushi -ChatGPT -ClangFormat -CodeCompanion -CodeNomad -DeepSeek -DefaultV2 -FileContent -FileDiff -FileNode -fineGrained -FormatterStatus -GitHub -GitLab -iTerm2 -JavaScript -JetBrains -macOS -mDNS -MiniMax -NeuralNomadsAI -NickvanDyke -NoeFabris -OpenAI -OpenAPI -OpenChamber -OpenCode -OpenRouter -OpenTUI -OpenWork -ownUserPermissions -PowerShell -ProviderAuthAuthorization -ProviderAuthMethod -ProviderInitError -SessionStatus -TabItem -tokenType -ToolIDs -ToolList -TypeScript -typesUrl -UserMessage -VcsInfo -WebView2 -WezTerm -xAI -ZenMux -``` - -## OpenCode CLI commands (as shown in docs) - -```text -opencode -opencode [project] -opencode /path/to/project -opencode acp -opencode agent [command] -opencode agent create -opencode agent list -opencode attach [url] -opencode attach http://10.20.30.40:4096 -opencode attach http://localhost:4096 -opencode auth [command] -opencode auth list -opencode auth login -opencode auth logout -opencode auth ls -opencode export [sessionID] -opencode github [command] -opencode github install -opencode github run -opencode 
import -opencode import https://opncd.ai/s/abc123 -opencode import session.json -opencode mcp [command] -opencode mcp add -opencode mcp auth [name] -opencode mcp auth list -opencode mcp auth ls -opencode mcp auth my-oauth-server -opencode mcp auth sentry -opencode mcp debug -opencode mcp debug my-oauth-server -opencode mcp list -opencode mcp logout [name] -opencode mcp logout my-oauth-server -opencode mcp ls -opencode models --refresh -opencode models [provider] -opencode models anthropic -opencode run [message..] -opencode run Explain the use of context in Go -opencode serve -opencode serve --cors http://localhost:5173 --cors https://app.example.com -opencode serve --hostname 0.0.0.0 --port 4096 -opencode serve [--port ] [--hostname ] [--cors ] -opencode session [command] -opencode session list -opencode session delete -opencode stats -opencode uninstall -opencode upgrade -opencode upgrade [target] -opencode upgrade v0.1.48 -opencode web -opencode web --cors https://example.com -opencode web --hostname 0.0.0.0 -opencode web --mdns -opencode web --mdns --mdns-domain myproject.local -opencode web --port 4096 -opencode web --port 4096 --hostname 0.0.0.0 -opencode.server.close() -``` - -## Slash commands and routes - -```text -/agent -/auth/:id -/clear -/command -/config -/config/providers -/connect -/continue -/doc -/editor -/event -/experimental/tool?provider=

&model= -/experimental/tool/ids -/export -/file?path= -/file/content?path=

-/file/status -/find?pattern= -/find/file -/find/file?query= -/find/symbol?query= -/formatter -/global/event -/global/health -/help -/init -/instance/dispose -/log -/lsp -/mcp -/mnt/ -/mnt/c/ -/mnt/d/ -/models -/oc -/opencode -/path -/project -/project/current -/provider -/provider/{id}/oauth/authorize -/provider/{id}/oauth/callback -/provider/auth -/q -/quit -/redo -/resume -/session -/session/:id -/session/:id/abort -/session/:id/children -/session/:id/command -/session/:id/diff -/session/:id/fork -/session/:id/init -/session/:id/message -/session/:id/message/:messageID -/session/:id/permissions/:permissionID -/session/:id/prompt_async -/session/:id/revert -/session/:id/share -/session/:id/shell -/session/:id/summarize -/session/:id/todo -/session/:id/unrevert -/session/status -/share -/summarize -/theme -/tui -/tui/append-prompt -/tui/clear-prompt -/tui/control/next -/tui/control/response -/tui/execute-command -/tui/open-help -/tui/open-models -/tui/open-sessions -/tui/open-themes -/tui/show-toast -/tui/submit-prompt -/undo -/Users/username -/Users/username/projects/* -/vcs -``` - -## CLI flags and short options - -```text ---agent ---attach ---command ---continue ---cors ---cwd ---days ---dir ---dry-run ---event ---file ---force ---fork ---format ---help ---hostname ---hostname 0.0.0.0 ---keep-config ---keep-data ---log-level ---max-count ---mdns ---mdns-domain ---method ---model ---models ---port ---print-logs ---project ---prompt ---refresh ---session ---share ---title ---token ---tools ---verbose ---version ---wait - --c --d --f --h --m --n --s --v -``` - -## Environment variables - -```text -AI_API_URL -AI_FLOW_CONTEXT -AI_FLOW_EVENT -AI_FLOW_INPUT -AICORE_DEPLOYMENT_ID -AICORE_RESOURCE_GROUP -AICORE_SERVICE_KEY -ANTHROPIC_API_KEY -AWS_ACCESS_KEY_ID -AWS_BEARER_TOKEN_BEDROCK -AWS_PROFILE -AWS_REGION -AWS_ROLE_ARN -AWS_SECRET_ACCESS_KEY -AWS_WEB_IDENTITY_TOKEN_FILE -AZURE_COGNITIVE_SERVICES_RESOURCE_NAME -AZURE_RESOURCE_NAME -CI_PROJECT_DIR -CI_SERVER_FQDN 
-CI_WORKLOAD_REF -CLOUDFLARE_ACCOUNT_ID -CLOUDFLARE_API_TOKEN -CLOUDFLARE_GATEWAY_ID -CONTEXT7_API_KEY -GITHUB_TOKEN -GITLAB_AI_GATEWAY_URL -GITLAB_HOST -GITLAB_INSTANCE_URL -GITLAB_OAUTH_CLIENT_ID -GITLAB_TOKEN -GITLAB_TOKEN_OPENCODE -GOOGLE_APPLICATION_CREDENTIALS -GOOGLE_CLOUD_PROJECT -HTTP_PROXY -HTTPS_PROXY -K2_ -MY_API_KEY -MY_ENV_VAR -MY_MCP_CLIENT_ID -MY_MCP_CLIENT_SECRET -NO_PROXY -NODE_ENV -NODE_EXTRA_CA_CERTS -NPM_AUTH_TOKEN -OC_ALLOW_WAYLAND -OPENCODE_API_KEY -OPENCODE_AUTH_JSON -OPENCODE_AUTO_SHARE -OPENCODE_CLIENT -OPENCODE_CONFIG -OPENCODE_CONFIG_CONTENT -OPENCODE_CONFIG_DIR -OPENCODE_DISABLE_AUTOCOMPACT -OPENCODE_DISABLE_AUTOUPDATE -OPENCODE_DISABLE_CLAUDE_CODE -OPENCODE_DISABLE_CLAUDE_CODE_PROMPT -OPENCODE_DISABLE_CLAUDE_CODE_SKILLS -OPENCODE_DISABLE_DEFAULT_PLUGINS -OPENCODE_DISABLE_FILETIME_CHECK -OPENCODE_DISABLE_LSP_DOWNLOAD -OPENCODE_DISABLE_MODELS_FETCH -OPENCODE_DISABLE_PRUNE -OPENCODE_DISABLE_TERMINAL_TITLE -OPENCODE_ENABLE_EXA -OPENCODE_ENABLE_EXPERIMENTAL_MODELS -OPENCODE_EXPERIMENTAL -OPENCODE_EXPERIMENTAL_BASH_DEFAULT_TIMEOUT_MS -OPENCODE_EXPERIMENTAL_DISABLE_COPY_ON_SELECT -OPENCODE_EXPERIMENTAL_DISABLE_FILEWATCHER -OPENCODE_EXPERIMENTAL_EXA -OPENCODE_EXPERIMENTAL_FILEWATCHER -OPENCODE_EXPERIMENTAL_ICON_DISCOVERY -OPENCODE_EXPERIMENTAL_LSP_TOOL -OPENCODE_EXPERIMENTAL_LSP_TY -OPENCODE_EXPERIMENTAL_MARKDOWN -OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX -OPENCODE_EXPERIMENTAL_OXFMT -OPENCODE_EXPERIMENTAL_PLAN_MODE -OPENCODE_ENABLE_QUESTION_TOOL -OPENCODE_FAKE_VCS -OPENCODE_GIT_BASH_PATH -OPENCODE_MODEL -OPENCODE_MODELS_URL -OPENCODE_PERMISSION -OPENCODE_PORT -OPENCODE_SERVER_PASSWORD -OPENCODE_SERVER_USERNAME -PROJECT_ROOT -RESOURCE_NAME -RUST_LOG -VARIABLE_NAME -VERTEX_LOCATION -XDG_CONFIG_HOME -``` - -## Package/module identifiers - -```text -../../../config.mjs -@astrojs/starlight/components -@opencode-ai/plugin -@opencode-ai/sdk -path -shescape -zod - -@ -@ai-sdk/anthropic -@ai-sdk/cerebras -@ai-sdk/google -@ai-sdk/openai 
-@ai-sdk/openai-compatible -@File#L37-42 -@modelcontextprotocol/server-everything -@opencode -``` - -## GitHub owner/repo slugs referenced in docs - -```text -24601/opencode-zellij-namer -angristan/opencode-wakatime -anomalyco/opencode -apps/opencode-agent -athal7/opencode-devcontainers -awesome-opencode/awesome-opencode -backnotprop/plannotator -ben-vargas/ai-sdk-provider-opencode-sdk -btriapitsyn/openchamber -BurntSushi/ripgrep -Cluster444/agentic -code-yeongyu/oh-my-opencode -darrenhinde/opencode-agents -different-ai/opencode-scheduler -different-ai/openwork -features/copilot -folke/tokyonight.nvim -franlol/opencode-md-table-formatter -ggml-org/llama.cpp -ghoulr/opencode-websearch-cited.git -H2Shami/opencode-helicone-session -hosenur/portal -jamesmurdza/daytona -jenslys/opencode-gemini-auth -JRedeker/opencode-morph-fast-apply -JRedeker/opencode-shell-strategy -kdcokenny/ocx -kdcokenny/opencode-background-agents -kdcokenny/opencode-notify -kdcokenny/opencode-workspace -kdcokenny/opencode-worktree -login/device -mohak34/opencode-notifier -morhetz/gruvbox -mtymek/opencode-obsidian -NeuralNomadsAI/CodeNomad -nick-vi/opencode-type-inject -NickvanDyke/opencode.nvim -NoeFabris/opencode-antigravity-auth -nordtheme/nord -numman-ali/opencode-openai-codex-auth -olimorris/codecompanion.nvim -panta82/opencode-notificator -rebelot/kanagawa.nvim -remorses/kimaki -sainnhe/everforest -shekohex/opencode-google-antigravity-auth -shekohex/opencode-pty.git -spoons-and-mirrors/subtask2 -sudo-tee/opencode.nvim -supermemoryai/opencode-supermemory -Tarquinen/opencode-dynamic-context-pruning -Th3Whit3Wolf/one-nvim -upstash/context7 -vtemian/micode -vtemian/octto -yetone/avante.nvim -zenobi-us/opencode-plugin-template -zenobi-us/opencode-skillful -``` - -## Paths, filenames, globs, and URLs - -```text -./.opencode/themes/*.json -.//storage/ -./config/#custom-directory -./global/storage/ -.agents/skills/*/SKILL.md -.agents/skills//SKILL.md -.clang-format -.claude -.claude/skills 
-.claude/skills/*/SKILL.md -.claude/skills//SKILL.md -.env -.github/workflows/opencode.yml -.gitignore -.gitlab-ci.yml -.ignore -.NET SDK -.npmrc -.ocamlformat -.opencode -.opencode/ -.opencode/agents/ -.opencode/commands/ -.opencode/commands/test.md -.opencode/modes/ -.opencode/plans/*.md -.opencode/plugins/ -.opencode/skills//SKILL.md -.opencode/skills/git-release/SKILL.md -.opencode/tools/ -.well-known/opencode -{ type: "raw" \| "patch", content: string } -{file:path/to/file} -**/*.js -%USERPROFILE%/intelephense/license.txt -%USERPROFILE%\.cache\opencode -%USERPROFILE%\.config\opencode\opencode.jsonc -%USERPROFILE%\.config\opencode\plugins -%USERPROFILE%\.local\share\opencode -%USERPROFILE%\.local\share\opencode\log -/.opencode/themes/*.json -/ -/.opencode/plugins/ -~ -~/... -~/.agents/skills/*/SKILL.md -~/.agents/skills//SKILL.md -~/.aws/credentials -~/.bashrc -~/.cache/opencode -~/.cache/opencode/node_modules/ -~/.claude/CLAUDE.md -~/.claude/skills/ -~/.claude/skills/*/SKILL.md -~/.claude/skills//SKILL.md -~/.config/opencode -~/.config/opencode/AGENTS.md -~/.config/opencode/agents/ -~/.config/opencode/commands/ -~/.config/opencode/modes/ -~/.config/opencode/opencode.json -~/.config/opencode/opencode.jsonc -~/.config/opencode/plugins/ -~/.config/opencode/skills/*/SKILL.md -~/.config/opencode/skills//SKILL.md -~/.config/opencode/themes/*.json -~/.config/opencode/tools/ -~/.config/zed/settings.json -~/.local/share -~/.local/share/opencode/ -~/.local/share/opencode/auth.json -~/.local/share/opencode/log/ -~/.local/share/opencode/mcp-auth.json -~/.local/share/opencode/opencode.jsonc -~/.npmrc -~/.zshrc -~/code/ -~/Library/Application Support -~/projects/* -~/projects/personal/ -${config.github}/blob/dev/packages/sdk/js/src/gen/types.gen.ts -$HOME/intelephense/license.txt -$HOME/projects/* -$XDG_CONFIG_HOME/opencode/themes/*.json -agent/ -agents/ -build/ -commands/ -dist/ -http://:4096 -http://127.0.0.1:8080/callback -http://localhost: -http://localhost:4096 
-http://localhost:4096/doc -https://app.example.com -https://AZURE_COGNITIVE_SERVICES_RESOURCE_NAME.cognitiveservices.azure.com/ -https://opencode.ai/zen/v1/chat/completions -https://opencode.ai/zen/v1/messages -https://opencode.ai/zen/v1/models/gemini-3-flash -https://opencode.ai/zen/v1/models/gemini-3-pro -https://opencode.ai/zen/v1/responses -https://RESOURCE_NAME.openai.azure.com/ -laravel/pint -log/ -model: "anthropic/claude-sonnet-4-5" -modes/ -node_modules/ -openai/gpt-4.1 -opencode.ai/config.json -opencode/ -opencode/gpt-5.1-codex -opencode/gpt-5.2-codex -opencode/kimi-k2 -openrouter/google/gemini-2.5-flash -opncd.ai/s/ -packages/*/AGENTS.md -plugins/ -project/ -provider_id/model_id -provider/model -provider/model-id -rm -rf ~/.cache/opencode -skills/ -skills/*/SKILL.md -src/**/*.ts -themes/ -tools/ -``` - -## Keybind strings - -```text -alt+b -Alt+Ctrl+K -alt+d -alt+f -Cmd+Esc -Cmd+Option+K -Cmd+Shift+Esc -Cmd+Shift+G -Cmd+Shift+P -ctrl+a -ctrl+b -ctrl+d -ctrl+e -Ctrl+Esc -ctrl+f -ctrl+g -ctrl+k -Ctrl+Shift+Esc -Ctrl+Shift+P -ctrl+t -ctrl+u -ctrl+w -ctrl+x -DELETE -Shift+Enter -WIN+R -``` - -## Model ID strings referenced - -```text -{env:OPENCODE_MODEL} -anthropic/claude-3-5-sonnet-20241022 -anthropic/claude-haiku-4-20250514 -anthropic/claude-haiku-4-5 -anthropic/claude-sonnet-4-20250514 -anthropic/claude-sonnet-4-5 -gitlab/duo-chat-haiku-4-5 -lmstudio/google/gemma-3n-e4b -openai/gpt-4.1 -openai/gpt-5 -opencode/gpt-5.1-codex -opencode/gpt-5.2-codex -opencode/kimi-k2 -openrouter/google/gemini-2.5-flash -``` diff --git a/.opencode/agent/triage.md b/.opencode/agent/triage.md deleted file mode 100644 index a77b92737b..0000000000 --- a/.opencode/agent/triage.md +++ /dev/null @@ -1,140 +0,0 @@ ---- -mode: primary -hidden: true -model: opencode/minimax-m2.5 -color: "#44BA81" -tools: - "*": false - "github-triage": true ---- - -You are a triage agent responsible for triaging github issues. - -Use your github-triage tool to triage issues. 
- -This file is the source of truth for ownership/routing rules. - -## Labels - -### windows - -Use for any issue that mentions Windows (the OS). Be sure they are saying that they are on Windows. - -- Use if they mention WSL too - -#### perf - -Performance-related issues: - -- Slow performance -- High RAM usage -- High CPU usage - -**Only** add if it's likely a RAM or CPU issue. **Do not** add for LLM slowness. - -#### desktop - -Desktop app issues: - -- `opencode web` command -- The desktop app itself - -**Only** add if it's specifically about the Desktop application or `opencode web` view. **Do not** add for terminal, TUI, or general opencode issues. - -#### nix - -**Only** add if the issue explicitly mentions nix. - -If the issue does not mention nix, do not add nix. - -If the issue mentions nix, assign to `rekram1-node`. - -#### zen - -**Only** add if the issue mentions "zen" or "opencode zen" or "opencode black". - -If the issue doesn't have "zen" or "opencode black" in it then don't add zen label - -#### core - -Use for core server issues in `packages/opencode/`, excluding `packages/opencode/src/cli/cmd/tui/`. - -Examples: - -- LSP server behavior -- Harness behavior (agent + tools) -- Feature requests for server behavior -- Agent context construction -- API endpoints -- Provider integration issues -- New, broken, or poor-quality models - -#### acp - -If the issue mentions acp support, assign acp label. - -#### docs - -Add if the issue requests better documentation or docs updates. - -#### opentui - -TUI issues potentially caused by our underlying TUI library: - -- Keybindings not working -- Scroll speed issues (too fast/slow/laggy) -- Screen flickering -- Crashes with opentui in the log - -**Do not** add for general TUI bugs. - -When assigning to people here are the following rules: - -Desktop / Web: -Use for desktop-labeled issues only. - -- adamdotdevin -- iamdavidhill -- Brendonovich -- nexxeln - -Zen: -ONLY assign if the issue will have the "zen" label. 
- -- fwang -- MrMushrooooom - -TUI (`packages/opencode/src/cli/cmd/tui/...`): - -- thdxr for TUI UX/UI product decisions and interaction flow -- kommander for OpenTUI engine issues: rendering artifacts, keybind handling, terminal compatibility, SSH behavior, and low-level perf bottlenecks -- rekram1-node for TUI bugs that are not clearly OpenTUI engine issues - -Core (`packages/opencode/...`, excluding TUI subtree): - -- thdxr for sqlite/snapshot/memory bugs and larger architectural core features -- jlongster for opencode server + API feature work (tool currently remaps jlongster -> thdxr until assignable) -- rekram1-node for harness issues, provider issues, and other bug-squashing - -For core bugs that do not clearly map, either thdxr or rekram1-node is acceptable. - -Docs: - -- R44VC0RP - -Windows: - -- Hona (assign any issue that mentions Windows or is likely Windows-specific) - -Determinism rules: - -- If title + body does not contain "zen", do not add the "zen" label -- If "nix" label is added but title + body does not mention nix/nixos, the tool will drop "nix" -- If title + body mentions nix/nixos, assign to `rekram1-node` -- If "desktop" label is added, the tool will override assignee and randomly pick one Desktop / Web owner - -In all other cases, choose the team/section with the most overlap with the issue and assign a member from that team at random. - -ACP: - -- rekram1-node (assign any acp issues to rekram1-node) diff --git a/.opencode/command/ai-deps.md b/.opencode/command/ai-deps.md deleted file mode 100644 index 83783d5b9b..0000000000 --- a/.opencode/command/ai-deps.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -description: "Bump AI sdk dependencies minor / patch versions only" ---- - -Please read @package.json and @packages/opencode/package.json. - -Your job is to look into AI SDK dependencies, figure out if they have versions that can be upgraded (minor or patch versions ONLY no major ignore major changes). 
- -I want a report of every dependency and the version that can be upgraded to. -What would be even better is if you can give me brief summary of the changes for each dep and a link to the changelog for each dependency, or at least some reference info so I can see what bugs were fixed or new features were added. - -Consider using subagents for each dep to save your context window. - -Here is a short list of some deps (please be comprehensive tho): - -- "ai" -- "@ai-sdk/openai" -- "@ai-sdk/anthropic" -- "@openrouter/ai-sdk-provider" -- etc, etc - -DO NOT upgrade the dependencies yet, just make a list of all dependencies and their versions that can be upgraded to minor or patch versions only. - -Write up your findings to ai-sdk-updates.md diff --git a/.opencode/command/issues.md b/.opencode/command/issues.md index 75b5961674..7422dbe791 100644 --- a/.opencode/command/issues.md +++ b/.opencode/command/issues.md @@ -3,7 +3,7 @@ description: "find issue(s) on github" model: opencode/claude-haiku-4-5 --- -Search through existing issues in anomalyco/opencode using the gh cli to find issues matching this query: +Search through existing issues in AltimateAI/altimate-code using the gh cli to find issues matching this query: $ARGUMENTS diff --git a/.opencode/command/rmslop.md b/.opencode/command/rmslop.md deleted file mode 100644 index 02c9fc0844..0000000000 --- a/.opencode/command/rmslop.md +++ /dev/null @@ -1,15 +0,0 @@ ---- -description: Remove AI code slop ---- - -Check the diff against dev, and remove all AI generated slop introduced in this branch. 
- -This includes: - -- Extra comments that a human wouldn't add or is inconsistent with the rest of the file -- Extra defensive checks or try/catch blocks that are abnormal for that area of the codebase (especially if called by trusted / validated codepaths) -- Casts to any to get around type issues -- Any other style that is inconsistent with the file -- Unnecessary emoji usage - -Report at the end with only a 1-3 sentence summary of what you changed diff --git a/.opencode/command/spellcheck.md b/.opencode/command/spellcheck.md deleted file mode 100644 index 0abf23c4fd..0000000000 --- a/.opencode/command/spellcheck.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -description: spellcheck all markdown file changes ---- - -Look at all the unstaged changes to markdown (.md, .mdx) files, pull out the lines that have changed, and check for spelling and grammar errors. diff --git a/.opencode/env.d.ts b/.opencode/env.d.ts deleted file mode 100644 index f2b13a934c..0000000000 --- a/.opencode/env.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -declare module "*.txt" { - const content: string - export default content -} diff --git a/.opencode/glossary/README.md b/.opencode/glossary/README.md deleted file mode 100644 index 983900381c..0000000000 --- a/.opencode/glossary/README.md +++ /dev/null @@ -1,63 +0,0 @@ -# Locale Glossaries - -Use this folder for locale-specific translation guidance that supplements `.opencode/agent/translator.md`. - -The global glossary in `translator.md` remains the source of truth for shared do-not-translate terms (commands, code, paths, product names, etc.). These locale files capture community learnings about phrasing and terminology preferences. 
- -## File Naming - -- One file per locale -- Use lowercase locale slugs that match docs locales when possible (for example, `zh-cn.md`, `zh-tw.md`) -- If only language-level guidance exists, use the language code (for example, `fr.md`) -- Some repo locale slugs may be aliases/non-BCP47 for consistency (for example, `br` for Brazilian Portuguese / `pt-BR`) - -## What To Put In A Locale File - -- **Sources**: PRs/issues/discussions that motivated the guidance -- **Do Not Translate (Locale Additions)**: locale-specific terms or casing decisions -- **Preferred Terms**: recurring UI/docs words with preferred translations -- **Guidance**: tone, style, and consistency notes -- **Avoid** (optional): common literal translations or wording we should avoid -- If the repo uses a locale alias slug, document the alias in **Guidance** (for example, prose may mention `pt-BR` while config/examples use `br`) - -Prefer guidance that is: - -- Repeated across multiple docs/screens -- Easy to apply consistently -- Backed by a community contribution or review discussion - -## Template - -```md -# Glossary - -## Sources - -- PR #12345: https://github.com/anomalyco/opencode/pull/12345 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing) - -## Preferred Terms - -| English | Preferred | Notes | -| ------- | --------- | --------- | -| prompt | ... | preferred | -| session | ... | preferred | - -## Guidance - -- Prefer natural phrasing over literal translation - -## Avoid - -- Avoid ... when ... 
-``` - -## Contribution Notes - -- Mark entries as preferred when they may evolve -- Keep examples short -- Add or update the `Sources` section whenever you add a new rule -- Prefer PR-backed guidance over invented term mappings; start with general guidance if no term-level corrections exist yet diff --git a/.opencode/glossary/ar.md b/.opencode/glossary/ar.md deleted file mode 100644 index 37355522a0..0000000000 --- a/.opencode/glossary/ar.md +++ /dev/null @@ -1,28 +0,0 @@ -# ar Glossary - -## Sources - -- PR #9947: https://github.com/anomalyco/opencode/pull/9947 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -No PR-backed term mappings yet. Add entries here when review PRs introduce repeated wording corrections. - -## Guidance - -- Prefer natural Arabic phrasing over literal translation -- Keep tone clear and direct in UI labels and docs prose -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths -- For RTL text, treat code, commands, and paths as LTR artifacts and keep their character order unchanged - -## Avoid - -- Avoid translating product and protocol names that are fixed identifiers -- Avoid mixing multiple Arabic terms for the same recurring UI action once a preferred term is established diff --git a/.opencode/glossary/br.md b/.opencode/glossary/br.md deleted file mode 100644 index fd3e7251cd..0000000000 --- a/.opencode/glossary/br.md +++ /dev/null @@ -1,34 +0,0 @@ -# br Glossary - -## Sources - -- PR #10086: https://github.com/anomalyco/opencode/pull/10086 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Locale code 
`br` in repo config, code, and paths (repo alias for Brazilian Portuguese) - -## Preferred Terms - -These are PR-backed locale naming preferences and may evolve. - -| English / Context | Preferred | Notes | -| ---------------------------------------- | ------------------------------ | ------------------------------------------------------------- | -| Brazilian Portuguese (prose locale name) | `pt-BR` | Use standard locale naming in prose when helpful | -| Repo locale slug (code/config) | `br` | PR #10086 uses `br` for consistency/simplicity | -| Browser locale detection | `pt`, `pt-br`, `pt-BR` -> `br` | Preserve this mapping in docs/examples about locale detection | - -## Guidance - -- This file covers Brazilian Portuguese (`pt-BR`), but the repo locale code is `br` -- Use natural Brazilian Portuguese phrasing over literal translation -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths -- Keep repo locale identifiers as implemented in code/config (`br`) even when prose mentions `pt-BR` - -## Avoid - -- Avoid changing repo locale code references from `br` to `pt-br` in code snippets, paths, or config examples -- Avoid mixing Portuguese variants when a Brazilian Portuguese form is established diff --git a/.opencode/glossary/bs.md b/.opencode/glossary/bs.md deleted file mode 100644 index aa3bd96f6f..0000000000 --- a/.opencode/glossary/bs.md +++ /dev/null @@ -1,33 +0,0 @@ -# bs Glossary - -## Sources - -- PR #12283: https://github.com/anomalyco/opencode/pull/12283 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -These are PR-backed locale naming preferences and may evolve. 
- -| English / Context | Preferred | Notes | -| ---------------------------------- | ---------- | ------------------------------------------------- | -| Bosnian language label (UI) | `Bosanski` | PR #12283 tested switching language to `Bosanski` | -| Repo locale slug (code/config) | `bs` | Preserve in code, config, paths, and examples | -| Browser locale detection (Bosnian) | `bs` | PR #12283 added `bs` locale auto-detection | - -## Guidance - -- Use natural Bosnian phrasing over literal translation -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths -- Keep repo locale references as `bs` in code/config, and use `Bosanski` for the user-facing language name when applicable - -## Avoid - -- Avoid changing repo locale references from `bs` to another slug in code snippets or config examples -- Avoid translating product and protocol names that are fixed identifiers diff --git a/.opencode/glossary/da.md b/.opencode/glossary/da.md deleted file mode 100644 index e632221701..0000000000 --- a/.opencode/glossary/da.md +++ /dev/null @@ -1,27 +0,0 @@ -# da Glossary - -## Sources - -- PR #9821: https://github.com/anomalyco/opencode/pull/9821 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -No PR-backed term mappings yet. Add entries here when review PRs introduce repeated wording corrections. 
- -## Guidance - -- Prefer natural Danish phrasing over literal translation -- Keep tone clear and direct in UI labels and docs prose -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths - -## Avoid - -- Avoid translating product and protocol names that are fixed identifiers -- Avoid mixing multiple Danish terms for the same recurring UI action once a preferred term is established diff --git a/.opencode/glossary/de.md b/.opencode/glossary/de.md deleted file mode 100644 index 0d2c49face..0000000000 --- a/.opencode/glossary/de.md +++ /dev/null @@ -1,27 +0,0 @@ -# de Glossary - -## Sources - -- PR #9817: https://github.com/anomalyco/opencode/pull/9817 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -No PR-backed term mappings yet. Add entries here when review PRs introduce repeated wording corrections. 
- -## Guidance - -- Prefer natural German phrasing over literal translation -- Keep tone clear and direct in UI labels and docs prose -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths - -## Avoid - -- Avoid translating product and protocol names that are fixed identifiers -- Avoid mixing multiple German terms for the same recurring UI action once a preferred term is established diff --git a/.opencode/glossary/es.md b/.opencode/glossary/es.md deleted file mode 100644 index dc9b977ecf..0000000000 --- a/.opencode/glossary/es.md +++ /dev/null @@ -1,27 +0,0 @@ -# es Glossary - -## Sources - -- PR #9817: https://github.com/anomalyco/opencode/pull/9817 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -No PR-backed term mappings yet. Add entries here when review PRs introduce repeated wording corrections. 
- -## Guidance - -- Prefer natural Spanish phrasing over literal translation -- Keep tone clear and direct in UI labels and docs prose -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths - -## Avoid - -- Avoid translating product and protocol names that are fixed identifiers -- Avoid mixing multiple Spanish terms for the same recurring UI action once a preferred term is established diff --git a/.opencode/glossary/fr.md b/.opencode/glossary/fr.md deleted file mode 100644 index 074c4de110..0000000000 --- a/.opencode/glossary/fr.md +++ /dev/null @@ -1,27 +0,0 @@ -# fr Glossary - -## Sources - -- PR #9821: https://github.com/anomalyco/opencode/pull/9821 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -No PR-backed term mappings yet. Add entries here when review PRs introduce repeated wording corrections. 
- -## Guidance - -- Prefer natural French phrasing over literal translation -- Keep tone clear and direct in UI labels and docs prose -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths - -## Avoid - -- Avoid translating product and protocol names that are fixed identifiers -- Avoid mixing multiple French terms for the same recurring UI action once a preferred term is established diff --git a/.opencode/glossary/ja.md b/.opencode/glossary/ja.md deleted file mode 100644 index f0159ca966..0000000000 --- a/.opencode/glossary/ja.md +++ /dev/null @@ -1,33 +0,0 @@ -# ja Glossary - -## Sources - -- PR #9821: https://github.com/anomalyco/opencode/pull/9821 -- PR #13160: https://github.com/anomalyco/opencode/pull/13160 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -These are PR-backed wording preferences and may evolve. 
- -| English / Context | Preferred | Notes | -| --------------------------- | ----------------------- | ------------------------------------- | -| WSL integration (UI label) | `WSL連携` | PR #13160 prefers this over `WSL統合` | -| WSL integration description | `WindowsのWSL環境で...` | PR #13160 improved phrasing naturally | - -## Guidance - -- Prefer natural Japanese phrasing over literal translation -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths -- In WSL integration text, follow PR #13160 wording direction for more natural Japanese phrasing - -## Avoid - -- Avoid `WSL統合` in the WSL integration UI context where `WSL連携` is the reviewed wording -- Avoid translating product and protocol names that are fixed identifiers diff --git a/.opencode/glossary/ko.md b/.opencode/glossary/ko.md deleted file mode 100644 index 71385c8a10..0000000000 --- a/.opencode/glossary/ko.md +++ /dev/null @@ -1,27 +0,0 @@ -# ko Glossary - -## Sources - -- PR #9817: https://github.com/anomalyco/opencode/pull/9817 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -No PR-backed term mappings yet. Add entries here when review PRs introduce repeated wording corrections. 
- -## Guidance - -- Prefer natural Korean phrasing over literal translation -- Keep tone clear and direct in UI labels and docs prose -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths - -## Avoid - -- Avoid translating product and protocol names that are fixed identifiers -- Avoid mixing multiple Korean terms for the same recurring UI action once a preferred term is established diff --git a/.opencode/glossary/no.md b/.opencode/glossary/no.md deleted file mode 100644 index d7159dca41..0000000000 --- a/.opencode/glossary/no.md +++ /dev/null @@ -1,38 +0,0 @@ -# no Glossary - -## Sources - -- PR #10018: https://github.com/anomalyco/opencode/pull/10018 -- PR #12935: https://github.com/anomalyco/opencode/pull/12935 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Sound names (PR #10018 notes these were intentionally left untranslated) - -## Preferred Terms - -These are PR-backed corrections and may evolve. 
- -| English / Context | Preferred | Notes | -| ----------------------------------- | ------------ | ----------------------------- | -| Save (data persistence action) | `Lagre` | Prefer over `Spare` | -| Disabled (feature/state) | `deaktivert` | Prefer over `funksjonshemmet` | -| API keys | `API Nøkler` | Prefer over `API Taster` | -| Cost (noun) | `Kostnad` | Prefer over verb form `Koste` | -| Show/View (imperative button label) | `Vis` | Prefer over `Utsikt` | - -## Guidance - -- Prefer natural Norwegian Bokmal (Bokmål) wording over literal translation -- Keep tone clear and practical in UI labels -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths -- Keep recurring UI terms consistent once a preferred term is chosen - -## Avoid - -- Avoid `Spare` for save actions in persistence contexts -- Avoid `funksjonshemmet` for disabled feature states -- Avoid `API Taster`, `Koste`, and `Utsikt` in the corrected contexts above diff --git a/.opencode/glossary/pl.md b/.opencode/glossary/pl.md deleted file mode 100644 index e9bad7a515..0000000000 --- a/.opencode/glossary/pl.md +++ /dev/null @@ -1,27 +0,0 @@ -# pl Glossary - -## Sources - -- PR #9884: https://github.com/anomalyco/opencode/pull/9884 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -No PR-backed term mappings yet. Add entries here when review PRs introduce repeated wording corrections. 
- -## Guidance - -- Prefer natural Polish phrasing over literal translation -- Keep tone clear and direct in UI labels and docs prose -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths - -## Avoid - -- Avoid translating product and protocol names that are fixed identifiers -- Avoid mixing multiple Polish terms for the same recurring UI action once a preferred term is established diff --git a/.opencode/glossary/ru.md b/.opencode/glossary/ru.md deleted file mode 100644 index 6fee0f94c0..0000000000 --- a/.opencode/glossary/ru.md +++ /dev/null @@ -1,27 +0,0 @@ -# ru Glossary - -## Sources - -- PR #9882: https://github.com/anomalyco/opencode/pull/9882 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -No PR-backed term mappings yet. Add entries here when review PRs introduce repeated wording corrections. 
- -## Guidance - -- Prefer natural Russian phrasing over literal translation -- Keep tone clear and direct in UI labels and docs prose -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths - -## Avoid - -- Avoid translating product and protocol names that are fixed identifiers -- Avoid mixing multiple Russian terms for the same recurring UI action once a preferred term is established diff --git a/.opencode/glossary/th.md b/.opencode/glossary/th.md deleted file mode 100644 index 7b5a31d16b..0000000000 --- a/.opencode/glossary/th.md +++ /dev/null @@ -1,34 +0,0 @@ -# th Glossary - -## Sources - -- PR #10809: https://github.com/anomalyco/opencode/pull/10809 -- PR #11496: https://github.com/anomalyco/opencode/pull/11496 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only in commands, package names, paths, or code) -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -These are PR-backed preferences and may evolve. 
- -| English / Context | Preferred | Notes | -| ------------------------------------- | --------------------- | -------------------------------------------------------------------------------- | -| Thai language label in language lists | `ไทย` | PR #10809 standardized this across locales | -| Language names in language pickers | Native names (static) | PR #11496: keep names like `English`, `Deutsch`, `ไทย` consistent across locales | - -## Guidance - -- Prefer natural Thai phrasing over literal translation -- Keep tone short and clear for buttons and labels -- Preserve technical artifacts exactly: commands, flags, code, URLs, model IDs, and file paths -- Keep language names static/native in language pickers instead of translating them per current locale (PR #11496) - -## Avoid - -- Avoid translating language names differently per current locale in language lists -- Avoid changing `ไทย` to another display form for the Thai language option unless the product standard changes diff --git a/.opencode/glossary/tr.md b/.opencode/glossary/tr.md deleted file mode 100644 index 72b1cdfb40..0000000000 --- a/.opencode/glossary/tr.md +++ /dev/null @@ -1,38 +0,0 @@ -# tr Glossary - -## Sources - -- PR #15835: https://github.com/anomalyco/opencode/pull/15835 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose, docs, and UI copy) -- Keep lowercase `opencode` in commands, package names, paths, URLs, and other exact identifiers -- `` stays the literal key token in code blocks; use `Tab` for the nearby explanatory label in prose -- Commands, flags, file paths, and code literals (keep exactly as written) - -## Preferred Terms - -These are PR-backed wording preferences and may evolve. 
- -| English / Context | Preferred | Notes | -| ------------------------- | --------------------------------------- | ------------------------------------------------------------- | -| available in beta | `beta olarak mevcut` | Prefer this over `beta olarak kullanılabilir` | -| privacy-first | `Gizlilik öncelikli tasarlandı` | Prefer this over `Önce gizlilik için tasarlandı` | -| connect your local models | `yerel modellerinizi bağlayabilirsiniz` | Use the fuller, more direct action phrase | -| `` key label | `Tab` | Use `Tab` in prose; keep `` in literal UI or code blocks | -| cross-platform | `cross-platform (tüm platformlarda)` | Keep the English term, add a short clarification when helpful | - -## Guidance - -- Prefer natural Turkish phrasing over literal translation -- Merge broken sentence fragments into one clear sentence when the source is a single thought -- Keep product naming consistent: `OpenCode` in prose, `opencode` only for exact technical identifiers -- When an English technical term is intentionally kept, add a short Turkish clarification only if it improves readability - -## Avoid - -- Avoid `beta olarak kullanılabilir` when `beta olarak mevcut` fits -- Avoid `Önce gizlilik için tasarlandı`; use the more natural reviewed wording instead -- Avoid `Sekme` for the translated key label in prose when referring to `` -- Avoid changing `opencode` to `OpenCode` inside commands, URLs, package names, or code literals diff --git a/.opencode/glossary/zh-cn.md b/.opencode/glossary/zh-cn.md deleted file mode 100644 index 054e94b7e8..0000000000 --- a/.opencode/glossary/zh-cn.md +++ /dev/null @@ -1,42 +0,0 @@ -# zh-cn Glossary - -## Sources - -- PR #13942: https://github.com/anomalyco/opencode/pull/13942 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only when it is part of commands, package names, paths, or code) -- `OpenCode Zen` -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- `Model Context Protocol` 
(prefer the English expansion when introducing `MCP`) - -## Preferred Terms - -These are preferred terms for docs/UI prose and may evolve. - -| English | Preferred | Notes | -| ----------------------- | --------- | ------------------------------------------- | -| prompt | 提示词 | Keep `--prompt` unchanged in flags/code | -| session | 会话 | | -| provider | 提供商 | | -| share link / shared URL | 分享链接 | Prefer `分享` for user-facing share actions | -| headless (server) | 无界面 | Docs wording | -| authentication | 认证 | Prefer in auth/OAuth contexts | -| cache | 缓存 | | -| keybind / shortcut | 快捷键 | User-facing docs wording | -| workflow | 工作流 | e.g. GitHub Actions workflow | - -## Guidance - -- Prefer natural, concise phrasing over literal translation -- Keep the tone direct and friendly (PR #13942 consistently moved wording in this direction) -- Preserve technical artifacts exactly: commands, flags, code, inline code, URLs, file paths, model IDs -- Keep enum-like values in English when they are literals (for example, `default`, `json`) -- Prefer consistent terminology across pages once a term is chosen (`会话`, `提供商`, `提示词`, etc.) 
- -## Avoid - -- Avoid `opencode` in prose when referring to the product name; use `OpenCode` -- Avoid mixing alternative terms for the same concept across docs when a preferred term is already established diff --git a/.opencode/glossary/zh-tw.md b/.opencode/glossary/zh-tw.md deleted file mode 100644 index 283660e121..0000000000 --- a/.opencode/glossary/zh-tw.md +++ /dev/null @@ -1,42 +0,0 @@ -# zh-tw Glossary - -## Sources - -- PR #13942: https://github.com/anomalyco/opencode/pull/13942 - -## Do Not Translate (Locale Additions) - -- `OpenCode` (preserve casing in prose; keep `opencode` only when it is part of commands, package names, paths, or code) -- `OpenCode Zen` -- `OpenCode CLI` -- `CLI`, `TUI`, `MCP`, `OAuth` -- `Model Context Protocol` (prefer the English expansion when introducing `MCP`) - -## Preferred Terms - -These are preferred terms for docs/UI prose and may evolve. - -| English | Preferred | Notes | -| ----------------------- | --------- | ------------------------------------------- | -| prompt | 提示詞 | Keep `--prompt` unchanged in flags/code | -| session | 工作階段 | | -| provider | 供應商 | | -| share link / shared URL | 分享連結 | Prefer `分享` for user-facing share actions | -| headless (server) | 無介面 | Docs wording | -| authentication | 認證 | Prefer in auth/OAuth contexts | -| cache | 快取 | | -| keybind / shortcut | 快捷鍵 | User-facing docs wording | -| workflow | 工作流程 | e.g. GitHub Actions workflow | - -## Guidance - -- Prefer natural, concise phrasing over literal translation -- Keep the tone direct and friendly (PR #13942 consistently moved wording in this direction) -- Preserve technical artifacts exactly: commands, flags, code, inline code, URLs, file paths, model IDs -- Keep enum-like values in English when they are literals (for example, `default`, `json`) -- Prefer consistent terminology across pages once a term is chosen (`工作階段`, `供應商`, `提示詞`, etc.) 
- -## Avoid - -- Avoid `opencode` in prose when referring to the product name; use `OpenCode` -- Avoid mixing alternative terms for the same concept across docs when a preferred term is already established diff --git a/.opencode/opencode.jsonc b/.opencode/opencode.jsonc index 3497847a67..f9a81f1169 100644 --- a/.opencode/opencode.jsonc +++ b/.opencode/opencode.jsonc @@ -1,10 +1,15 @@ { - "$schema": "https://opencode.ai/config.json", + "$schema": "https://altimate.ai/config.json", "provider": { "opencode": { "options": {}, }, }, + "permission": { + "edit": { + "packages/opencode/migration/*": "deny", + }, + }, "mcp": {}, "tools": { "github-triage": false, diff --git a/.opencode/themes/mytheme.json b/.opencode/themes/mytheme.json deleted file mode 100644 index e444de807c..0000000000 --- a/.opencode/themes/mytheme.json +++ /dev/null @@ -1,223 +0,0 @@ -{ - "$schema": "https://opencode.ai/theme.json", - "defs": { - "nord0": "#2E3440", - "nord1": "#3B4252", - "nord2": "#434C5E", - "nord3": "#4C566A", - "nord4": "#D8DEE9", - "nord5": "#E5E9F0", - "nord6": "#ECEFF4", - "nord7": "#8FBCBB", - "nord8": "#88C0D0", - "nord9": "#81A1C1", - "nord10": "#5E81AC", - "nord11": "#BF616A", - "nord12": "#D08770", - "nord13": "#EBCB8B", - "nord14": "#A3BE8C", - "nord15": "#B48EAD" - }, - "theme": { - "primary": { - "dark": "nord8", - "light": "nord10" - }, - "secondary": { - "dark": "nord9", - "light": "nord9" - }, - "accent": { - "dark": "nord7", - "light": "nord7" - }, - "error": { - "dark": "nord11", - "light": "nord11" - }, - "warning": { - "dark": "nord12", - "light": "nord12" - }, - "success": { - "dark": "nord14", - "light": "nord14" - }, - "info": { - "dark": "nord8", - "light": "nord10" - }, - "text": { - "dark": "nord4", - "light": "nord0" - }, - "textMuted": { - "dark": "nord3", - "light": "nord1" - }, - "background": { - "dark": "nord0", - "light": "nord6" - }, - "backgroundPanel": { - "dark": "nord1", - "light": "nord5" - }, - "backgroundElement": { - "dark": "nord1", - 
"light": "nord4" - }, - "border": { - "dark": "nord2", - "light": "nord3" - }, - "borderActive": { - "dark": "nord3", - "light": "nord2" - }, - "borderSubtle": { - "dark": "nord2", - "light": "nord3" - }, - "diffAdded": { - "dark": "nord14", - "light": "nord14" - }, - "diffRemoved": { - "dark": "nord11", - "light": "nord11" - }, - "diffContext": { - "dark": "nord3", - "light": "nord3" - }, - "diffHunkHeader": { - "dark": "nord3", - "light": "nord3" - }, - "diffHighlightAdded": { - "dark": "nord14", - "light": "nord14" - }, - "diffHighlightRemoved": { - "dark": "nord11", - "light": "nord11" - }, - "diffAddedBg": { - "dark": "#3B4252", - "light": "#E5E9F0" - }, - "diffRemovedBg": { - "dark": "#3B4252", - "light": "#E5E9F0" - }, - "diffContextBg": { - "dark": "nord1", - "light": "nord5" - }, - "diffLineNumber": { - "dark": "nord2", - "light": "nord4" - }, - "diffAddedLineNumberBg": { - "dark": "#3B4252", - "light": "#E5E9F0" - }, - "diffRemovedLineNumberBg": { - "dark": "#3B4252", - "light": "#E5E9F0" - }, - "markdownText": { - "dark": "nord4", - "light": "nord0" - }, - "markdownHeading": { - "dark": "nord8", - "light": "nord10" - }, - "markdownLink": { - "dark": "nord9", - "light": "nord9" - }, - "markdownLinkText": { - "dark": "nord7", - "light": "nord7" - }, - "markdownCode": { - "dark": "nord14", - "light": "nord14" - }, - "markdownBlockQuote": { - "dark": "nord3", - "light": "nord3" - }, - "markdownEmph": { - "dark": "nord12", - "light": "nord12" - }, - "markdownStrong": { - "dark": "nord13", - "light": "nord13" - }, - "markdownHorizontalRule": { - "dark": "nord3", - "light": "nord3" - }, - "markdownListItem": { - "dark": "nord8", - "light": "nord10" - }, - "markdownListEnumeration": { - "dark": "nord7", - "light": "nord7" - }, - "markdownImage": { - "dark": "nord9", - "light": "nord9" - }, - "markdownImageText": { - "dark": "nord7", - "light": "nord7" - }, - "markdownCodeBlock": { - "dark": "nord4", - "light": "nord0" - }, - "syntaxComment": { - "dark": "nord3", 
- "light": "nord3" - }, - "syntaxKeyword": { - "dark": "nord9", - "light": "nord9" - }, - "syntaxFunction": { - "dark": "nord8", - "light": "nord8" - }, - "syntaxVariable": { - "dark": "nord7", - "light": "nord7" - }, - "syntaxString": { - "dark": "nord14", - "light": "nord14" - }, - "syntaxNumber": { - "dark": "nord15", - "light": "nord15" - }, - "syntaxType": { - "dark": "nord7", - "light": "nord7" - }, - "syntaxOperator": { - "dark": "nord9", - "light": "nord9" - }, - "syntaxPunctuation": { - "dark": "nord4", - "light": "nord0" - } - } -} diff --git a/.opencode/tool/github-pr-search.ts b/.opencode/tool/github-pr-search.ts deleted file mode 100644 index 587fdfaaf2..0000000000 --- a/.opencode/tool/github-pr-search.ts +++ /dev/null @@ -1,57 +0,0 @@ -/// -import { tool } from "@opencode-ai/plugin" -import DESCRIPTION from "./github-pr-search.txt" - -async function githubFetch(endpoint: string, options: RequestInit = {}) { - const response = await fetch(`https://api.github.com${endpoint}`, { - ...options, - headers: { - Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, - Accept: "application/vnd.github+json", - "Content-Type": "application/json", - ...options.headers, - }, - }) - if (!response.ok) { - throw new Error(`GitHub API error: ${response.status} ${response.statusText}`) - } - return response.json() -} - -interface PR { - title: string - html_url: string -} - -export default tool({ - description: DESCRIPTION, - args: { - query: tool.schema.string().describe("Search query for PR titles and descriptions"), - limit: tool.schema.number().describe("Maximum number of results to return").default(10), - offset: tool.schema.number().describe("Number of results to skip for pagination").default(0), - }, - async execute(args) { - const owner = "anomalyco" - const repo = "opencode" - - const page = Math.floor(args.offset / args.limit) + 1 - const searchQuery = encodeURIComponent(`${args.query} repo:${owner}/${repo} type:pr state:open`) - const result = await 
githubFetch( - `/search/issues?q=${searchQuery}&per_page=${args.limit}&page=${page}&sort=updated&order=desc`, - ) - - if (result.total_count === 0) { - return `No PRs found matching "${args.query}"` - } - - const prs = result.items as PR[] - - if (prs.length === 0) { - return `No other PRs found matching "${args.query}"` - } - - const formatted = prs.map((pr) => `${pr.title}\n${pr.html_url}`).join("\n\n") - - return `Found ${result.total_count} PRs (showing ${prs.length}):\n\n${formatted}` - }, -}) diff --git a/.opencode/tool/github-pr-search.txt b/.opencode/tool/github-pr-search.txt deleted file mode 100644 index 28d8643f13..0000000000 --- a/.opencode/tool/github-pr-search.txt +++ /dev/null @@ -1,10 +0,0 @@ -Use this tool to search GitHub pull requests by title and description. - -This tool searches PRs in the sst/opencode repository and returns LLM-friendly results including: -- PR number and title -- Author -- State (open/closed/merged) -- Labels -- Description snippet - -Use the query parameter to search for keywords that might appear in PR titles or descriptions. diff --git a/.opencode/tool/github-triage.ts b/.opencode/tool/github-triage.ts deleted file mode 100644 index 8ad0212ad0..0000000000 --- a/.opencode/tool/github-triage.ts +++ /dev/null @@ -1,119 +0,0 @@ -/// -import { tool } from "@opencode-ai/plugin" -import DESCRIPTION from "./github-triage.txt" - -const TEAM = { - desktop: ["adamdotdevin", "iamdavidhill", "Brendonovich", "nexxeln"], - zen: ["fwang", "MrMushrooooom"], - tui: [ - "thdxr", - "kommander", - // "rekram1-node" (on vacation) - ], - core: [ - "thdxr", - // "rekram1-node", (on vacation) - "jlongster", - ], - docs: ["R44VC0RP"], - windows: ["Hona"], -} as const - -const ASSIGNEES = [...new Set(Object.values(TEAM).flat())] - -function pick(items: readonly T[]) { - return items[Math.floor(Math.random() * items.length)]! -} - -function getIssueNumber(): number { - const issue = parseInt(process.env.ISSUE_NUMBER ?? 
"", 10) - if (!issue) throw new Error("ISSUE_NUMBER env var not set") - return issue -} - -async function githubFetch(endpoint: string, options: RequestInit = {}) { - const response = await fetch(`https://api.github.com${endpoint}`, { - ...options, - headers: { - Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, - Accept: "application/vnd.github+json", - "Content-Type": "application/json", - ...options.headers, - }, - }) - if (!response.ok) { - throw new Error(`GitHub API error: ${response.status} ${response.statusText}`) - } - return response.json() -} - -export default tool({ - description: DESCRIPTION, - args: { - assignee: tool.schema.enum(ASSIGNEES as [string, ...string[]]).describe("The username of the assignee"), - labels: tool.schema - .array(tool.schema.enum(["nix", "opentui", "perf", "web", "desktop", "zen", "docs", "windows", "core"])) - .describe("The labels(s) to add to the issue") - .default([]), - }, - async execute(args) { - const issue = getIssueNumber() - const owner = "anomalyco" - const repo = "opencode" - - const results: string[] = [] - let labels = [...new Set(args.labels.map((x) => (x === "desktop" ? "web" : x)))] - const web = labels.includes("web") - const text = `${process.env.ISSUE_TITLE ?? ""}\n${process.env.ISSUE_BODY ?? ""}`.toLowerCase() - const zen = /\bzen\b/.test(text) || text.includes("opencode black") - const nix = /\bnix(os)?\b/.test(text) - - if (labels.includes("nix") && !nix) { - labels = labels.filter((x) => x !== "nix") - results.push("Dropped label: nix (issue does not mention nix)") - } - - // const assignee = nix ? "rekram1-node" : web ? pick(TEAM.desktop) : args.assignee - const assignee = web ? 
pick(TEAM.desktop) : args.assignee - - if (labels.includes("zen") && !zen) { - throw new Error("Only add the zen label when issue title/body contains 'zen'") - } - - if (web && !nix && !(TEAM.desktop as readonly string[]).includes(assignee)) { - throw new Error("Web issues must be assigned to adamdotdevin, iamdavidhill, Brendonovich, or nexxeln") - } - - if ((TEAM.zen as readonly string[]).includes(assignee) && !labels.includes("zen")) { - throw new Error("Only zen issues should be assigned to fwang or MrMushrooooom") - } - - if (assignee === "Hona" && !labels.includes("windows")) { - throw new Error("Only windows issues should be assigned to Hona") - } - - if (assignee === "R44VC0RP" && !labels.includes("docs")) { - throw new Error("Only docs issues should be assigned to R44VC0RP") - } - - if (assignee === "kommander" && !labels.includes("opentui")) { - throw new Error("Only opentui issues should be assigned to kommander") - } - - await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/assignees`, { - method: "POST", - body: JSON.stringify({ assignees: [assignee] }), - }) - results.push(`Assigned @${assignee} to issue #${issue}`) - - if (labels.length > 0) { - await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/labels`, { - method: "POST", - body: JSON.stringify({ labels }), - }) - results.push(`Added labels: ${labels.join(", ")}`) - } - - return results.join("\n") - }, -}) diff --git a/.opencode/tool/github-triage.txt b/.opencode/tool/github-triage.txt deleted file mode 100644 index 1a2d69bdb5..0000000000 --- a/.opencode/tool/github-triage.txt +++ /dev/null @@ -1,8 +0,0 @@ -Use this tool to assign and/or label a GitHub issue. - -Choose labels and assignee using the current triage policy and ownership rules. -Pick the most fitting labels for the issue and assign one owner. - -If unsure, choose the team/section with the most overlap with the issue and assign a member from that team at random. 
- -(Note: rekram1-node is on vacation, do not assign issues to him.) diff --git a/AGENTS.md b/AGENTS.md deleted file mode 100644 index 2158d73af1..0000000000 --- a/AGENTS.md +++ /dev/null @@ -1,124 +0,0 @@ -- To regenerate the JavaScript SDK, run `./packages/sdk/js/script/build.ts`. -- ALWAYS USE PARALLEL TOOLS WHEN APPLICABLE. -- The default branch in this repo is `dev`. -- Local `main` ref may not exist; use `dev` or `origin/dev` for diffs. -- Prefer automation: execute requested actions without confirmation unless blocked by missing info or safety/irreversibility. - -## Style Guide - -### General Principles - -- Keep things in one function unless composable or reusable -- Avoid `try`/`catch` where possible -- Avoid using the `any` type -- Prefer single word variable names where possible -- Use Bun APIs when possible, like `Bun.file()` -- Rely on type inference when possible; avoid explicit type annotations or interfaces unless necessary for exports or clarity -- Prefer functional array methods (flatMap, filter, map) over for loops; use type guards on filter to maintain type inference downstream - -### Naming - -Prefer single word names for variables and functions. Only use multiple words if necessary. - -### Naming Enforcement (Read This) - -THIS RULE IS MANDATORY FOR AGENT WRITTEN CODE. - -- Use single word names by default for new locals, params, and helper functions. -- Multi-word names are allowed only when a single word would be unclear or ambiguous. -- Do not introduce new camelCase compounds when a short single-word alternative is clear. -- Before finishing edits, review touched lines and shorten newly introduced identifiers where possible. -- Good short names to prefer: `pid`, `cfg`, `err`, `opts`, `dir`, `root`, `child`, `state`, `timeout`. -- Examples to avoid unless truly required: `inputPID`, `existingClient`, `connectTimeout`, `workerPath`. 
- -```ts -// Good -const foo = 1 -function journal(dir: string) {} - -// Bad -const fooBar = 1 -function prepareJournal(dir: string) {} -``` - -Reduce total variable count by inlining when a value is only used once. - -```ts -// Good -const journal = await Bun.file(path.join(dir, "journal.json")).json() - -// Bad -const journalPath = path.join(dir, "journal.json") -const journal = await Bun.file(journalPath).json() -``` - -### Destructuring - -Avoid unnecessary destructuring. Use dot notation to preserve context. - -```ts -// Good -obj.a -obj.b - -// Bad -const { a, b } = obj -``` - -### Variables - -Prefer `const` over `let`. Use ternaries or early returns instead of reassignment. - -```ts -// Good -const foo = condition ? 1 : 2 - -// Bad -let foo -if (condition) foo = 1 -else foo = 2 -``` - -### Control Flow - -Avoid `else` statements. Prefer early returns. - -```ts -// Good -function foo() { - if (condition) return 1 - return 2 -} - -// Bad -function foo() { - if (condition) return 1 - else return 2 -} -``` - -### Schema Definitions (Drizzle) - -Use snake_case for field names so column names don't need to be redefined as strings. - -```ts -// Good -const table = sqliteTable("session", { - id: text().primaryKey(), - project_id: text().notNull(), - created_at: integer().notNull(), -}) - -// Bad -const table = sqliteTable("session", { - id: text("id").primaryKey(), - projectID: text("project_id").notNull(), - createdAt: integer("created_at").notNull(), -}) -``` - -## Testing - -- Avoid mocks as much as possible -- Test actual implementation, do not duplicate logic into tests -- Tests cannot run from repo root (guard: `do-not-run-tests-from-root`); run from package dirs like `packages/opencode`. 
diff --git a/CHANGELOG.md b/CHANGELOG.md index cabc1247a2..77301c14d6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,7 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Datamate manager — dynamic MCP server management (#99) - Non-interactive mode for `mcp add` command with input validation - `mcp remove` command -- Upstream merge with OpenCode v1.2.20 +- Upstream merge with Altimate Code v1.2.20 ### Fixed @@ -134,7 +134,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Build: resolve @opentui/core parser.worker.js via import.meta.resolve for monorepo hoisting - Build: output binary as `altimate-code` instead of `opencode` -- Publish: update Docker/AUR/Homebrew references from anomalyco/opencode to AltimateAI/altimate-code +- Publish: update Docker/AUR/Homebrew references from AltimateAI/altimate-code to AltimateAI/altimate-code - Publish: make Docker/AUR/Homebrew steps non-fatal - Bin wrapper: look for `@altimateai/altimate-code-*` scoped platform packages - Postinstall: resolve `@altimateai` scoped platform packages diff --git a/FEATURES_RESTRUCTURE.md b/FEATURES_RESTRUCTURE.md index bcc79ced81..3894162d85 100644 --- a/FEATURES_RESTRUCTURE.md +++ b/FEATURES_RESTRUCTURE.md @@ -179,7 +179,7 @@ Custom agent modes added to `packages/opencode/src/agent/agent.ts` via `altimate #### 16. Anthropic OAuth Plugin - **Files**: `packages/opencode/src/altimate/plugin/anthropic.ts` -- **Description**: Custom plugin implementing Anthropic OAuth 2.0 authentication via PKCE flow. Supports two login modes: Claude Pro/Max subscription (claude.ai) and API key creation via console (console.anthropic.com). Handles token refresh, injects required OAuth beta headers, prefixes all tool names with `mcp_` as required by Anthropic's OAuth endpoint, strips the prefix in streaming responses, and sanitizes system prompts (replaces "OpenCode" with "Claude Code"). 
Also zeroes out model costs for Pro/Max subscribers. +- **Description**: Custom plugin implementing Anthropic OAuth 2.0 authentication via PKCE flow. Supports two login modes: Claude Pro/Max subscription (claude.ai) and API key creation via console (console.anthropic.com). Handles token refresh, injects required OAuth beta headers, prefixes all tool names with `mcp_` as required by Anthropic's OAuth endpoint, strips the prefix in streaming responses, and sanitizes system prompts (replaces "Altimate Code" with "Claude Code"). Also zeroes out model costs for Pro/Max subscribers. - **Category**: Plugin --- diff --git a/bun.lock b/bun.lock index ee060340de..6d3f01c448 100644 --- a/bun.lock +++ b/bun.lock @@ -11,11 +11,8 @@ "typescript": "catalog:", }, "devDependencies": { - "@actions/artifact": "5.0.1", "@tsconfig/bun": "catalog:", - "@types/mime-types": "3.0.1", "@typescript/native-preview": "catalog:", - "glob": "13.0.5", "husky": "9.1.7", "prettier": "3.6.2", "semver": "^7.6.0", @@ -26,8 +23,9 @@ "name": "@altimateai/altimate-code", "version": "1.2.20", "bin": { - "altimate": "./bin/altimate", "altimate-code": "./bin/altimate-code", + "altimate": "./bin/altimate-code", + "opencode": "./bin/opencode", }, "dependencies": { "@actions/core": "1.11.1", @@ -67,8 +65,8 @@ "@opencode-ai/sdk": "workspace:*", "@opencode-ai/util": "workspace:*", "@openrouter/ai-sdk-provider": "1.5.4", - "@opentui/core": "0.1.86", - "@opentui/solid": "0.1.86", + "@opentui/core": "0.1.87", + "@opentui/solid": "0.1.87", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", @@ -83,7 +81,8 @@ "clipboardy": "4.0.0", "decimal.js": "10.5.0", "diff": "catalog:", - "drizzle-orm": "1.0.0-beta.12-a5629fb", + "drizzle-orm": "1.0.0-beta.16-ea816b6", + "effect": "catalog:", "fuzzysort": "3.1.0", "glob": "13.0.5", "google-auth-library": "10.5.0", @@ -98,6 +97,7 @@ "opentui-spinner": "0.0.6", "partial-json": "0.1.7", "remeda": "catalog:", + "semver": "^7.6.3", 
"solid-js": "catalog:", "strip-ansi": "7.1.2", "tree-sitter-bash": "0.25.0", @@ -113,6 +113,7 @@ }, "devDependencies": { "@babel/core": "7.28.4", + "@effect/language-service": "0.79.0", "@octokit/webhooks-types": "7.6.1", "@opencode-ai/script": "workspace:*", "@parcel/watcher-darwin-arm64": "2.5.1", @@ -121,18 +122,20 @@ "@parcel/watcher-linux-arm64-musl": "2.5.1", "@parcel/watcher-linux-x64-glibc": "2.5.1", "@parcel/watcher-linux-x64-musl": "2.5.1", + "@parcel/watcher-win32-arm64": "2.5.1", "@parcel/watcher-win32-x64": "2.5.1", "@standard-schema/spec": "1.0.0", "@tsconfig/bun": "catalog:", "@types/babel__core": "7.20.5", "@types/bun": "catalog:", "@types/mime-types": "3.0.1", + "@types/semver": "^7.5.8", "@types/turndown": "5.0.5", "@types/which": "3.0.4", "@types/yargs": "17.0.33", "@typescript/native-preview": "catalog:", - "drizzle-kit": "1.0.0-beta.12-a5629fb", - "drizzle-orm": "1.0.0-beta.12-a5629fb", + "drizzle-kit": "1.0.0-beta.16-ea816b6", + "drizzle-orm": "1.0.0-beta.16-ea816b6", "typescript": "catalog:", "vscode-languageserver-types": "3.17.5", "why-is-node-running": "3.2.2", @@ -155,8 +158,12 @@ }, "packages/script": { "name": "@opencode-ai/script", + "dependencies": { + "semver": "^7.6.3", + }, "devDependencies": { "@types/bun": "catalog:", + "@types/semver": "^7.5.8", }, }, "packages/sdk/js": { @@ -196,10 +203,18 @@ "@types/node": "catalog:", }, "catalog": { + "@cloudflare/workers-types": "4.20251008.0", "@hono/zod-validator": "0.4.2", + "@kobalte/core": "0.13.11", "@octokit/rest": "22.0.0", "@openauthjs/openauth": "0.0.0-20250322224806", "@pierre/diffs": "1.1.0-beta.18", + "@playwright/test": "1.51.0", + "@solid-primitives/storage": "4.3.3", + "@solidjs/meta": "0.29.4", + "@solidjs/router": "0.15.4", + "@solidjs/start": "https://pkg.pr.new/@solidjs/start@dfb2020", + "@tailwindcss/vite": "4.1.11", "@tsconfig/bun": "1.0.9", "@tsconfig/node22": "22.0.2", "@types/bun": "1.3.9", @@ -209,8 +224,10 @@ "@typescript/native-preview": "7.0.0-dev.20251207.1", 
"ai": "5.0.124", "diff": "8.0.2", - "drizzle-kit": "1.0.0-beta.12-a5629fb", - "drizzle-orm": "1.0.0-beta.12-a5629fb", + "dompurify": "3.3.1", + "drizzle-kit": "1.0.0-beta.16-ea816b6", + "drizzle-orm": "1.0.0-beta.16-ea816b6", + "effect": "4.0.0-beta.31", "fuzzysort": "3.1.0", "hono": "4.10.7", "hono-openapi": "1.1.2", @@ -220,20 +237,23 @@ "remeda": "2.26.0", "shiki": "3.20.0", "solid-js": "1.9.10", + "solid-list": "0.3.0", + "tailwindcss": "4.1.11", "typescript": "5.8.2", "ulid": "3.0.1", + "virtua": "0.42.3", + "vite": "7.1.4", + "vite-plugin-solid": "2.11.10", "zod": "4.1.8", }, "packages": { - "@actions/artifact": ["@actions/artifact@5.0.1", "", { "dependencies": { "@actions/core": "^2.0.0", "@actions/github": "^6.0.1", "@actions/http-client": "^3.0.0", "@azure/storage-blob": "^12.29.1", "@octokit/core": "^5.2.1", "@octokit/plugin-request-log": "^1.0.4", "@octokit/plugin-retry": "^3.0.9", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "@protobuf-ts/plugin": "^2.2.3-alpha.1", "archiver": "^7.0.1", "jwt-decode": "^3.1.2", "unzip-stream": "^0.3.1" } }, "sha512-dHJ5rHduhCKUikKTT9eXeWoUvfKia3IjR1sO/VTAV3DVAL4yMTRnl2iO5mcfiBjySHLwPNezwENAVskKYU5ymw=="], - "@actions/core": ["@actions/core@1.11.1", "", { "dependencies": { "@actions/exec": "^1.1.1", "@actions/http-client": "^2.0.1" } }, "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A=="], "@actions/exec": ["@actions/exec@1.1.1", "", { "dependencies": { "@actions/io": "^1.0.1" } }, "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w=="], "@actions/github": ["@actions/github@6.0.1", "", { "dependencies": { "@actions/http-client": "^2.2.0", "@octokit/core": "^5.0.1", "@octokit/plugin-paginate-rest": "^9.2.2", "@octokit/plugin-rest-endpoint-methods": "^10.4.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "undici": "^5.28.5" } }, 
"sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw=="], - "@actions/http-client": ["@actions/http-client@3.0.2", "", { "dependencies": { "tunnel": "^0.0.6", "undici": "^6.23.0" } }, "sha512-JP38FYYpyqvUsz+Igqlc/JG6YO9PaKuvqjM3iGvaLqFnJ7TFmcLyy2IDrY0bI0qCQug8E9K+elv5ZNfw62ZJzA=="], + "@actions/http-client": ["@actions/http-client@2.2.3", "", { "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" } }, "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA=="], "@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="], @@ -377,8 +397,6 @@ "@azure/core-util": ["@azure/core-util@1.13.1", "", { "dependencies": { "@azure/abort-controller": "^2.1.2", "@typespec/ts-http-runtime": "^0.3.0", "tslib": "^2.6.2" } }, "sha512-XPArKLzsvl0Hf0CaGyKHUyVgF7oDnhKoP85Xv6M4StF/1AhfORhZudHtOyf2s+FcbuQ9dPRAjB8J2KvRRMUK2A=="], - "@azure/core-xml": ["@azure/core-xml@1.5.0", "", { "dependencies": { "fast-xml-parser": "^5.0.7", "tslib": "^2.8.1" } }, "sha512-D/sdlJBMJfx7gqoj66PKVmhDDaU6TKA49ptcolxdas29X7AfvLTmfAGLjAcIMBK7UZ2o4lygHIqVckOlQU3xWw=="], - "@azure/identity": ["@azure/identity@4.13.0", "", { "dependencies": { "@azure/abort-controller": "^2.0.0", "@azure/core-auth": "^1.9.0", "@azure/core-client": "^1.9.2", "@azure/core-rest-pipeline": "^1.17.0", "@azure/core-tracing": "^1.0.0", "@azure/core-util": "^1.11.0", "@azure/logger": "^1.0.0", "@azure/msal-browser": "^4.2.0", "@azure/msal-node": "^3.5.0", "open": "^10.1.0", "tslib": "^2.2.0" } }, "sha512-uWC0fssc+hs1TGGVkkghiaFkkS7NkTxfnCH+Hdg+yTehTpMcehpok4PgUKKdyCH+9ldu6FhiHRv84Ntqj1vVcw=="], "@azure/keyvault-common": ["@azure/keyvault-common@2.0.0", "", { "dependencies": { "@azure/abort-controller": "^2.0.0", "@azure/core-auth": "^1.3.0", "@azure/core-client": "^1.5.0", "@azure/core-rest-pipeline": "^1.8.0", "@azure/core-tracing": "^1.0.0", "@azure/core-util": 
"^1.10.0", "@azure/logger": "^1.1.4", "tslib": "^2.2.0" } }, "sha512-wRLVaroQtOqfg60cxkzUkGKrKMsCP6uYXAOomOIysSMyt1/YM0eUn9LqieAWM8DLcU4+07Fio2YGpPeqUbpP9w=="], @@ -393,10 +411,6 @@ "@azure/msal-node": ["@azure/msal-node@3.8.7", "", { "dependencies": { "@azure/msal-common": "15.14.2", "jsonwebtoken": "^9.0.0", "uuid": "^8.3.0" } }, "sha512-a+Xnrae+uwLnlw68bplS1X4kuJ9F/7K6afuMFyRkNIskhjgDezl5Fhrx+1pmAlDmC0VaaAxjRQMp1OmcqVwkIg=="], - "@azure/storage-blob": ["@azure/storage-blob@12.31.0", "", { "dependencies": { "@azure/abort-controller": "^2.1.2", "@azure/core-auth": "^1.9.0", "@azure/core-client": "^1.9.3", "@azure/core-http-compat": "^2.2.0", "@azure/core-lro": "^2.2.0", "@azure/core-paging": "^1.6.2", "@azure/core-rest-pipeline": "^1.19.1", "@azure/core-tracing": "^1.2.0", "@azure/core-util": "^1.11.0", "@azure/core-xml": "^1.4.5", "@azure/logger": "^1.1.4", "@azure/storage-common": "^12.3.0", "events": "^3.0.0", "tslib": "^2.8.1" } }, "sha512-DBgNv10aCSxopt92DkTDD0o9xScXeBqPKGmR50FPZQaEcH4JLQ+GEOGEDv19V5BMkB7kxr+m4h6il/cCDPvmHg=="], - - "@azure/storage-common": ["@azure/storage-common@12.3.0", "", { "dependencies": { "@azure/abort-controller": "^2.1.2", "@azure/core-auth": "^1.9.0", "@azure/core-http-compat": "^2.2.0", "@azure/core-rest-pipeline": "^1.19.1", "@azure/core-tracing": "^1.2.0", "@azure/core-util": "^1.11.0", "@azure/logger": "^1.1.4", "events": "^3.3.0", "tslib": "^2.8.1" } }, "sha512-/OFHhy86aG5Pe8dP5tsp+BuJ25JOAl9yaMU3WZbkeoiFMHFtJ7tu5ili7qEdBXNW9G5lDB19trwyI6V49F/8iQ=="], - "@babel/code-frame": ["@babel/code-frame@7.29.0", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw=="], "@babel/compat-data": ["@babel/compat-data@7.29.0", "", {}, "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg=="], @@ -455,20 +469,16 @@ "@babel/types": 
["@babel/types@7.29.0", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.28.5" } }, "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A=="], - "@bufbuild/protobuf": ["@bufbuild/protobuf@2.11.0", "", {}, "sha512-sBXGT13cpmPR5BMgHE6UEEfEaShh5Ror6rfN3yEK5si7QVrtZg8LEPQb0VVhiLRUslD2yLnXtnRzG035J/mZXQ=="], - - "@bufbuild/protoplugin": ["@bufbuild/protoplugin@2.11.0", "", { "dependencies": { "@bufbuild/protobuf": "2.11.0", "@typescript/vfs": "^1.6.2", "typescript": "5.4.5" } }, "sha512-lyZVNFUHArIOt4W0+dwYBe5GBwbKzbOy8ObaloEqsw9Mmiwv2O48TwddDoHN4itylC+BaEGqFdI1W8WQt2vWJQ=="], - "@clack/core": ["@clack/core@1.0.0-alpha.1", "", { "dependencies": { "picocolors": "^1.0.0", "sisteransi": "^1.0.5" } }, "sha512-rFbCU83JnN7l3W1nfgCqqme4ZZvTTgsiKQ6FM0l+r0P+o2eJpExcocBUWUIwnDzL76Aca9VhUdWmB2MbUv+Qyg=="], "@clack/prompts": ["@clack/prompts@1.0.0-alpha.1", "", { "dependencies": { "@clack/core": "1.0.0-alpha.1", "picocolors": "^1.0.0", "sisteransi": "^1.0.5" } }, "sha512-07MNT0OsxjKOcyVfX8KhXBhJiyUbDP1vuIAcHc+nx5v93MJO23pX3X/k3bWz6T3rpM9dgWPq90i4Jq7gZAyMbw=="], - "@cloudflare/workers-types": ["@cloudflare/workers-types@4.20251008.0", "", {}, "sha512-dZLkO4PbCL0qcCSKzuW7KE4GYe49lI12LCfQ5y9XeSwgYBoAUbwH4gmJ6A0qUIURiTJTkGkRkhVPqpq2XNgYRA=="], - "@dimforge/rapier2d-simd-compat": ["@dimforge/rapier2d-simd-compat@0.17.3", "", {}, "sha512-bijvwWz6NHsNj5e5i1vtd3dU2pDhthSaTUZSh14DUGGKJfw8eMnlWZsxwHBxB/a3AXVNDjL9abuHw1k9FGR+jg=="], "@drizzle-team/brocli": ["@drizzle-team/brocli@0.11.0", "", {}, "sha512-hD3pekGiPg0WPCCGAZmusBBJsDqGUR66Y452YgQsZOnkdQ7ViEPKuyP4huUGEZQefp8g34RRodXYmJ2TbCH+tg=="], + "@effect/language-service": ["@effect/language-service@0.79.0", "", { "bin": { "effect-language-service": "cli.js" } }, "sha512-DEmIOsg1GjjP6s9HXH1oJrW+gDmzkhVv9WOZl6to5eNyyCrjz1S2PDqQ7aYrW/HuifhfwI5Bik1pK4pj7Z+lrg=="], + "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": 
"ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="], "@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="], @@ -561,7 +571,7 @@ "@isaacs/brace-expansion": ["@isaacs/brace-expansion@5.0.1", "", { "dependencies": { "@isaacs/balanced-match": "^4.0.1" } }, "sha512-WMz71T1JS624nWj2n2fnYAuPovhv7EUhk69R6i9dsVyzxt5eM3bjwvgk9L+APE1TRscGysAVMANkB0jh0LQZrQ=="], - "@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], + "@isaacs/cliui": ["@isaacs/cliui@9.0.0", "", {}, "sha512-AokJm4tuBHillT+FpMtxQ60n8ObyXBatq7jD2/JA9dxbDDokKQm8KMht5ibGzLVU9IJDIKK4TPKgMHEYMn3lMg=="], "@jimp/core": ["@jimp/core@1.6.0", "", { "dependencies": { "@jimp/file-ops": "1.6.0", "@jimp/types": "1.6.0", "@jimp/utils": "1.6.0", "await-to-js": "^3.0.0", "exif-parser": "^0.1.12", "file-type": "^16.0.0", "mime": "3" } }, "sha512-EQQlKU3s9QfdJqiSrZWNTxBs3rKXgO2W+GxNXDtwchF3a4IqxDheFX1ti+Env9hdJXDiYLp2jTRjlxhPthsk8w=="], @@ -643,6 +653,18 @@ "@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.25.2", "", { "dependencies": { "@hono/node-server": "^1.19.7", "ajv": "^8.17.1", "ajv-formats": "^3.0.1", "content-type": "^1.0.5", "cors": "^2.8.5", "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "eventsource-parser": "^3.0.0", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "jose": "^6.1.1", "json-schema-typed": "^8.0.2", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.25 || ^4.0", "zod-to-json-schema": "^3.25.0" }, "peerDependencies": { "@cfworker/json-schema": "^4.1.1" }, 
"optionalPeers": ["@cfworker/json-schema"] }, "sha512-LZFeo4F9M5qOhC/Uc1aQSrBHxMrvxett+9KLHt7OhcExtoiRN9DKgbZffMP/nxjutWDQpfMDfP3nkHI4X9ijww=="], + "@msgpackr-extract/msgpackr-extract-darwin-arm64": ["@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw=="], + + "@msgpackr-extract/msgpackr-extract-darwin-x64": ["@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw=="], + + "@msgpackr-extract/msgpackr-extract-linux-arm": ["@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3", "", { "os": "linux", "cpu": "arm" }, "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw=="], + + "@msgpackr-extract/msgpackr-extract-linux-arm64": ["@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg=="], + + "@msgpackr-extract/msgpackr-extract-linux-x64": ["@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3", "", { "os": "linux", "cpu": "x64" }, "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg=="], + + "@msgpackr-extract/msgpackr-extract-win32-x64": ["@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3", "", { "os": "win32", "cpu": "x64" }, "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ=="], + "@octokit/auth-token": ["@octokit/auth-token@4.0.0", "", {}, "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA=="], "@octokit/core": ["@octokit/core@5.2.2", "", { "dependencies": { "@octokit/auth-token": "^4.0.0", "@octokit/graphql": "^7.1.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", 
"@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" } }, "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg=="], @@ -655,12 +677,10 @@ "@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@9.2.2", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ=="], - "@octokit/plugin-request-log": ["@octokit/plugin-request-log@1.0.4", "", { "peerDependencies": { "@octokit/core": ">=3" } }, "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA=="], + "@octokit/plugin-request-log": ["@octokit/plugin-request-log@6.0.0", "", { "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q=="], "@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@10.4.1", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg=="], - "@octokit/plugin-retry": ["@octokit/plugin-retry@3.0.9", "", { "dependencies": { "@octokit/types": "^6.0.3", "bottleneck": "^2.15.3" } }, "sha512-r+fArdP5+TG6l1Rv/C9hVoty6tldw6cE2pRHNGmFPdyfrc696R6JjrQ3d7HdVqGwuzfyrcaLAKD7K8TX8aehUQ=="], - "@octokit/request": ["@octokit/request@8.4.1", "", { "dependencies": { "@octokit/endpoint": "^9.0.6", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw=="], "@octokit/request-error": ["@octokit/request-error@5.1.1", "", { "dependencies": { "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" } }, 
"sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g=="], @@ -687,21 +707,21 @@ "@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="], - "@opentui/core": ["@opentui/core@0.1.86", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.86", "@opentui/core-darwin-x64": "0.1.86", "@opentui/core-linux-arm64": "0.1.86", "@opentui/core-linux-x64": "0.1.86", "@opentui/core-win32-arm64": "0.1.86", "@opentui/core-win32-x64": "0.1.86", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-3tRLbI9ADrQE1jEEn4x2aJexEOQZkv9Emk2BixMZqxfVhz2zr2SxtpimDAX0vmZK3+GnWAwBWxuaCAsxZpY4+w=="], + "@opentui/core": ["@opentui/core@0.1.87", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.87", "@opentui/core-darwin-x64": "0.1.87", "@opentui/core-linux-arm64": "0.1.87", "@opentui/core-linux-x64": "0.1.87", "@opentui/core-win32-arm64": "0.1.87", "@opentui/core-win32-x64": "0.1.87", "bun-webgpu": "0.1.5", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-dhsmMv0IqKftwG7J/pBrLBj2armsYIg5R3LBvciRQI/6X89GufP4l1u0+QTACAx6iR4SYJJNVNQ2tdX8LM9rMw=="], - "@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.86", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Zp7q64+d+Dcx6YrH3mRcnHq8EOBnrfc1RvjgSWLhpXr49hY6LzuhqpfZM57aGErPYlR+ff8QM6e5FUkFnDfyjw=="], + "@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.87", "", { "os": "darwin", "cpu": "arm64" }, 
"sha512-G8oq85diOfkU6n0T1CxCle7oDmpKxwhcdhZ9khBMU5IrfLx9ZDuCM3F6MsiRQWdvPPCq2oomNbd64bYkPamYgw=="], - "@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.86", "", { "os": "darwin", "cpu": "x64" }, "sha512-NcxfjCJm1kLnTMVOpAPdRYNi8W8XdAXNa6N7i9khiVFrl2v5KRQfUjbrSOUYVxFJNc3jKFG6rsn3jEApvn92qA=="], + "@opentui/core-darwin-x64": ["@opentui/core-darwin-x64@0.1.87", "", { "os": "darwin", "cpu": "x64" }, "sha512-MYTFQfOHm6qO7YaY4GHK9u/oJlXY6djaaxl5I+k4p2mk3vvuFIl/AP1ypITwBFjyV5gyp7PRWFp4nGfY9oN8bw=="], - "@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.86", "", { "os": "linux", "cpu": "arm64" }, "sha512-EDHAvqSOr8CXzbDvo1aE5blJ6wu1aSbR2LqoXtoeXHemr2T2W42D2TdIWewG6K+/BuRbzZnqt9wnYFBksLW6lw=="], + "@opentui/core-linux-arm64": ["@opentui/core-linux-arm64@0.1.87", "", { "os": "linux", "cpu": "arm64" }, "sha512-he8o1h5M6oskRJ7wE+xKJgmWnv5ZwN6gB3M/Z+SeHtOMPa5cZmi3TefTjG54llEgFfx0F9RcqHof7TJ/GNxRkw=="], - "@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.86", "", { "os": "linux", "cpu": "x64" }, "sha512-VBaBkVdQDxYV4WcKjb+jgyMS5PiVHepvfaoKWpz1Bq+J01xXW4XPcXyPGkgR1+2R93KzaugEnLscTW4mWtLHlQ=="], + "@opentui/core-linux-x64": ["@opentui/core-linux-x64@0.1.87", "", { "os": "linux", "cpu": "x64" }, "sha512-aiUwjPlH4yDcB8/6YDKSmMkaoGAAltL0Xo0AzXyAtJXWK5tkCSaYjEVwzJ/rYRkr4Magnad+Mjth4AQUWdR2AA=="], - "@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.86", "", { "os": "win32", "cpu": "arm64" }, "sha512-xKbT7sEKYKGwUPkoqmLfHjbJU+vwHPDwf/r/mIunL41JXQBB35CSZ3/QgIwpp2kkteu7oE1tdBdg15ogUU4OMg=="], + "@opentui/core-win32-arm64": ["@opentui/core-win32-arm64@0.1.87", "", { "os": "win32", "cpu": "arm64" }, "sha512-cmP0pOyREjWGniHqbDmaMY7U+1AyagrD8VseJbU0cGpNgVpG2/gbrJUGdfdLB0SNb+mzLdx6SOjdxtrElwRCQA=="], - "@opentui/core-win32-x64": ["@opentui/core-win32-x64@0.1.86", "", { "os": "win32", "cpu": "x64" }, "sha512-HRfgAUlcu71/MrtgfX4Gj7PsDtfXZiuC506Pkn1OnRN1Xomcu10BVRDweUa0/g8ldU9i9kLjMGGnpw6/NjaBFg=="], + "@opentui/core-win32-x64": 
["@opentui/core-win32-x64@0.1.87", "", { "os": "win32", "cpu": "x64" }, "sha512-N2GErAAP8iODf2RPp86pilPaVKiD6G4pkpZL5nLGbKsl0bndrVTpSqZcn8+/nQwFZDPD/AsiRTYNOfWOblhzOw=="], - "@opentui/solid": ["@opentui/solid@0.1.86", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.86", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.9", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.9" } }, "sha512-pOZC9dlZIH+bpstVVZ2AvYukBnslZTKSl/y5H8FWcMTHGv/BzpGxXBxstL65E/IQASqPFbvFcs7yMRzdLhynmA=="], + "@opentui/solid": ["@opentui/solid@0.1.87", "", { "dependencies": { "@babel/core": "7.28.0", "@babel/preset-typescript": "7.27.1", "@opentui/core": "0.1.87", "babel-plugin-module-resolver": "5.0.2", "babel-preset-solid": "1.9.9", "entities": "7.0.1", "s-js": "^0.4.9" }, "peerDependencies": { "solid-js": "1.9.9" } }, "sha512-lRT9t30l8+FtgOjjWJcdb2MT6hP8/RKqwGgYwTI7fXrOqdhxxwdP2SM+rH2l3suHeASheiTdlvPAo230iUcsvg=="], "@oslojs/asn1": ["@oslojs/asn1@1.0.0", "", { "dependencies": { "@oslojs/binary": "1.0.0" } }, "sha512-zw/wn0sj0j0QKbIXfIlnEcTviaCzYOY3V5rAyjR6YtOByFtJiT574+8p9Wlach0lZH9fddD4yb9laEAIl4vXQA=="], @@ -749,16 +769,6 @@ "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], - "@planetscale/database": ["@planetscale/database@1.19.0", "", {}, "sha512-Tv4jcFUFAFjOWrGSio49H6R2ijALv0ZzVBfJKIdm+kl9X046Fh4LLawrF9OMsglVbK6ukqMJsUCeucGAFTBcMA=="], - - "@protobuf-ts/plugin": ["@protobuf-ts/plugin@2.11.1", "", { "dependencies": { "@bufbuild/protobuf": "^2.4.0", "@bufbuild/protoplugin": "^2.4.0", "@protobuf-ts/protoc": "^2.11.1", "@protobuf-ts/runtime": "^2.11.1", "@protobuf-ts/runtime-rpc": "^2.11.1", "typescript": "^3.9" }, "bin": { "protoc-gen-ts": "bin/protoc-gen-ts", "protoc-gen-dump": "bin/protoc-gen-dump" } }, 
"sha512-HyuprDcw0bEEJqkOWe1rnXUP0gwYLij8YhPuZyZk6cJbIgc/Q0IFgoHQxOXNIXAcXM4Sbehh6kjVnCzasElw1A=="], - - "@protobuf-ts/protoc": ["@protobuf-ts/protoc@2.11.1", "", { "bin": { "protoc": "protoc.js" } }, "sha512-mUZJaV0daGO6HUX90o/atzQ6A7bbN2RSuHtdwo8SSF2Qoe3zHwa4IHyCN1evftTeHfLmdz+45qo47sL+5P8nyg=="], - - "@protobuf-ts/runtime": ["@protobuf-ts/runtime@2.11.1", "", {}, "sha512-KuDaT1IfHkugM2pyz+FwiY80ejWrkH1pAtOBOZFuR6SXEFTsnb/jiQWQ1rCIrcKx2BtyxnxW6BWwsVSA/Ie+WQ=="], - - "@protobuf-ts/runtime-rpc": ["@protobuf-ts/runtime-rpc@2.11.1", "", { "dependencies": { "@protobuf-ts/runtime": "^2.11.1" } }, "sha512-4CqqUmNA+/uMz00+d3CYKgElXO9VrEbucjnBFEjqI4GuDrEQ32MaI3q+9qPBvIGOlL4PmHXrzM32vBPWRhQKWQ=="], - "@shikijs/core": ["@shikijs/core@3.20.0", "", { "dependencies": { "@shikijs/types": "3.20.0", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4", "hast-util-to-html": "^9.0.5" } }, "sha512-f2ED7HYV4JEk827mtMDwe/yQ25pRiXZmtHjWF8uzZKuKiEsJR7Ce1nuQ+HhV9FzDcbIo4ObBCD9GPTzNuy9S1g=="], "@shikijs/engine-javascript": ["@shikijs/engine-javascript@3.20.0", "", { "dependencies": { "@shikijs/types": "3.20.0", "@shikijs/vscode-textmate": "^10.0.2", "oniguruma-to-es": "^4.3.4" } }, "sha512-OFx8fHAZuk7I42Z9YAdZ95To6jDePQ9Rnfbw9uSRTSbBhYBp1kEOKv/3jOimcj3VRUKusDYM6DswLauwfhboLg=="], @@ -903,6 +913,8 @@ "@types/readable-stream": ["@types/readable-stream@4.0.23", "", { "dependencies": { "@types/node": "*" } }, "sha512-wwXrtQvbMHxCbBgjHaMGEmImFTQxxpfMOR/ZoQnXxB1woqkUbdLGFDgauo00Py9IudiaqSeiBiulSV9i6XIPig=="], + "@types/semver": ["@types/semver@7.7.1", "", {}, "sha512-FmgJfu+MOcQ370SD0ev7EI8TlCAfKYU+B4m5T3yXc1CiRN94g/SZPtsCkk506aUDtlMnFZvasDwHHUcZUEaYuA=="], + "@types/turndown": ["@types/turndown@5.0.5", "", {}, "sha512-TL2IgGgc7B5j78rIccBtlYAnkuv8nUQqhQc+DSYV5j9Be9XOcm/SKOVRuA47xAVI3680Tk9B1d8flK2GWT2+4w=="], "@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="], @@ -929,8 +941,6 @@ 
"@typescript/native-preview-win32-x64": ["@typescript/native-preview-win32-x64@7.0.0-dev.20251207.1", "", { "os": "win32", "cpu": "x64" }, "sha512-5l51HlXjX7lXwo65DEl1IaCFLjmkMtL6K3NrSEamPNeNTtTQwZRa3pQ9V65dCglnnCQ0M3+VF1RqzC7FU0iDKg=="], - "@typescript/vfs": ["@typescript/vfs@1.6.4", "", { "dependencies": { "debug": "^4.4.3" }, "peerDependencies": { "typescript": "*" } }, "sha512-PJFXFS4ZJKiJ9Qiuix6Dz/OwEIqHD7Dme1UwZhTK11vR+5dqW2ACbdndWQexBzCx+CPuMe5WBYQWCsFyGlQLlQ=="], - "@typespec/ts-http-runtime": ["@typespec/ts-http-runtime@0.3.3", "", { "dependencies": { "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.0", "tslib": "^2.6.2" } }, "sha512-91fp6CAAJSRtH5ja95T1FHSKa8aPW9/Zw6cta81jlZTUw/+Vq8jM/AfF/14h2b71wwR84JUTW/3Y8QPhDAawFA=="], "@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="], @@ -965,28 +975,18 @@ "any-base": ["any-base@1.1.0", "", {}, "sha512-uMgjozySS8adZZYePpaWs8cxB9/kdzmpX6SgJZ+wbz1K5eYk5QMYDVJaZKhxyIHUdnnJkfR7SVgStgH7LkGUyg=="], - "archiver": ["archiver@7.0.1", "", { "dependencies": { "archiver-utils": "^5.0.2", "async": "^3.2.4", "buffer-crc32": "^1.0.0", "readable-stream": "^4.0.0", "readdir-glob": "^1.1.2", "tar-stream": "^3.0.0", "zip-stream": "^6.0.1" } }, "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ=="], - - "archiver-utils": ["archiver-utils@5.0.2", "", { "dependencies": { "glob": "^10.0.0", "graceful-fs": "^4.2.0", "is-stream": "^2.0.1", "lazystream": "^1.0.0", "lodash": "^4.17.15", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" } }, "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA=="], - "arctic": ["arctic@2.3.4", "", { "dependencies": { "@oslojs/crypto": "1.0.1", "@oslojs/encoding": "1.1.0", "@oslojs/jwt": "0.2.0" } }, "sha512-+p30BOWsctZp+CVYCt7oAean/hWGW42sH5LAcRQX56ttEkFJWbzXBhmSpibbzwSJkRrotmsA+oAoJoVsU0f5xA=="], 
"argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="], - "async": ["async@3.2.6", "", {}, "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="], - "atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="], "avvio": ["avvio@9.2.0", "", { "dependencies": { "@fastify/error": "^4.0.0", "fastq": "^1.17.1" } }, "sha512-2t/sy01ArdHHE0vRH5Hsay+RtCZt3dLPji7W7/MMOCEgze5b7SNDC4j5H6FnVgPkI1MTNFGzHdHrVXDDl7QSSQ=="], "await-to-js": ["await-to-js@3.0.0", "", {}, "sha512-zJAaP9zxTcvTHRlejau3ZOY4V7SRpiByf3/dxx2uyKxxor19tpmpV2QRsTKikckwhaPmr2dVpxxMr7jOCYVp5g=="], - "aws-ssl-profiles": ["aws-ssl-profiles@1.1.2", "", {}, "sha512-NZKeq9AfyQvEeNlN0zSYAaWrmBffJh3IELMZfRpJVWgrpEbtEpnjvzqBPf+mxoI287JohRDoa+/nsfqqiZmF6g=="], - "aws4fetch": ["aws4fetch@1.0.20", "", {}, "sha512-/djoAN709iY65ETD6LKCtyyEI04XIBP5xVvfmNxsEP0uJB5tyaGBztSryRr4HqMStr9R06PisQE7m9zDTXKu6g=="], - "b4a": ["b4a@1.7.5", "", { "peerDependencies": { "react-native-b4a": "*" }, "optionalPeers": ["react-native-b4a"] }, "sha512-iEsKNwDh1wiWTps1/hdkNdmBgDlDVZP5U57ZVOlt+dNFqpc/lpPouCIxZw+DYBgc4P9NDfIZMPNR4CHNhzwLIA=="], - "babel-plugin-jsx-dom-expressions": ["babel-plugin-jsx-dom-expressions@0.40.5", "", { "dependencies": { "@babel/helper-module-imports": "7.18.6", "@babel/plugin-syntax-jsx": "^7.18.6", "@babel/types": "^7.20.7", "html-entities": "2.3.3", "parse5": "^7.1.2" }, "peerDependencies": { "@babel/core": "^7.20.12" } }, "sha512-8TFKemVLDYezqqv4mWz+PhRrkryTzivTGu0twyLrOkVZ0P63COx2Y04eVsUjFlwSOXui1z3P3Pn209dokWnirg=="], "babel-plugin-module-resolver": ["babel-plugin-module-resolver@5.0.2", "", { "dependencies": { "find-babel-config": "^2.1.1", "glob": "^9.3.3", "pkg-up": "^3.1.0", "reselect": "^4.1.7", "resolve": "^1.22.8" } }, 
"sha512-9KtaCazHee2xc0ibfqsDeamwDps6FZNo5S0Q81dUqEuFzVwPhcT4J5jOqIVvgCA3Q/wO9hKYxN/Ds3tIsp5ygg=="], @@ -995,8 +995,6 @@ "balanced-match": ["balanced-match@4.0.2", "", { "dependencies": { "jackspeak": "^4.2.3" } }, "sha512-x0K50QvKQ97fdEz2kPehIerj+YTeptKF9hyYkKf6egnwmMWAkADiO0QCzSp0R5xN8FTZgYaBfSaue46Ej62nMg=="], - "bare-events": ["bare-events@2.8.2", "", { "peerDependencies": { "bare-abort-controller": "*" }, "optionalPeers": ["bare-abort-controller"] }, "sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ=="], - "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], "baseline-browser-mapping": ["baseline-browser-mapping@2.9.19", "", { "bin": { "baseline-browser-mapping": "dist/cli.js" } }, "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg=="], @@ -1005,8 +1003,6 @@ "bignumber.js": ["bignumber.js@9.3.1", "", {}, "sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ=="], - "binary": ["binary@0.3.0", "", { "dependencies": { "buffers": "~0.1.1", "chainsaw": "~0.1.0" } }, "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg=="], - "bl": ["bl@6.1.6", "", { "dependencies": { "@types/readable-stream": "^4.0.0", "buffer": "^6.0.3", "inherits": "^2.0.4", "readable-stream": "^4.2.0" } }, "sha512-jLsPgN/YSvPUg9UX0Kd73CXpm2Psg9FxMeCSXnk3WBO3CMT10JMwijubhGfHCnFu6TPn1ei3b975dxv7K2pWVg=="], "bmp-ts": ["bmp-ts@1.0.9", "", {}, "sha512-cTEHk2jLrPyi+12M3dhpEbnnPOsaZuq7C45ylbbQIiWgDFZq4UVYPEY5mlqjvsj/6gJv9qX5sa+ebDzLXT28Vw=="], @@ -1015,8 +1011,6 @@ "bonjour-service": ["bonjour-service@1.3.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "multicast-dns": "^7.2.5" } }, "sha512-3YuAUiSkWykd+2Azjgyxei8OWf8thdn8AITIog2M4UICzoqfjlqr64WIjEXZllf/W6vK1goqleSR6brGomxQqA=="], - "bottleneck": ["bottleneck@2.19.5", "", {}, 
"sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw=="], - "bowser": ["bowser@2.14.1", "", {}, "sha512-tzPjzCxygAKWFOJP011oxFHs57HzIhOEracIgAePE4pqB3LikALKnSzUyU4MGs9/iCEUuHlAJTjTc5M+u7YEGg=="], "brace-expansion": ["brace-expansion@5.0.2", "", { "dependencies": { "balanced-match": "^4.0.2" } }, "sha512-Pdk8c9poy+YhOgVWw1JNN22/HcivgKWwpxKq04M/jTmHyCZn12WPJebZxdjSa5TmBqISrUSgNYU3eRORljfCCw=="], @@ -1027,12 +1021,8 @@ "buffer": ["buffer@6.0.3", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.2.1" } }, "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA=="], - "buffer-crc32": ["buffer-crc32@1.0.0", "", {}, "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w=="], - "buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="], - "buffers": ["buffers@0.1.1", "", {}, "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ=="], - "bun-ffi-structs": ["bun-ffi-structs@0.1.2", "", { "peerDependencies": { "typescript": "^5" } }, "sha512-Lh1oQAYHDcnesJauieA4UNkWGXY9hYck7OA5IaRwE3Bp6K2F2pJSNYqq+hIy7P3uOvo3km3oxS8304g5gDMl/w=="], "bun-pty": ["bun-pty@0.4.8", "", {}, "sha512-rO70Mrbr13+jxHHHu2YBkk2pNqrJE5cJn29WE++PUr+GFA0hq/VgtQPZANJ8dJo6d7XImvBk37Innt8GM7O28w=="], @@ -1063,8 +1053,6 @@ "ccount": ["ccount@2.0.1", "", {}, "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg=="], - "chainsaw": ["chainsaw@0.1.0", "", { "dependencies": { "traverse": ">=0.3.0 <0.4" } }, "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ=="], - "character-entities-html4": ["character-entities-html4@2.1.0", "", {}, "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA=="], 
"character-entities-legacy": ["character-entities-legacy@3.0.0", "", {}, "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ=="], @@ -1089,8 +1077,6 @@ "commander": ["commander@14.0.2", "", {}, "sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ=="], - "compress-commons": ["compress-commons@6.0.2", "", { "dependencies": { "crc-32": "^1.2.0", "crc32-stream": "^6.0.0", "is-stream": "^2.0.1", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" } }, "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg=="], - "confbox": ["confbox@0.2.4", "", {}, "sha512-ysOGlgTFbN2/Y6Cg3Iye8YKulHw+R2fNXHrgSmXISQdMnomY6eNDprVdW9R5xBguEqI954+S6709UyiO7B+6OQ=="], "consola": ["consola@3.4.2", "", {}, "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA=="], @@ -1105,14 +1091,8 @@ "cookie-signature": ["cookie-signature@1.2.2", "", {}, "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg=="], - "core-util-is": ["core-util-is@1.0.3", "", {}, "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="], - "cors": ["cors@2.8.6", "", { "dependencies": { "object-assign": "^4", "vary": "^1" } }, "sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw=="], - "crc-32": ["crc-32@1.2.2", "", { "bin": { "crc32": "bin/crc32.njs" } }, "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ=="], - - "crc32-stream": ["crc32-stream@6.0.0", "", { "dependencies": { "crc-32": "^1.2.0", "readable-stream": "^4.0.0" } }, "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g=="], - "cross-fetch": ["cross-fetch@3.2.0", "", { "dependencies": { "node-fetch": "^2.7.0" } }, 
"sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q=="], "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], @@ -1133,8 +1113,6 @@ "defu": ["defu@6.1.4", "", {}, "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg=="], - "denque": ["denque@2.1.0", "", {}, "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw=="], - "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], "deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="], @@ -1153,9 +1131,9 @@ "dotenv": ["dotenv@17.3.1", "", {}, "sha512-IO8C/dzEb6O3F9/twg6ZLXz164a2fhTnEWb95H23Dm4OuN+92NmEAlTrupP9VW6Jm3sO26tQlqyvyi4CsnY9GA=="], - "drizzle-kit": ["drizzle-kit@1.0.0-beta.12-a5629fb", "", { "dependencies": { "@drizzle-team/brocli": "^0.11.0", "@js-temporal/polyfill": "^0.5.1", "esbuild": "^0.25.10", "tsx": "^4.20.6" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-l+p4QOMvPGYBYEE9NBlU7diu+NSlxuOUwi0I7i01Uj1PpfU0NxhPzaks/9q1MDw4FAPP8vdD0dOhoqosKtRWWQ=="], + "drizzle-kit": ["drizzle-kit@1.0.0-beta.16-ea816b6", "", { "dependencies": { "@drizzle-team/brocli": "^0.11.0", "@js-temporal/polyfill": "^0.5.1", "esbuild": "^0.25.10", "jiti": "^2.6.1" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-GiJQqCNPZP8Kk+i7/sFa3rtXbq26tLDNi3LbMx9aoLuwF2ofk8CS7cySUGdI+r4J3q0a568quC8FZeaFTCw4IA=="], - "drizzle-orm": ["drizzle-orm@1.0.0-beta.12-a5629fb", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@effect/sql": "^0.48.5", "@effect/sql-pg": "^0.49.7", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", 
"@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@sqlitecloud/drivers": ">=1.0.653", "@tidbcloud/serverless": "*", "@tursodatabase/database": ">=0.2.1", "@tursodatabase/database-common": ">=0.2.1", "@tursodatabase/database-wasm": ">=0.2.1", "@types/better-sqlite3": "*", "@types/mssql": "^9.1.4", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=9.3.0", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "mssql": "^11.0.1", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@effect/sql", "@effect/sql-pg", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@sqlitecloud/drivers", "@tidbcloud/serverless", "@tursodatabase/database", "@tursodatabase/database-common", "@tursodatabase/database-wasm", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@upstash/redis", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-wyOAgr9Cy9oEN6z5S0JGhfipLKbRRJtQKgbDO9SXGR9swMBbGNIlXkeMqPRrqYQ8k70mh+7ZJ/eVmJ2F7zR3Vg=="], + "drizzle-orm": ["drizzle-orm@1.0.0-beta.16-ea816b6", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@effect/sql": "^0.48.5", "@effect/sql-pg": "^0.49.7", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", 
"@sinclair/typebox": ">=0.34.8", "@sqlitecloud/drivers": ">=1.0.653", "@tidbcloud/serverless": "*", "@tursodatabase/database": ">=0.2.1", "@tursodatabase/database-common": ">=0.2.1", "@tursodatabase/database-wasm": ">=0.2.1", "@types/better-sqlite3": "*", "@types/mssql": "^9.1.4", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "arktype": ">=2.0.0", "better-sqlite3": ">=9.3.0", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "mssql": "^11.0.1", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5", "typebox": ">=1.0.0", "valibot": ">=1.0.0-beta.7", "zod": "^3.25.0 || ^4.0.0" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@effect/sql", "@effect/sql-pg", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@sinclair/typebox", "@sqlitecloud/drivers", "@tidbcloud/serverless", "@tursodatabase/database", "@tursodatabase/database-common", "@tursodatabase/database-wasm", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@upstash/redis", "@vercel/postgres", "@xata.io/client", "arktype", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "mysql2", "pg", "postgres", "sql.js", "sqlite3", "typebox", "valibot", "zod"] }, "sha512-k9gT4f0O9Qvah5YK/zL+FZonQ8TPyVxcG/ojN4dzO0fHP8hs8tBno8lqmJo53g0JLWv3Q2nsTUoyBRKM2TljFw=="], "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], @@ -1165,6 +1143,8 @@ "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], + "effect": ["effect@4.0.0-beta.31", "", { "dependencies": { 
"@standard-schema/spec": "^1.1.0", "fast-check": "^4.5.3", "find-my-way-ts": "^0.1.6", "ini": "^6.0.0", "kubernetes-types": "^1.30.0", "msgpackr": "^1.11.8", "multipasta": "^0.2.7", "toml": "^3.0.0", "uuid": "^13.0.0", "yaml": "^2.8.2" } }, "sha512-w3QwJnlaLtWWiUSzhCXUTIisnULPsxLzpO6uqaBFjXybKx6FvCqsLJT6v4dV7G9eA9jeTtG6Gv7kF+jGe3HxzA=="], + "electron-to-chromium": ["electron-to-chromium@1.5.286", "", {}, "sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A=="], "emoji-regex": ["emoji-regex@10.6.0", "", {}, "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A=="], @@ -1177,7 +1157,7 @@ "engine.io-parser": ["engine.io-parser@5.2.3", "", {}, "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q=="], - "entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="], + "entities": ["entities@7.0.1", "", {}, "sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA=="], "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], @@ -1199,8 +1179,6 @@ "events": ["events@3.3.0", "", {}, "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="], - "events-universal": ["events-universal@1.0.1", "", { "dependencies": { "bare-events": "^2.7.0" } }, "sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw=="], - "eventsource": ["eventsource@3.0.7", "", { "dependencies": { "eventsource-parser": "^3.0.1" } }, "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA=="], "eventsource-parser": ["eventsource-parser@3.0.6", "", {}, "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg=="], @@ 
-1219,21 +1197,21 @@ "extend-shallow": ["extend-shallow@2.0.1", "", { "dependencies": { "is-extendable": "^0.1.0" } }, "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug=="], + "fast-check": ["fast-check@4.6.0", "", { "dependencies": { "pure-rand": "^8.0.0" } }, "sha512-h7H6Dm0Fy+H4ciQYFxFjXnXkzR2kr9Fb22c0UBpHnm59K2zpr2t13aPTHlltFiNT6zuxp6HMPAVVvgur4BLdpA=="], + "fast-content-type-parse": ["fast-content-type-parse@3.0.0", "", {}, "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg=="], "fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="], "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], - "fast-fifo": ["fast-fifo@1.3.2", "", {}, "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ=="], - "fast-json-stringify": ["fast-json-stringify@6.3.0", "", { "dependencies": { "@fastify/merge-json-schemas": "^0.2.0", "ajv": "^8.12.0", "ajv-formats": "^3.0.1", "fast-uri": "^3.0.0", "json-schema-ref-resolver": "^3.0.0", "rfdc": "^1.2.0" } }, "sha512-oRCntNDY/329HJPlmdNLIdogNtt6Vyjb1WuT01Soss3slIdyUp8kAcDU3saQTOquEK8KFVfwIIF7FebxUAu+yA=="], "fast-querystring": ["fast-querystring@1.1.2", "", { "dependencies": { "fast-decode-uri-component": "^1.0.1" } }, "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg=="], "fast-uri": ["fast-uri@3.1.0", "", {}, "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA=="], - "fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="], + 
"fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="], "fastify": ["fastify@5.7.4", "", { "dependencies": { "@fastify/ajv-compiler": "^4.0.5", "@fastify/error": "^4.0.0", "@fastify/fast-json-stringify-compiler": "^5.0.0", "@fastify/proxy-addr": "^5.0.0", "abstract-logging": "^2.0.1", "avvio": "^9.0.0", "fast-json-stringify": "^6.0.0", "find-my-way": "^9.0.0", "light-my-request": "^6.0.0", "pino": "^10.1.0", "process-warning": "^5.0.0", "rfdc": "^1.3.1", "secure-json-parse": "^4.0.0", "semver": "^7.6.0", "toad-cache": "^3.7.0" } }, "sha512-e6l5NsRdaEP8rdD8VR0ErJASeyaRbzXYpmkrpr2SuvuMq6Si3lvsaVy5C+7gLanEkvjpMDzBXWE5HPeb/hgTxA=="], @@ -1253,6 +1231,8 @@ "find-my-way": ["find-my-way@9.4.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-querystring": "^1.0.0", "safe-regex2": "^5.0.0" } }, "sha512-5Ye4vHsypZRYtS01ob/iwHzGRUDELlsoCftI/OZFhcLs1M0tkGPcXldE80TAZC5yYuJMBPJQQ43UHlqbJWiX2w=="], + "find-my-way-ts": ["find-my-way-ts@0.1.6", "", {}, "sha512-a85L9ZoXtNAey3Y6Z+eBWW658kO/MwR7zIafkIUPUMf3isZG0NCs2pjW2wtjxAKuJPxMAsHUIP4ZPGv0o5gyTA=="], + "find-up": ["find-up@3.0.0", "", { "dependencies": { "locate-path": "^3.0.0" } }, "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg=="], "foreground-child": ["foreground-child@3.3.1", "", { "dependencies": { "cross-spawn": "^7.0.6", "signal-exit": "^4.0.1" } }, "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw=="], @@ -1265,8 +1245,6 @@ "fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="], - "fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], - "function-bind": 
["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], "fuzzysort": ["fuzzysort@3.1.0", "", {}, "sha512-sR9BNCjBg6LNgwvxlBd0sBABvQitkLzoVY9MYYROQVX/FvfJ4Mai9LsGhDgd8qYdds0bY77VzYd5iuB+v5rwQQ=="], @@ -1275,8 +1253,6 @@ "gcp-metadata": ["gcp-metadata@8.1.2", "", { "dependencies": { "gaxios": "^7.0.0", "google-logging-utils": "^1.0.0", "json-bigint": "^1.0.0" } }, "sha512-zV/5HKTfCeKWnxG0Dmrw51hEWFGfcF2xiXqcA3+J90WDuP0SvoiSO5ORvcBsifmx/FoIjgQN3oNOGaQ5PhLFkg=="], - "generate-function": ["generate-function@2.3.1", "", { "dependencies": { "is-property": "^1.0.2" } }, "sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ=="], - "gensync": ["gensync@1.0.0-beta.2", "", {}, "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="], "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="], @@ -1289,8 +1265,6 @@ "get-stream": ["get-stream@8.0.1", "", {}, "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA=="], - "get-tsconfig": ["get-tsconfig@4.13.6", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw=="], - "gifwrap": ["gifwrap@0.10.1", "", { "dependencies": { "image-q": "^4.0.0", "omggif": "^1.0.10" } }, "sha512-2760b1vpJHNmLzZ/ubTtNnEx5WApN/PYWJvXvgS+tL1egTTthayFYIQQNi136FLEDcN/IyEY2EcGpIITD6eYUw=="], "giget": ["giget@2.0.0", "", { "dependencies": { "citty": "^0.1.6", "consola": "^3.4.0", "defu": "^6.1.4", "node-fetch-native": "^1.6.6", "nypm": "^0.6.0", "pathe": "^2.0.3" }, "bin": { "giget": "dist/cli.mjs" } }, "sha512-L5bGsVkxJbJgdnwyuheIunkGatUF/zssUoxxjACCseZYAVbaqdh9Tsmmlkl8vYan09H7sbvKt4pS8GqKLBrEzA=="], @@ -1303,8 +1277,6 @@ "gopd": ["gopd@1.2.0", "", {}, 
"sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], - "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="], - "graphql": ["graphql@16.12.0", "", {}, "sha512-DKKrynuQRne0PNpEbzuEdHlYOMksHSUI8Zc9Unei5gTsMNA2/vMpoMz/yKba50pejK56qj98qM0SjYxAKi13gQ=="], "graphql-request": ["graphql-request@6.1.0", "", { "dependencies": { "@graphql-typed-document-node/core": "^3.2.0", "cross-fetch": "^3.1.5" }, "peerDependencies": { "graphql": "14 - 16" } }, "sha512-p+XPfS4q7aIpKVcgmnZKhMNqhltk20hfXtkaIkTfjjmiKMJ5xrt5c743cL03y/K7y1rg3WrIC49xGiEQ4mxdNw=="], @@ -1349,6 +1321,8 @@ "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], + "ini": ["ini@6.0.0", "", {}, "sha512-IBTdIkzZNOpqm7q3dRqJvMaldXjDHWkEDfrwGEQTs5eaQMWV+djAhR+wahyNNMAa+qpbDUhBMVt4ZKNwpPm7xQ=="], + "ipaddr.js": ["ipaddr.js@2.3.0", "", {}, "sha512-Zv/pA+ciVFbCSBBjGfaKUya/CcGmUHzTydLMaTwrUUEM2DIEO3iZvueGxmacvmN50fGpGVKeTXpb2LcYQxeVdg=="], "is-core-module": ["is-core-module@2.16.1", "", { "dependencies": { "hasown": "^2.0.2" } }, "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w=="], @@ -1371,21 +1345,17 @@ "is-promise": ["is-promise@4.0.0", "", {}, "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ=="], - "is-property": ["is-property@1.0.2", "", {}, "sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g=="], - "is-stream": ["is-stream@3.0.0", "", {}, "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA=="], "is-wsl": ["is-wsl@3.1.1", "", { "dependencies": { "is-inside-container": "^1.0.0" } }, "sha512-e6rvdUCiQCAuumZslxRJWR/Doq4VpPR82kqclvcS0efgt430SlGIk05vdCN58+VrzgtIcfNODjozVielycD4Sw=="], "is64bit": ["is64bit@2.0.0", "", { 
"dependencies": { "system-architecture": "^0.1.0" } }, "sha512-jv+8jaWCl0g2lSBkNSVXdzfBA0npK1HGC2KtWM9FumFRoGS94g3NbCCLVnCYHLjp4GrW2KZeeSTMo5ddtznmGw=="], - "isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], - "isexe": ["isexe@4.0.0", "", {}, "sha512-FFUtZMpoZ8RqHS3XeXEmHWLA4thH+ZxCv2lOiPIn1Xc7CxrqhWzNSDzD+/chS/zbYezmiwWLdQC09JdQKmthOw=="], "isomorphic-ws": ["isomorphic-ws@5.0.0", "", { "peerDependencies": { "ws": "*" } }, "sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw=="], - "jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], + "jackspeak": ["jackspeak@4.2.3", "", { "dependencies": { "@isaacs/cliui": "^9.0.0" } }, "sha512-ykkVRwrYvFm1nb2AJfKKYPr0emF6IiXDYUaFx4Zn9ZuIH7MrzEZ3sD5RlqGXNRpHtvUHJyOnCEFxOlNDtGo7wg=="], "jimp": ["jimp@1.6.0", "", { "dependencies": { "@jimp/core": "1.6.0", "@jimp/diff": "1.6.0", "@jimp/js-bmp": "1.6.0", "@jimp/js-gif": "1.6.0", "@jimp/js-jpeg": "1.6.0", "@jimp/js-png": "1.6.0", "@jimp/js-tiff": "1.6.0", "@jimp/plugin-blit": "1.6.0", "@jimp/plugin-blur": "1.6.0", "@jimp/plugin-circle": "1.6.0", "@jimp/plugin-color": "1.6.0", "@jimp/plugin-contain": "1.6.0", "@jimp/plugin-cover": "1.6.0", "@jimp/plugin-crop": "1.6.0", "@jimp/plugin-displace": "1.6.0", "@jimp/plugin-dither": "1.6.0", "@jimp/plugin-fisheye": "1.6.0", "@jimp/plugin-flip": "1.6.0", "@jimp/plugin-hash": "1.6.0", "@jimp/plugin-mask": "1.6.0", "@jimp/plugin-print": "1.6.0", "@jimp/plugin-quantize": "1.6.0", "@jimp/plugin-resize": "1.6.0", "@jimp/plugin-rotate": "1.6.0", "@jimp/plugin-threshold": "1.6.0", "@jimp/types": "1.6.0", "@jimp/utils": "1.6.0" } }, "sha512-YcwCHw1kiqEeI5xRpDlPPBGL2EOpBKLwO4yIBJcXWHPj5PnA5urGq0jbyhM5KoNpypQ6VboSoxc9D8HyfvngSg=="], @@ 
-1427,11 +1397,9 @@ "jws": ["jws@4.0.1", "", { "dependencies": { "jwa": "^2.0.1", "safe-buffer": "^5.0.1" } }, "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA=="], - "jwt-decode": ["jwt-decode@3.1.2", "", {}, "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A=="], - "kind-of": ["kind-of@6.0.3", "", {}, "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw=="], - "lazystream": ["lazystream@1.0.1", "", { "dependencies": { "readable-stream": "^2.0.5" } }, "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw=="], + "kubernetes-types": ["kubernetes-types@1.30.0", "", {}, "sha512-Dew1okvhM/SQcIa2rcgujNndZwU8VnSapDgdxlYoB84ZlpAD43U6KLAFqYo17ykSFGHNPrg0qry0bP+GJd9v7Q=="], "light-my-request": ["light-my-request@6.6.0", "", { "dependencies": { "cookie": "^1.0.1", "process-warning": "^4.0.0", "set-cookie-parser": "^2.6.0" } }, "sha512-CHYbu8RtboSIoVsHZ6Ye4cj4Aw/yg2oAFimlF7mNvfDV192LR7nDiKtSIfCuLT7KokPSTn/9kfVLm5OGN0A28A=="], @@ -1453,13 +1421,9 @@ "lodash.once": ["lodash.once@4.1.1", "", {}, "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="], - "long": ["long@5.3.2", "", {}, "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="], - "loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="], - "lru-cache": ["lru-cache@11.2.6", "", {}, "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ=="], - - "lru.min": ["lru.min@1.1.4", "", {}, "sha512-DqC6n3QQ77zdFpCMASA1a3Jlb64Hv2N2DciFGkO/4L9+q/IpIAuRlKOvCXabtRW6cQf8usbmM6BE/TOPysCdIA=="], + "lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": 
"^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="], "lru_map": ["lru_map@0.4.1", "", {}, "sha512-I+lBvqMMFfqaV8CJCISjI3wbjmwVu/VyOoU7+qtu9d7ioW5klMgsTTiUOUp+DJvfTTzKXoPbyC6YfgkNcyPSOg=="], @@ -1499,21 +1463,19 @@ "minimatch": ["minimatch@10.0.3", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw=="], - "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="], - "minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="], - "mkdirp": ["mkdirp@0.5.6", "", { "dependencies": { "minimist": "^1.2.6" }, "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw=="], - "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], + "msgpackr": ["msgpackr@1.11.9", "", { "optionalDependencies": { "msgpackr-extract": "^3.0.2" } }, "sha512-FkoAAyyA6HM8wL882EcEyFZ9s7hVADSwG9xrVx3dxxNQAtgADTrJoEWivID82Iv1zWDsv/OtbrrcZAzGzOMdNw=="], + + "msgpackr-extract": ["msgpackr-extract@3.0.3", "", { "dependencies": { "node-gyp-build-optional-packages": "5.2.2" }, "optionalDependencies": { "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" }, "bin": { "download-msgpackr-prebuilds": "bin/download-prebuilds.js" } }, "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA=="], + "mssql": ["mssql@11.0.1", 
"", { "dependencies": { "@tediousjs/connection-string": "^0.5.0", "commander": "^11.0.0", "debug": "^4.3.3", "rfdc": "^1.3.0", "tarn": "^3.0.2", "tedious": "^18.2.1" }, "bin": { "mssql": "bin/mssql" } }, "sha512-KlGNsugoT90enKlR8/G36H0kTxPthDhmtNUCwEHvgRza5Cjpjoj+P2X6eMpFUDN7pFrJZsKadL4x990G8RBE1w=="], "multicast-dns": ["multicast-dns@7.2.5", "", { "dependencies": { "dns-packet": "^5.2.2", "thunky": "^1.0.2" }, "bin": { "multicast-dns": "cli.js" } }, "sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg=="], - "mysql2": ["mysql2@3.14.4", "", { "dependencies": { "aws-ssl-profiles": "^1.1.1", "denque": "^2.1.0", "generate-function": "^2.3.1", "iconv-lite": "^0.7.0", "long": "^5.2.1", "lru.min": "^1.0.0", "named-placeholders": "^1.1.3", "seq-queue": "^0.0.5", "sqlstring": "^2.3.2" } }, "sha512-Cs/jx3WZPNrYHVz+Iunp9ziahaG5uFMvD2R8Zlmc194AqXNxt9HBNu7ZsPYrUtmJsF0egETCWIdMIYAwOGjL1w=="], - - "named-placeholders": ["named-placeholders@1.1.6", "", { "dependencies": { "lru.min": "^1.1.0" } }, "sha512-Tz09sEL2EEuv5fFowm419c1+a/jSMiBjI9gHxVLrVdbUkkNUUfjsVYs9pVZu5oCon/kmRh9TfLEObFtkVxmY0w=="], + "multipasta": ["multipasta@0.2.7", "", {}, "sha512-KPA58d68KgGil15oDqXjkUBEBYc00XvbPj5/X+dyzeo/lWm9Nc25pQRlf1D+gv4OpK7NM0J1odrbu9JNNGvynA=="], "nanoevents": ["nanoevents@7.0.1", "", {}, "sha512-o6lpKiCxLeijK4hgsqfR6CNToPyRU3keKyyI6uwuHRvpRTbZ0wXw51WRgyldVugZqoJfkGFrjrIenYH3bfEO3Q=="], @@ -1531,9 +1493,9 @@ "node-gyp-build": ["node-gyp-build@4.8.4", "", { "bin": { "node-gyp-build": "bin.js", "node-gyp-build-optional": "optional.js", "node-gyp-build-test": "build-test.js" } }, "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ=="], - "node-releases": ["node-releases@2.0.27", "", {}, "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA=="], + "node-gyp-build-optional-packages": ["node-gyp-build-optional-packages@5.2.2", "", { "dependencies": { "detect-libc": 
"^2.0.1" }, "bin": { "node-gyp-build-optional-packages": "bin.js", "node-gyp-build-optional-packages-optional": "optional.js", "node-gyp-build-optional-packages-test": "build-test.js" } }, "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw=="], - "normalize-path": ["normalize-path@3.0.0", "", {}, "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="], + "node-releases": ["node-releases@2.0.27", "", {}, "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA=="], "npm-run-path": ["npm-run-path@5.3.0", "", { "dependencies": { "path-key": "^4.0.0" } }, "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ=="], @@ -1627,22 +1589,20 @@ "pngjs": ["pngjs@7.0.0", "", {}, "sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow=="], - "postgres": ["postgres@3.4.7", "", {}, "sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw=="], - "powershell-utils": ["powershell-utils@0.1.0", "", {}, "sha512-dM0jVuXJPsDN6DvRpea484tCUaMiXWjuCn++HGTqUWzGDjv5tZkEZldAJ/UMlqRYGFrD/etByo4/xOuC/snX2A=="], "prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="], "process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="], - "process-nextick-args": ["process-nextick-args@2.0.1", "", {}, "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="], - "process-warning": ["process-warning@5.0.0", "", {}, "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="], "property-information": ["property-information@7.1.0", "", {}, 
"sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="], "proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="], + "pure-rand": ["pure-rand@8.0.0", "", {}, "sha512-7rgWlxG2gAvFPIQfUreo1XYlNvrQ9VnQPFWdncPkdl3icucLK0InOxsaafbvxGTnI6Bk/Rxmslg0lQlRCuzOXw=="], + "qs": ["qs@6.15.0", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-mAZTtNCeetKMH+pSjrb76NAM8V9a05I9aBZOHztWy/UqcJdQYNsf59vrRKWnojAT9Y+GbIvoTBC++CPHqpDBhQ=="], "quansync": ["quansync@0.2.11", "", {}, "sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA=="], @@ -1663,8 +1623,6 @@ "readable-web-to-node-stream": ["readable-web-to-node-stream@3.0.4", "", { "dependencies": { "readable-stream": "^4.7.0" } }, "sha512-9nX56alTf5bwXQ3ZDipHJhusu9NTQJ/CVPtb/XHAJCXihZeitfJvIRS4GqQ/mfIoOE3IelHMrpayVrosdHBuLw=="], - "readdir-glob": ["readdir-glob@1.1.3", "", { "dependencies": { "minimatch": "^5.1.0" } }, "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA=="], - "readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="], "real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="], @@ -1683,8 +1641,6 @@ "resolve": ["resolve@1.22.11", "", { "dependencies": { "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ=="], - "resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="], - "ret": ["ret@0.5.0", "", {}, 
"sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw=="], "reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="], @@ -1719,8 +1675,6 @@ "send": ["send@1.2.1", "", { "dependencies": { "debug": "^4.4.3", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", "http-errors": "^2.0.1", "mime-types": "^3.0.2", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", "statuses": "^2.0.2" } }, "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ=="], - "seq-queue": ["seq-queue@0.0.5", "", {}, "sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q=="], - "seroval": ["seroval@1.3.2", "", {}, "sha512-RbcPH1n5cfwKrru7v7+zrZvjLurgHhGyso3HTyGtRivGWgYjbOmGuivCQaORNELjNONoK35nj28EoWul9sb1zQ=="], "seroval-plugins": ["seroval-plugins@1.3.3", "", { "peerDependencies": { "seroval": "^1.0" } }, "sha512-16OL3NnUBw8JG1jBLUoZJsLnQq0n5Ua6aHalhJK4fMQkz1lqR7Osz1sA30trBtd9VUDc2NgkuRCn8+/pBwqZ+w=="], @@ -1767,14 +1721,10 @@ "sprintf-js": ["sprintf-js@1.1.3", "", {}, "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA=="], - "sqlstring": ["sqlstring@2.3.3", "", {}, "sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg=="], - "stage-js": ["stage-js@1.0.1", "", {}, "sha512-cz14aPp/wY0s3bkb/B93BPP5ZAEhgBbRmAT3CCDqert8eCAqIpQ0RB2zpK8Ksxf+Pisl5oTzvPHtL4CVzzeHcw=="], "statuses": ["statuses@2.0.2", "", {}, "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="], - "streamx": ["streamx@2.23.0", "", { "dependencies": { "events-universal": "^1.0.0", "fast-fifo": "^1.3.2", "text-decoder": "^1.1.0" } }, "sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg=="], - "string-width": ["string-width@7.2.0", 
"", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], "string-width-cjs": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], @@ -1799,14 +1749,10 @@ "system-architecture": ["system-architecture@0.1.0", "", {}, "sha512-ulAk51I9UVUyJgxlv9M6lFot2WP3e7t8Kz9+IS6D4rVba1tR9kON+Ey69f+1R4Q8cd45Lod6a4IcJIxnzGc/zA=="], - "tar-stream": ["tar-stream@3.1.7", "", { "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ=="], - "tarn": ["tarn@3.0.2", "", {}, "sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ=="], "tedious": ["tedious@18.6.2", "", { "dependencies": { "@azure/core-auth": "^1.7.2", "@azure/identity": "^4.2.1", "@azure/keyvault-keys": "^4.4.0", "@js-joda/core": "^5.6.1", "@types/node": ">=18", "bl": "^6.0.11", "iconv-lite": "^0.6.3", "js-md4": "^0.3.2", "native-duplexpair": "^1.0.0", "sprintf-js": "^1.1.3" } }, "sha512-g7jC56o3MzLkE3lHkaFe2ZdOVFBahq5bsB60/M4NYUbocw/MCrS89IOEQUFr+ba6pb8ZHczZ/VqCyYeYq0xBAg=="], - "text-decoder": ["text-decoder@1.2.7", "", { "dependencies": { "b4a": "^1.6.4" } }, "sha512-vlLytXkeP4xvEq2otHeJfSQIRyWxo/oZGEbXrtEEF9Hnmrdly59sUbzZ/QgyWuLYHctCHxFF4tRQZNQ9k60ExQ=="], - "thread-stream": ["thread-stream@4.0.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-4iMVL6HAINXWf1ZKZjIPcz5wYaOdPhtO8ATvZ+Xqp3BTdaqtAwQkNmKORqcIo5YkQqGXq5cwfswDwMqqQNrpJA=="], "three": ["three@0.177.0", "", {}, "sha512-EiXv5/qWAaGI+Vz2A+JfavwYCMdGjxVsrn3oBwllUoqYeaBO75J63ZfyaQKoiLrqNHoTlUc6PFgMXnS0kI45zg=="], @@ -1825,9 +1771,9 @@ "token-types": 
["token-types@4.2.1", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-6udB24Q737UD/SDsKAHI9FCRP7Bqc9D/MQUV02ORQg5iskjtLJlZJNdN4kKtcdtwCeWIwIHDGaUsTsCCAa8sFQ=="], - "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], + "toml": ["toml@3.0.0", "", {}, "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w=="], - "traverse": ["traverse@0.3.9", "", {}, "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ=="], + "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], "tree-sitter-bash": ["tree-sitter-bash@0.25.0", "", { "dependencies": { "node-addon-api": "^8.2.1", "node-gyp-build": "^4.8.2" }, "peerDependencies": { "tree-sitter": "^0.25.0" }, "optionalPeers": ["tree-sitter"] }, "sha512-gZtlj9+qFS81qKxpLfD6H0UssQ3QBc/F0nKkPsiFDyfQF2YBqYvglFJUzchrPpVhZe9kLZTrJ9n2J6lmka69Vg=="], @@ -1837,8 +1783,6 @@ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - "tsx": ["tsx@4.21.0", "", { "dependencies": { "esbuild": "~0.27.0", "get-tsconfig": "^4.7.5" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "bin": { "tsx": "dist/cli.mjs" } }, "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw=="], - "tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="], "turbo": ["turbo@2.8.13", "", { "optionalDependencies": { "turbo-darwin-64": "2.8.13", "turbo-darwin-arm64": "2.8.13", "turbo-linux-64": "2.8.13", "turbo-linux-arm64": "2.8.13", "turbo-windows-64": "2.8.13", "turbo-windows-arm64": "2.8.13" }, "bin": { "turbo": "bin/turbo" } }, 
"sha512-nyM99hwFB9/DHaFyKEqatdayGjsMNYsQ/XBNO6MITc7roncZetKb97MpHxWf3uiU+LB9c9HUlU3Jp2Ixei2k1A=="], @@ -1883,15 +1827,11 @@ "unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], - "unzip-stream": ["unzip-stream@0.3.4", "", { "dependencies": { "binary": "^0.3.0", "mkdirp": "^0.5.1" } }, "sha512-PyofABPVv+d7fL7GOpusx7eRT9YETY2X04PhwbSipdj6bMxVCFJrr+nm0Mxqbf9hUiTin/UsnuFWBXlDZFy0Cw=="], - "update-browserslist-db": ["update-browserslist-db@1.2.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w=="], "utif2": ["utif2@4.1.0", "", { "dependencies": { "pako": "^1.0.11" } }, "sha512-+oknB9FHrJ7oW7A2WZYajOcv4FcDR4CfoGB0dPNfxbi4GO05RRnFmt5oa23+9w32EanrYcSJWspUiJkLMs+37w=="], - "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], - - "uuid": ["uuid@8.3.2", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="], + "uuid": ["uuid@13.0.0", "", { "bin": { "uuid": "dist-node/bin/uuid" } }, "sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w=="], "vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], @@ -1939,28 +1879,20 @@ "yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="], + "yaml": ["yaml@2.8.2", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A=="], + "yargs": ["yargs@18.0.0", "", { "dependencies": { "cliui": "^9.0.1", 
"escalade": "^3.1.1", "get-caller-file": "^2.0.5", "string-width": "^7.2.0", "y18n": "^5.0.5", "yargs-parser": "^22.0.0" } }, "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg=="], "yargs-parser": ["yargs-parser@22.0.0", "", {}, "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw=="], "yoga-layout": ["yoga-layout@3.2.1", "", {}, "sha512-0LPOt3AxKqMdFBZA3HBAt/t/8vIKq7VaQYbuA8WxCgung+p9TVyKRYdpvCb80HcdTN2NkbIKbhNwKUfm3tQywQ=="], - "zip-stream": ["zip-stream@6.0.1", "", { "dependencies": { "archiver-utils": "^5.0.0", "compress-commons": "^6.0.2", "readable-stream": "^4.0.0" } }, "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA=="], - "zod": ["zod@4.1.8", "", {}, "sha512-5R1P+WwQqmmMIEACyzSvo4JXHY5WiAFHRMg+zBZKgKS+Q1viRa0C1hmUKtHltoIFKtIdki3pRxkmpP74jnNYHQ=="], "zod-to-json-schema": ["zod-to-json-schema@3.24.5", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g=="], "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="], - "@actions/artifact/@actions/core": ["@actions/core@2.0.3", "", { "dependencies": { "@actions/exec": "^2.0.0", "@actions/http-client": "^3.0.2" } }, "sha512-Od9Thc3T1mQJYddvVPM4QGiLUewdh+3txmDYHHxoNdkqysR1MbCT+rFOtNUxYAz+7+6RIsqipVahY2GJqGPyxA=="], - - "@actions/core/@actions/http-client": ["@actions/http-client@2.2.3", "", { "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" } }, "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA=="], - - "@actions/github/@actions/http-client": ["@actions/http-client@2.2.3", "", { "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" } }, "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA=="], - - 
"@actions/http-client/undici": ["undici@6.23.0", "", {}, "sha512-VfQPToRA5FZs/qJxLIinmU59u0r7LXqoJkCzinq3ckNJp3vKEh7jTWN589YQ5+aoAC/TGRLyJLCPKcLQbM8r9g=="], - "@ai-sdk/azure/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.20", "", { "dependencies": { "@ai-sdk/provider": "2.0.1", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-iXHVe0apM2zUEzauqJwqmpC37A5rihrStAih5Ks+JE32iTe4LZ58y17UGBjpQQTCRw9YxMeo2UFLxLpBluyvLQ=="], "@ai-sdk/cerebras/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.20", "", { "dependencies": { "@ai-sdk/provider": "2.0.1", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-iXHVe0apM2zUEzauqJwqmpC37A5rihrStAih5Ks+JE32iTe4LZ58y17UGBjpQQTCRw9YxMeo2UFLxLpBluyvLQ=="], @@ -2005,18 +1937,14 @@ "@aws-sdk/credential-provider-cognito-identity/@aws-sdk/client-cognito-identity": ["@aws-sdk/client-cognito-identity@3.980.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "^3.973.5", "@aws-sdk/credential-provider-node": "^3.972.4", "@aws-sdk/middleware-host-header": "^3.972.3", "@aws-sdk/middleware-logger": "^3.972.3", "@aws-sdk/middleware-recursion-detection": "^3.972.3", "@aws-sdk/middleware-user-agent": "^3.972.5", "@aws-sdk/region-config-resolver": "^3.972.3", "@aws-sdk/types": "^3.973.1", "@aws-sdk/util-endpoints": "3.980.0", "@aws-sdk/util-user-agent-browser": "^3.972.3", "@aws-sdk/util-user-agent-node": "^3.972.3", "@smithy/config-resolver": "^4.4.6", "@smithy/core": "^3.22.0", "@smithy/fetch-http-handler": "^5.3.9", "@smithy/hash-node": "^4.2.8", "@smithy/invalid-dependency": "^4.2.8", "@smithy/middleware-content-length": "^4.2.8", "@smithy/middleware-endpoint": "^4.4.12", "@smithy/middleware-retry": "^4.4.29", "@smithy/middleware-serde": "^4.2.9", "@smithy/middleware-stack": "^4.2.8", "@smithy/node-config-provider": 
"^4.3.8", "@smithy/node-http-handler": "^4.4.8", "@smithy/protocol-http": "^5.3.8", "@smithy/smithy-client": "^4.11.1", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.28", "@smithy/util-defaults-mode-node": "^4.2.31", "@smithy/util-endpoints": "^3.2.8", "@smithy/util-middleware": "^4.2.8", "@smithy/util-retry": "^4.2.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-nLgMW2drTzv+dTo3ORCcotQPcrUaTQ+xoaDTdSaUXdZO7zbbVyk7ysE5GDTnJdZWcUjHOSB8xfNQhOTTNVPhFw=="], - "@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.3.6", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA=="], + "@azure/msal-node/uuid": ["uuid@8.3.2", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="], "@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - "@babel/helper-compilation-targets/lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="], - "@babel/helper-compilation-targets/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], "@babel/helper-create-class-features-plugin/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - "@bufbuild/protoplugin/typescript": ["typescript@5.4.5", "", { "bin": 
{ "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ=="], - "@gitlab/gitlab-ai-provider/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], "@hey-api/json-schema-ref-parser/js-yaml": ["js-yaml@4.1.1", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA=="], @@ -2027,10 +1955,6 @@ "@hono/zod-validator/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], - "@isaacs/cliui/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], - - "@isaacs/cliui/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], - "@jimp/plugin-blit/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], "@jimp/plugin-circle/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], @@ -2079,9 +2003,9 @@ "@octokit/plugin-paginate-rest/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="], - "@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, 
"sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="], + "@octokit/plugin-request-log/@octokit/core": ["@octokit/core@7.0.6", "", { "dependencies": { "@octokit/auth-token": "^6.0.0", "@octokit/graphql": "^9.0.3", "@octokit/request": "^10.0.6", "@octokit/request-error": "^7.0.2", "@octokit/types": "^16.0.0", "before-after-hook": "^4.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-DhGl4xMVFGVIyMwswXeyzdL4uXD5OGILGX5N8Y+f6W7LhC1Ze2poSNrkF/fedpVDHEEZ+PHFW0vL14I+mm8K3Q=="], - "@octokit/plugin-retry/@octokit/types": ["@octokit/types@6.41.0", "", { "dependencies": { "@octokit/openapi-types": "^12.11.0" } }, "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg=="], + "@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="], "@octokit/request/@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="], @@ -2093,8 +2017,6 @@ "@octokit/rest/@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@13.2.1", "", { "dependencies": { "@octokit/types": "^15.0.1" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-Tj4PkZyIL6eBMYcG/76QGsedF0+dWVeLhYprTmuFVVxzDW7PQh23tM0TP0z+1MvSkxB29YFZwnUX+cXfTiSdyw=="], - "@octokit/rest/@octokit/plugin-request-log": ["@octokit/plugin-request-log@6.0.0", "", { "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q=="], - "@octokit/rest/@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@16.1.1", "", { "dependencies": { "@octokit/types": "^15.0.1" }, "peerDependencies": { "@octokit/core": ">=6" } }, 
"sha512-VztDkhM0ketQYSh5Im3IcKWFZl7VIrrsCaHbDINkdYeiiAsJzjhS2xRFCSJgfN6VOcsoW4laMtsmf3HcNqIimg=="], "@openauthjs/openauth/@standard-schema/spec": ["@standard-schema/spec@1.0.0-beta.3", "", {}, "sha512-0ifF3BjA1E8SY9C+nUew8RefNOIq0cDlYALPty4rhUm8Rrl6tCM8hBT4bhGhx7I7iXD0uAgt50lgo8dD73ACMw=="], @@ -2107,8 +2029,6 @@ "@pierre/diffs/diff": ["diff@8.0.3", "", {}, "sha512-qejHi7bcSD4hQAZE0tNAawRK1ZtafHDmMTMkrrIGgSLl7hTnQHmKCeB45xAcbfTqK2zowkM3j3bHt/4b/ARbYQ=="], - "@protobuf-ts/plugin/typescript": ["typescript@3.9.10", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q=="], - "ai/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.20", "", { "dependencies": { "@ai-sdk/provider": "2.0.1", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-iXHVe0apM2zUEzauqJwqmpC37A5rihrStAih5Ks+JE32iTe4LZ58y17UGBjpQQTCRw9YxMeo2UFLxLpBluyvLQ=="], "ai-gateway-provider/@ai-sdk/amazon-bedrock": ["@ai-sdk/amazon-bedrock@3.0.79", "", { "dependencies": { "@ai-sdk/anthropic": "2.0.62", "@ai-sdk/provider": "2.0.1", "@ai-sdk/provider-utils": "3.0.21", "@smithy/eventstream-codec": "^4.0.1", "@smithy/util-utf8": "^4.0.0", "aws4fetch": "^1.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-GfAQUb1GEmdTjLu5Ud1d5sieNHDpwoQdb4S14KmJlA5RsGREUZ1tfSKngFaiClxFtL0xPSZjePhTMV6Z65A7/g=="], @@ -2119,50 +2039,46 @@ "ai-gateway-provider/@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@3.0.90", "", { "dependencies": { "@ai-sdk/anthropic": "2.0.56", "@ai-sdk/google": "2.0.46", "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.19", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-C9MLe1KZGg1ZbupV2osygHtL5qngyCDA6ATatunyfTbIe8TXKG8HGni/3O6ifbnI5qxTidIn150Ox7eIFZVMYg=="], - "archiver-utils/glob": ["glob@10.5.0", "", { "dependencies": { 
"foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg=="], - - "archiver-utils/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="], - "argparse/sprintf-js": ["sprintf-js@1.0.3", "", {}, "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g=="], "babel-plugin-jsx-dom-expressions/@babel/helper-module-imports": ["@babel/helper-module-imports@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA=="], "babel-plugin-module-resolver/glob": ["glob@9.3.5", "", { "dependencies": { "fs.realpath": "^1.0.0", "minimatch": "^8.0.2", "minipass": "^4.2.4", "path-scurry": "^1.6.1" } }, "sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q=="], - "balanced-match/jackspeak": ["jackspeak@4.2.3", "", { "dependencies": { "@isaacs/cliui": "^9.0.0" } }, "sha512-ykkVRwrYvFm1nb2AJfKKYPr0emF6IiXDYUaFx4Zn9ZuIH7MrzEZ3sD5RlqGXNRpHtvUHJyOnCEFxOlNDtGo7wg=="], - "c12/chokidar": ["chokidar@5.0.0", "", { "dependencies": { "readdirp": "^5.0.0" } }, "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw=="], - "compress-commons/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="], - "cross-fetch/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], 
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + "effect/@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], + "encoding/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], "engine.io-client/ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="], "glob/minimatch": ["minimatch@10.2.1", "", { "dependencies": { "brace-expansion": "^5.0.2" } }, "sha512-MClCe8IL5nRRmawL6ib/eT4oLyeKMGCghibcDWK+J0hh0Q8kqSdia6BvbRMVk6mPa6WqUa5uR2oxt6C5jd533A=="], - "lazystream/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="], - "light-my-request/cookie": ["cookie@1.1.1", "", {}, "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ=="], "light-my-request/process-warning": ["process-warning@4.0.1", "", {}, "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q=="], "mssql/commander": ["commander@11.1.0", "", {}, "sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ=="], + "node-gyp-build-optional-packages/detect-libc": ["detect-libc@2.1.2", "", 
{}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="], + "npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="], "nypm/citty": ["citty@0.2.1", "", {}, "sha512-kEV95lFBhQgtogAPlQfJJ0WGVSokvLr/UEoFPiKKOXF7pl98HfUVUD0ejsuTCld/9xH9vogSywZ5KqHzXrZpqg=="], + "parse5/entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="], + + "path-scurry/lru-cache": ["lru-cache@11.2.6", "", {}, "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ=="], + "pixelmatch/pngjs": ["pngjs@6.0.0", "", {}, "sha512-TRzzuFRRmEoSW/p1KVAmiOgPco2Irlah+bGFCeNfJXxxYGwSw7YwAOAcd7X28K/m5bjBWKsC29KyoMfHbypayg=="], "proxy-addr/ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], - "readdir-glob/minimatch": ["minimatch@5.1.6", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g=="], - "rimraf/glob": ["glob@10.5.0", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg=="], "string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], @@ -2175,8 +2091,6 @@ "tree-sitter-bash/node-addon-api": ["node-addon-api@8.5.0", "", {}, "sha512-/bRZty2mXUIFY/xU5HLvveNHlswNJej+RnxBjOMkidWfwZzgTbPG1E3K5TOxRLOR+5hX7bSofy8yf1hZevMS8A=="], - "tsx/esbuild": ["esbuild@0.27.3", "", { "optionalDependencies": { 
"@esbuild/aix-ppc64": "0.27.3", "@esbuild/android-arm": "0.27.3", "@esbuild/android-arm64": "0.27.3", "@esbuild/android-x64": "0.27.3", "@esbuild/darwin-arm64": "0.27.3", "@esbuild/darwin-x64": "0.27.3", "@esbuild/freebsd-arm64": "0.27.3", "@esbuild/freebsd-x64": "0.27.3", "@esbuild/linux-arm": "0.27.3", "@esbuild/linux-arm64": "0.27.3", "@esbuild/linux-ia32": "0.27.3", "@esbuild/linux-loong64": "0.27.3", "@esbuild/linux-mips64el": "0.27.3", "@esbuild/linux-ppc64": "0.27.3", "@esbuild/linux-riscv64": "0.27.3", "@esbuild/linux-s390x": "0.27.3", "@esbuild/linux-x64": "0.27.3", "@esbuild/netbsd-arm64": "0.27.3", "@esbuild/netbsd-x64": "0.27.3", "@esbuild/openbsd-arm64": "0.27.3", "@esbuild/openbsd-x64": "0.27.3", "@esbuild/openharmony-arm64": "0.27.3", "@esbuild/sunos-x64": "0.27.3", "@esbuild/win32-arm64": "0.27.3", "@esbuild/win32-ia32": "0.27.3", "@esbuild/win32-x64": "0.27.3" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg=="], - "wrap-ansi-cjs/ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], "wrap-ansi-cjs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], @@ -2185,8 +2099,6 @@ "zod-to-json-schema/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="], - "@actions/artifact/@actions/core/@actions/exec": ["@actions/exec@2.0.0", "", { "dependencies": { "@actions/io": "^2.0.0" } }, "sha512-k8ngrX2voJ/RIN6r9xB82NVqKpnMRtxDoiO+g3olkIUpQNqjArXrCQceduQZCQj3P3xm32pChRLqRrtXTlqhIw=="], - "@aws-crypto/sha256-browser/@smithy/util-utf8/@smithy/util-buffer-from": 
["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="], "@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="], @@ -2195,8 +2107,6 @@ "@hey-api/json-schema-ref-parser/js-yaml/argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], - "@isaacs/cliui/string-width/emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="], - "@octokit/core/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], "@octokit/endpoint/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], @@ -2209,9 +2119,19 @@ "@octokit/plugin-paginate-rest/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="], - "@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="], + "@octokit/plugin-request-log/@octokit/core/@octokit/auth-token": ["@octokit/auth-token@6.0.0", "", {}, "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w=="], + + 
"@octokit/plugin-request-log/@octokit/core/@octokit/graphql": ["@octokit/graphql@9.0.3", "", { "dependencies": { "@octokit/request": "^10.0.6", "@octokit/types": "^16.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-grAEuupr/C1rALFnXTv6ZQhFuL1D8G5y8CN04RgrO4FIPMrtm+mcZzFG7dcBm+nq+1ppNixu+Jd78aeJOYxlGA=="], + + "@octokit/plugin-request-log/@octokit/core/@octokit/request": ["@octokit/request@10.0.7", "", { "dependencies": { "@octokit/endpoint": "^11.0.2", "@octokit/request-error": "^7.0.2", "@octokit/types": "^16.0.0", "fast-content-type-parse": "^3.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-v93h0i1yu4idj8qFPZwjehoJx4j3Ntn+JhXsdJrG9pYaX6j/XRz2RmasMUHtNgQD39nrv/VwTWSqK0RNXR8upA=="], + + "@octokit/plugin-request-log/@octokit/core/@octokit/request-error": ["@octokit/request-error@7.1.0", "", { "dependencies": { "@octokit/types": "^16.0.0" } }, "sha512-KMQIfq5sOPpkQYajXHwnhjCC0slzCNScLHs9JafXc4RAJI+9f+jNDlBNaIMTvazOPLgb4BnlhGJOTbnN0wIjPw=="], + + "@octokit/plugin-request-log/@octokit/core/@octokit/types": ["@octokit/types@16.0.0", "", { "dependencies": { "@octokit/openapi-types": "^27.0.0" } }, "sha512-sKq+9r1Mm4efXW1FCk7hFSeJo4QKreL/tTbR0rz/qx/r1Oa2VV83LTA/H/MuCOX7uCIJmQVRKBcbmWoySjAnSg=="], - "@octokit/plugin-retry/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@12.11.0", "", {}, "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ=="], + "@octokit/plugin-request-log/@octokit/core/before-after-hook": ["before-after-hook@4.0.0", "", {}, "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ=="], + + "@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="], "@octokit/request-error/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, 
"sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="], @@ -2243,27 +2163,17 @@ "ai-gateway-provider/@ai-sdk/google-vertex/@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@3.0.19", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@standard-schema/spec": "^1.0.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-W41Wc9/jbUVXVwCN/7bWa4IKe8MtxO3EyA0Hfhx6grnmiYlCvpI8neSYWFE0zScXJkgA/YK3BRybzgyiXuu6JA=="], - "archiver-utils/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "archiver-utils/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="], - "babel-plugin-module-resolver/glob/minimatch": ["minimatch@8.0.4", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-W0Wvr9HyFXZRGIDgCicunpQ299OKXs9RgZfaukz4qAW/pJhcpUfupc9c+OObPOFueNy8VSrZgEmDtk6Kh4WzDA=="], "babel-plugin-module-resolver/glob/minipass": ["minipass@4.2.8", "", {}, "sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ=="], "babel-plugin-module-resolver/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="], - "balanced-match/jackspeak/@isaacs/cliui": ["@isaacs/cliui@9.0.0", "", {}, "sha512-AokJm4tuBHillT+FpMtxQ60n8ObyXBatq7jD2/JA9dxbDDokKQm8KMht5ibGzLVU9IJDIKK4TPKgMHEYMn3lMg=="], - "c12/chokidar/readdirp": ["readdirp@5.0.0", "", {}, "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ=="], "cross-spawn/which/isexe": 
["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], - "lazystream/readable-stream/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="], - - "lazystream/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="], - - "readdir-glob/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], + "rimraf/glob/jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], "rimraf/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], @@ -2271,77 +2181,23 @@ "string-width-cjs/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - "tsx/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.27.3", "", { "os": "aix", "cpu": "ppc64" }, "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg=="], - - "tsx/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.27.3", "", { "os": "android", "cpu": "arm" }, "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA=="], - - "tsx/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.27.3", "", { "os": "android", "cpu": "arm64" }, 
"sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg=="], - - "tsx/esbuild/@esbuild/android-x64": ["@esbuild/android-x64@0.27.3", "", { "os": "android", "cpu": "x64" }, "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ=="], - - "tsx/esbuild/@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.27.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg=="], - - "tsx/esbuild/@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.27.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg=="], - - "tsx/esbuild/@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.27.3", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w=="], - - "tsx/esbuild/@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.27.3", "", { "os": "freebsd", "cpu": "x64" }, "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA=="], - - "tsx/esbuild/@esbuild/linux-arm": ["@esbuild/linux-arm@0.27.3", "", { "os": "linux", "cpu": "arm" }, "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw=="], - - "tsx/esbuild/@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.27.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg=="], - - "tsx/esbuild/@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.27.3", "", { "os": "linux", "cpu": "ia32" }, "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg=="], - - "tsx/esbuild/@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.27.3", "", { "os": "linux", "cpu": "none" }, "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA=="], 
- - "tsx/esbuild/@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.27.3", "", { "os": "linux", "cpu": "none" }, "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw=="], - - "tsx/esbuild/@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.27.3", "", { "os": "linux", "cpu": "ppc64" }, "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA=="], - - "tsx/esbuild/@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.27.3", "", { "os": "linux", "cpu": "none" }, "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ=="], - - "tsx/esbuild/@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.27.3", "", { "os": "linux", "cpu": "s390x" }, "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw=="], - - "tsx/esbuild/@esbuild/linux-x64": ["@esbuild/linux-x64@0.27.3", "", { "os": "linux", "cpu": "x64" }, "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA=="], - - "tsx/esbuild/@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.27.3", "", { "os": "none", "cpu": "arm64" }, "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA=="], - - "tsx/esbuild/@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.27.3", "", { "os": "none", "cpu": "x64" }, "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA=="], - - "tsx/esbuild/@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.27.3", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw=="], - - "tsx/esbuild/@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.27.3", "", { "os": "openbsd", "cpu": "x64" }, "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ=="], - - "tsx/esbuild/@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.27.3", "", { "os": 
"none", "cpu": "arm64" }, "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g=="], - - "tsx/esbuild/@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.27.3", "", { "os": "sunos", "cpu": "x64" }, "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA=="], - - "tsx/esbuild/@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.27.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA=="], - - "tsx/esbuild/@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.27.3", "", { "os": "win32", "cpu": "ia32" }, "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q=="], - - "tsx/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.27.3", "", { "os": "win32", "cpu": "x64" }, "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA=="], - "wrap-ansi-cjs/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], "wrap-ansi-cjs/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - "@actions/artifact/@actions/core/@actions/exec/@actions/io": ["@actions/io@2.0.0", "", {}, "sha512-Jv33IN09XLO+0HS79aaODsvIRyduiF7NY/F6LYeK5oeUmrsz7aFdRphQjFoESF4jS7lMauDOttKALcpapVDIAg=="], - "@aws-crypto/sha256-browser/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="], "@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, 
"sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="], "@octokit/graphql/@octokit/request/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@27.0.0", "", {}, "sha512-whrdktVs1h6gtR+09+QsNk2+FO+49j6ga1c55YZudfEG+oKJVvJLQi3zkOm5JjiUXAagWK2tI2kTGKJ2Ys7MGA=="], - "@octokit/rest/@octokit/core/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@11.0.2", "", { "dependencies": { "@octokit/types": "^16.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-4zCpzP1fWc7QlqunZ5bSEjxc6yLAlRTnDwKtgXfcI/FxxGoqedDG8V2+xJ60bV2kODqcGB+nATdtap/XYq2NZQ=="], + "@octokit/plugin-request-log/@octokit/core/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@11.0.2", "", { "dependencies": { "@octokit/types": "^16.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-4zCpzP1fWc7QlqunZ5bSEjxc6yLAlRTnDwKtgXfcI/FxxGoqedDG8V2+xJ60bV2kODqcGB+nATdtap/XYq2NZQ=="], - "@octokit/rest/@octokit/core/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@27.0.0", "", {}, "sha512-whrdktVs1h6gtR+09+QsNk2+FO+49j6ga1c55YZudfEG+oKJVvJLQi3zkOm5JjiUXAagWK2tI2kTGKJ2Ys7MGA=="], + "@octokit/plugin-request-log/@octokit/core/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@27.0.0", "", {}, "sha512-whrdktVs1h6gtR+09+QsNk2+FO+49j6ga1c55YZudfEG+oKJVvJLQi3zkOm5JjiUXAagWK2tI2kTGKJ2Ys7MGA=="], - "archiver-utils/glob/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], + "@octokit/rest/@octokit/core/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@11.0.2", "", { "dependencies": { "@octokit/types": "^16.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-4zCpzP1fWc7QlqunZ5bSEjxc6yLAlRTnDwKtgXfcI/FxxGoqedDG8V2+xJ60bV2kODqcGB+nATdtap/XYq2NZQ=="], - "archiver-utils/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, 
"sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], + "@octokit/rest/@octokit/core/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@27.0.0", "", {}, "sha512-whrdktVs1h6gtR+09+QsNk2+FO+49j6ga1c55YZudfEG+oKJVvJLQi3zkOm5JjiUXAagWK2tI2kTGKJ2Ys7MGA=="], "babel-plugin-module-resolver/glob/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], @@ -2349,16 +2205,20 @@ "babel-plugin-module-resolver/glob/path-scurry/minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="], - "readdir-glob/minimatch/brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], + "rimraf/glob/jackspeak/@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], "rimraf/glob/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], "rimraf/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - "archiver-utils/glob/minimatch/brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], - 
"babel-plugin-module-resolver/glob/minimatch/brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], + "rimraf/glob/jackspeak/@isaacs/cliui/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], + + "rimraf/glob/jackspeak/@isaacs/cliui/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], + "rimraf/glob/minimatch/brace-expansion/balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], + + "rimraf/glob/jackspeak/@isaacs/cliui/string-width/emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="], } } diff --git a/docs/docs/configure/providers.md b/docs/docs/configure/providers.md index 26b5291739..0b73800508 100644 --- a/docs/docs/configure/providers.md +++ b/docs/docs/configure/providers.md @@ -98,6 +98,38 @@ Uses the standard AWS credential chain. Set `AWS_PROFILE` or provide credentials } ``` +## Google Vertex AI + +```json +{ + "provider": { + "google-vertex": { + "project": "my-gcp-project", + "location": "us-central1" + } + }, + "model": "google-vertex/gemini-2.5-pro" +} +``` + +Uses Google Cloud Application Default Credentials. 
Authenticate with: + +```bash +gcloud auth application-default login +``` + +The `project` and `location` fields can also be set via environment variables: + +| Field | Environment Variables (checked in order) | +|-------|----------------------------------------| +| `project` | `GOOGLE_CLOUD_PROJECT`, `GCP_PROJECT`, `GCLOUD_PROJECT` | +| `location` | `GOOGLE_VERTEX_LOCATION`, `GOOGLE_CLOUD_LOCATION`, `VERTEX_LOCATION` | + +If `location` is not set, it defaults to `us-central1`. + +!!! tip + You can also access Anthropic models through Vertex AI using the `google-vertex` provider (e.g., `google-vertex/claude-sonnet-4-6`). + ## Ollama (Local) ```json @@ -188,3 +220,5 @@ The `small_model` is used for lightweight tasks like summarization and context c | `region` | `string` | AWS region (Bedrock only) | | `accessKeyId` | `string` | AWS access key (Bedrock only) | | `secretAccessKey` | `string` | AWS secret key (Bedrock only) | +| `project` | `string` | GCP project ID (Google Vertex AI only) | +| `location` | `string` | GCP region (Google Vertex AI only, default: `us-central1`) | diff --git a/package.json b/package.json index e871c07d53..da07125ecf 100644 --- a/package.json +++ b/package.json @@ -7,6 +7,9 @@ "packageManager": "bun@1.3.10", "scripts": { "dev": "bun run --cwd packages/opencode --conditions=browser src/index.ts", + "dev:desktop": "bun --cwd packages/desktop tauri dev", + "dev:web": "bun --cwd packages/app dev", + "dev:storybook": "bun --cwd packages/storybook storybook", "typecheck": "bun turbo typecheck", "prepare": "husky", "random": "echo 'Random script'", @@ -26,16 +29,22 @@ "@octokit/rest": "22.0.0", "@hono/zod-validator": "0.4.2", "ulid": "3.0.1", + "@kobalte/core": "0.13.11", "@types/luxon": "3.7.1", "@types/node": "22.13.9", "@types/semver": "7.7.1", "@tsconfig/node22": "22.0.2", "@tsconfig/bun": "1.0.9", + "@cloudflare/workers-types": "4.20251008.0", "@openauthjs/openauth": "0.0.0-20250322224806", "@pierre/diffs": "1.1.0-beta.18", + 
"@solid-primitives/storage": "4.3.3", + "@tailwindcss/vite": "4.1.11", "diff": "8.0.2", - "drizzle-kit": "1.0.0-beta.12-a5629fb", - "drizzle-orm": "1.0.0-beta.12-a5629fb", + "dompurify": "3.3.1", + "drizzle-kit": "1.0.0-beta.16-ea816b6", + "drizzle-orm": "1.0.0-beta.16-ea816b6", + "effect": "4.0.0-beta.31", "ai": "5.0.124", "hono": "4.10.7", "hono-openapi": "1.1.2", @@ -43,20 +52,26 @@ "luxon": "3.6.1", "marked": "17.0.1", "marked-shiki": "1.2.1", + "@playwright/test": "1.51.0", "typescript": "5.8.2", "@typescript/native-preview": "7.0.0-dev.20251207.1", "zod": "4.1.8", "remeda": "2.26.0", "shiki": "3.20.0", - "solid-js": "1.9.10" + "solid-list": "0.3.0", + "tailwindcss": "4.1.11", + "virtua": "0.42.3", + "vite": "7.1.4", + "@solidjs/meta": "0.29.4", + "@solidjs/router": "0.15.4", + "@solidjs/start": "https://pkg.pr.new/@solidjs/start@dfb2020", + "solid-js": "1.9.10", + "vite-plugin-solid": "2.11.10" } }, "devDependencies": { - "@actions/artifact": "5.0.1", "@tsconfig/bun": "catalog:", - "@types/mime-types": "3.0.1", "@typescript/native-preview": "catalog:", - "glob": "13.0.5", "husky": "9.1.7", "prettier": "3.6.2", "semver": "^7.6.0", diff --git a/packages/opencode/AGENTS.md b/packages/opencode/AGENTS.md index dcfc336d65..930297baa9 100644 --- a/packages/opencode/AGENTS.md +++ b/packages/opencode/AGENTS.md @@ -8,3 +8,37 @@ - **Command**: `bun run db generate --name `. - **Output**: creates `migration/_/migration.sql` and `snapshot.json`. - **Tests**: migration tests should read the per-folder layout (no `_journal.json`). + +# opencode Effect guide + +Instructions to follow when writing Effect. + +## Schemas + +- Use `Schema.Class` for data types with multiple fields. +- Use branded schemas (`Schema.brand`) for single-value types. + +## Services + +- Services use `ServiceMap.Service()("@console/")`. +- In `Layer.effect`, always return service implementations with `ServiceName.of({ ... })`, never a plain object. 
+ +## Errors + +- Use `Schema.TaggedErrorClass` for typed errors. +- For defect-like causes, use `Schema.Defect` instead of `unknown`. +- In `Effect.gen`, prefer `yield* new MyError(...)` over `yield* Effect.fail(new MyError(...))` for direct early-failure branches. + +## Effects + +- Use `Effect.gen(function* () { ... })` for composition. +- Use `Effect.fn("ServiceName.method")` for named/traced effects and `Effect.fnUntraced` for internal helpers. +- `Effect.fn` / `Effect.fnUntraced` accept pipeable operators as extra arguments, so avoid unnecessary `flow` or outer `.pipe()` wrappers. + +## Time + +- Prefer `DateTime.nowAsDate` over `new Date(yield* Clock.currentTimeMillis)` when you need a `Date`. + +## Errors + +- In `Effect.gen/fn`, prefer `yield* new MyError(...)` over `yield* Effect.fail(new MyError(...))` for direct early-failure branches. diff --git a/packages/opencode/migration/20260228203230_blue_harpoon/migration.sql b/packages/opencode/migration/20260228203230_blue_harpoon/migration.sql new file mode 100644 index 0000000000..85be58c88d --- /dev/null +++ b/packages/opencode/migration/20260228203230_blue_harpoon/migration.sql @@ -0,0 +1,17 @@ +CREATE TABLE `account` ( + `id` text PRIMARY KEY, + `email` text NOT NULL, + `url` text NOT NULL, + `access_token` text NOT NULL, + `refresh_token` text NOT NULL, + `token_expiry` integer, + `selected_org_id` text, + `time_created` integer NOT NULL, + `time_updated` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `account_state` ( + `id` integer PRIMARY KEY NOT NULL, + `active_account_id` text, + FOREIGN KEY (`active_account_id`) REFERENCES `account`(`id`) ON UPDATE no action ON DELETE set null +); diff --git a/packages/opencode/migration/20260228203230_blue_harpoon/snapshot.json b/packages/opencode/migration/20260228203230_blue_harpoon/snapshot.json new file mode 100644 index 0000000000..80d9451bae --- /dev/null +++ b/packages/opencode/migration/20260228203230_blue_harpoon/snapshot.json @@ -0,0 
+1,1102 @@ +{ + "version": "7", + "dialect": "sqlite", + "id": "325559b7-104f-4d2a-a02c-934cfad7cfcc", + "prevIds": ["1f1dbf2d-bf66-4b25-8af4-4ba7633b7e40"], + "ddl": [ + { + "name": "account", + "entityType": "tables" + }, + { + "name": "account_state", + "entityType": "tables" + }, + { + "name": "control_account", + "entityType": "tables" + }, + { + "name": "workspace", + "entityType": "tables" + }, + { + "name": "project", + "entityType": "tables" + }, + { + "name": "message", + "entityType": "tables" + }, + { + "name": "part", + "entityType": "tables" + }, + { + "name": "permission", + "entityType": "tables" + }, + { + "name": "session", + "entityType": "tables" + }, + { + "name": "todo", + "entityType": "tables" + }, + { + "name": "session_share", + "entityType": "tables" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "url", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "access_token", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "refresh_token", + "entityType": "columns", + "table": "account" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "token_expiry", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + 
"name": "selected_org_id", + "entityType": "columns", + "table": "account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "account_state" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "active_account_id", + "entityType": "columns", + "table": "account_state" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "url", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "access_token", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "refresh_token", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "token_expiry", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "active", + "entityType": "columns", + "table": "control_account" + }, + { + 
"type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "branch", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "project_id", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "config", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "worktree", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "vcs", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "icon_url", + "entityType": "columns", + 
"table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "icon_color", + "entityType": "columns", + "table": "project" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "project" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "project" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_initialized", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "sandboxes", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "commands", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "message" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "message" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "message" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "message" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + 
"name": "data", + "entityType": "columns", + "table": "message" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "message_id", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "part" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "part" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "data", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "project_id", + "entityType": "columns", + "table": "permission" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "permission" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "permission" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "data", + "entityType": "columns", + "table": "permission" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + 
"generated": null, + "name": "id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "project_id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "parent_id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "slug", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "directory", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "title", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "version", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "share_url", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_additions", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_deletions", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_files", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + 
"autoincrement": false, + "default": null, + "generated": null, + "name": "summary_diffs", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "revert", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "permission", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_compacting", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_archived", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "content", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "status", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "priority", + "entityType": "columns", + "table": "todo" + }, + { + 
"type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "position", + "entityType": "columns", + "table": "todo" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "todo" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "secret", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "url", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "session_share" + }, + { + "columns": ["project_id"], + "tableTo": "project", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_workspace_project_id_project_id_fk", + "entityType": "fks", + "table": "workspace" + }, + { + "columns": ["session_id"], + "tableTo": "session", + 
"columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_message_session_id_session_id_fk", + "entityType": "fks", + "table": "message" + }, + { + "columns": ["message_id"], + "tableTo": "message", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_part_message_id_message_id_fk", + "entityType": "fks", + "table": "part" + }, + { + "columns": ["project_id"], + "tableTo": "project", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_permission_project_id_project_id_fk", + "entityType": "fks", + "table": "permission" + }, + { + "columns": ["project_id"], + "tableTo": "project", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_session_project_id_project_id_fk", + "entityType": "fks", + "table": "session" + }, + { + "columns": ["session_id"], + "tableTo": "session", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_todo_session_id_session_id_fk", + "entityType": "fks", + "table": "todo" + }, + { + "columns": ["session_id"], + "tableTo": "session", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_session_share_session_id_session_id_fk", + "entityType": "fks", + "table": "session_share" + }, + { + "columns": ["active_account_id"], + "tableTo": "account", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "SET NULL", + "nameExplicit": false, + "name": "fk_account_state_active_account_id_account_id_fk", + "entityType": "fks", + "table": "account_state" + }, + { + "columns": ["email", "url"], + "nameExplicit": false, + "name": "control_account_pk", + "entityType": "pks", + "table": "control_account" + }, + { + "columns": ["session_id", "position"], + "nameExplicit": false, + "name": "todo_pk", 
+ "entityType": "pks", + "table": "todo" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "account_pk", + "table": "account", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "account_state_pk", + "table": "account_state", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "workspace_pk", + "table": "workspace", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "project_pk", + "table": "project", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "message_pk", + "table": "message", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "part_pk", + "table": "part", + "entityType": "pks" + }, + { + "columns": ["project_id"], + "nameExplicit": false, + "name": "permission_pk", + "table": "permission", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "session_pk", + "table": "session", + "entityType": "pks" + }, + { + "columns": ["session_id"], + "nameExplicit": false, + "name": "session_share_pk", + "table": "session_share", + "entityType": "pks" + }, + { + "columns": [ + { + "value": "session_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "message_session_idx", + "entityType": "indexes", + "table": "message" + }, + { + "columns": [ + { + "value": "message_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "part_message_idx", + "entityType": "indexes", + "table": "part" + }, + { + "columns": [ + { + "value": "session_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "part_session_idx", + "entityType": "indexes", + "table": "part" + }, + { + "columns": [ + { + "value": "project_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": 
"manual", + "name": "session_project_idx", + "entityType": "indexes", + "table": "session" + }, + { + "columns": [ + { + "value": "parent_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "session_parent_idx", + "entityType": "indexes", + "table": "session" + }, + { + "columns": [ + { + "value": "session_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "todo_session_idx", + "entityType": "indexes", + "table": "todo" + } + ], + "renames": [] +} diff --git a/packages/opencode/migration/20260309230000_move_org_to_state/migration.sql b/packages/opencode/migration/20260309230000_move_org_to_state/migration.sql new file mode 100644 index 0000000000..4d1c7bccd0 --- /dev/null +++ b/packages/opencode/migration/20260309230000_move_org_to_state/migration.sql @@ -0,0 +1,3 @@ +ALTER TABLE `account_state` ADD `active_org_id` text;--> statement-breakpoint +UPDATE `account_state` SET `active_org_id` = (SELECT `selected_org_id` FROM `account` WHERE `account`.`id` = `account_state`.`active_account_id`);--> statement-breakpoint +ALTER TABLE `account` DROP COLUMN `selected_org_id`; diff --git a/packages/opencode/migration/20260309230000_move_org_to_state/snapshot.json b/packages/opencode/migration/20260309230000_move_org_to_state/snapshot.json new file mode 100644 index 0000000000..488ecefffb --- /dev/null +++ b/packages/opencode/migration/20260309230000_move_org_to_state/snapshot.json @@ -0,0 +1,1156 @@ +{ + "version": "7", + "dialect": "sqlite", + "id": "fb311f30-9948-4131-b15c-7d308478a878", + "prevIds": ["325559b7-104f-4d2a-a02c-934cfad7cfcc", "4ec9de62-88a7-4bec-91cc-0a759e84db21"], + "ddl": [ + { + "name": "account_state", + "entityType": "tables" + }, + { + "name": "account", + "entityType": "tables" + }, + { + "name": "control_account", + "entityType": "tables" + }, + { + "name": "workspace", + "entityType": "tables" + }, + { + "name": "project", + "entityType": 
"tables" + }, + { + "name": "message", + "entityType": "tables" + }, + { + "name": "part", + "entityType": "tables" + }, + { + "name": "permission", + "entityType": "tables" + }, + { + "name": "session", + "entityType": "tables" + }, + { + "name": "todo", + "entityType": "tables" + }, + { + "name": "session_share", + "entityType": "tables" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "account_state" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "active_account_id", + "entityType": "columns", + "table": "account_state" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "active_org_id", + "entityType": "columns", + "table": "account_state" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "url", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "access_token", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "refresh_token", + "entityType": "columns", + "table": "account" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "token_expiry", + "entityType": "columns", + 
"table": "account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "url", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "access_token", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "refresh_token", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "token_expiry", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "active", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": false, + 
"autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "type", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "branch", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "directory", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "extra", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "project_id", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "worktree", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "vcs", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + 
"autoincrement": false, + "default": null, + "generated": null, + "name": "icon_url", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "icon_color", + "entityType": "columns", + "table": "project" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "project" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "project" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_initialized", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "sandboxes", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "commands", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "message" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "message" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "message" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "message" + 
}, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "data", + "entityType": "columns", + "table": "message" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "message_id", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "part" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "part" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "data", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "project_id", + "entityType": "columns", + "table": "permission" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "permission" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "permission" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "data", + "entityType": "columns", + "table": 
"permission" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "project_id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "parent_id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "slug", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "directory", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "title", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "version", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "share_url", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_additions", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_deletions", + 
"entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_files", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_diffs", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "revert", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "permission", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_compacting", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_archived", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "content", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": 
null, + "generated": null, + "name": "status", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "priority", + "entityType": "columns", + "table": "todo" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "position", + "entityType": "columns", + "table": "todo" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "todo" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "secret", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "url", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "session_share" + }, + { + "columns": 
["active_account_id"], + "tableTo": "account", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "SET NULL", + "nameExplicit": false, + "name": "fk_account_state_active_account_id_account_id_fk", + "entityType": "fks", + "table": "account_state" + }, + { + "columns": ["project_id"], + "tableTo": "project", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_workspace_project_id_project_id_fk", + "entityType": "fks", + "table": "workspace" + }, + { + "columns": ["session_id"], + "tableTo": "session", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_message_session_id_session_id_fk", + "entityType": "fks", + "table": "message" + }, + { + "columns": ["message_id"], + "tableTo": "message", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_part_message_id_message_id_fk", + "entityType": "fks", + "table": "part" + }, + { + "columns": ["project_id"], + "tableTo": "project", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_permission_project_id_project_id_fk", + "entityType": "fks", + "table": "permission" + }, + { + "columns": ["project_id"], + "tableTo": "project", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_session_project_id_project_id_fk", + "entityType": "fks", + "table": "session" + }, + { + "columns": ["session_id"], + "tableTo": "session", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_todo_session_id_session_id_fk", + "entityType": "fks", + "table": "todo" + }, + { + "columns": ["session_id"], + "tableTo": "session", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": 
"fk_session_share_session_id_session_id_fk", + "entityType": "fks", + "table": "session_share" + }, + { + "columns": ["email", "url"], + "nameExplicit": false, + "name": "control_account_pk", + "entityType": "pks", + "table": "control_account" + }, + { + "columns": ["session_id", "position"], + "nameExplicit": false, + "name": "todo_pk", + "entityType": "pks", + "table": "todo" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "account_state_pk", + "table": "account_state", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "account_pk", + "table": "account", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "workspace_pk", + "table": "workspace", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "project_pk", + "table": "project", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "message_pk", + "table": "message", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "part_pk", + "table": "part", + "entityType": "pks" + }, + { + "columns": ["project_id"], + "nameExplicit": false, + "name": "permission_pk", + "table": "permission", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "session_pk", + "table": "session", + "entityType": "pks" + }, + { + "columns": ["session_id"], + "nameExplicit": false, + "name": "session_share_pk", + "table": "session_share", + "entityType": "pks" + }, + { + "columns": [ + { + "value": "session_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "message_session_idx", + "entityType": "indexes", + "table": "message" + }, + { + "columns": [ + { + "value": "message_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "part_message_idx", + "entityType": "indexes", + "table": "part" + }, + { + 
"columns": [ + { + "value": "session_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "part_session_idx", + "entityType": "indexes", + "table": "part" + }, + { + "columns": [ + { + "value": "project_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "session_project_idx", + "entityType": "indexes", + "table": "session" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "session_workspace_idx", + "entityType": "indexes", + "table": "session" + }, + { + "columns": [ + { + "value": "parent_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "session_parent_idx", + "entityType": "indexes", + "table": "session" + }, + { + "columns": [ + { + "value": "session_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "todo_session_idx", + "entityType": "indexes", + "table": "todo" + } + ], + "renames": [] +} diff --git a/packages/opencode/migration/20260312043431_session_message_cursor/migration.sql b/packages/opencode/migration/20260312043431_session_message_cursor/migration.sql new file mode 100644 index 0000000000..e2bd08137c --- /dev/null +++ b/packages/opencode/migration/20260312043431_session_message_cursor/migration.sql @@ -0,0 +1,4 @@ +DROP INDEX IF EXISTS `message_session_idx`;--> statement-breakpoint +DROP INDEX IF EXISTS `part_message_idx`;--> statement-breakpoint +CREATE INDEX `message_session_time_created_id_idx` ON `message` (`session_id`,`time_created`,`id`);--> statement-breakpoint +CREATE INDEX `part_message_id_id_idx` ON `part` (`message_id`,`id`); \ No newline at end of file diff --git a/packages/opencode/migration/20260312043431_session_message_cursor/snapshot.json 
b/packages/opencode/migration/20260312043431_session_message_cursor/snapshot.json new file mode 100644 index 0000000000..48958804ab --- /dev/null +++ b/packages/opencode/migration/20260312043431_session_message_cursor/snapshot.json @@ -0,0 +1,1168 @@ +{ + "version": "7", + "dialect": "sqlite", + "id": "37e1554d-af4c-43f2-aa7c-307fb49a315e", + "prevIds": ["fb311f30-9948-4131-b15c-7d308478a878"], + "ddl": [ + { + "name": "account_state", + "entityType": "tables" + }, + { + "name": "account", + "entityType": "tables" + }, + { + "name": "control_account", + "entityType": "tables" + }, + { + "name": "workspace", + "entityType": "tables" + }, + { + "name": "project", + "entityType": "tables" + }, + { + "name": "message", + "entityType": "tables" + }, + { + "name": "part", + "entityType": "tables" + }, + { + "name": "permission", + "entityType": "tables" + }, + { + "name": "session", + "entityType": "tables" + }, + { + "name": "todo", + "entityType": "tables" + }, + { + "name": "session_share", + "entityType": "tables" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "account_state" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "active_account_id", + "entityType": "columns", + "table": "account_state" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "active_org_id", + "entityType": "columns", + "table": "account_state" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + 
"notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "url", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "access_token", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "refresh_token", + "entityType": "columns", + "table": "account" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "token_expiry", + "entityType": "columns", + "table": "account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "url", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "access_token", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "refresh_token", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "token_expiry", + 
"entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "active", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "control_account" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "type", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "branch", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "directory", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "extra", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "project_id", + "entityType": "columns", + "table": "workspace" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + 
"default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "worktree", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "vcs", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "icon_url", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "icon_color", + "entityType": "columns", + "table": "project" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "project" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "project" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_initialized", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "sandboxes", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "commands", + "entityType": "columns", + "table": "project" + }, + { + "type": "text", + "notNull": false, + 
"autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "message" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "message" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "message" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "message" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "data", + "entityType": "columns", + "table": "message" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "message_id", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "part" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "part" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "data", + "entityType": "columns", + "table": "part" + }, + { + "type": "text", + "notNull": false, + 
"autoincrement": false, + "default": null, + "generated": null, + "name": "project_id", + "entityType": "columns", + "table": "permission" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "permission" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "permission" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "data", + "entityType": "columns", + "table": "permission" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "project_id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "workspace_id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "parent_id", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "slug", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "directory", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "title", + "entityType": "columns", + "table": "session" + }, + { + "type": 
"text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "version", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "share_url", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_additions", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_deletions", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_files", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "summary_diffs", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "revert", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "permission", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": 
"time_compacting", + "entityType": "columns", + "table": "session" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_archived", + "entityType": "columns", + "table": "session" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "content", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "status", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "priority", + "entityType": "columns", + "table": "todo" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "position", + "entityType": "columns", + "table": "todo" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "todo" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "todo" + }, + { + "type": "text", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "session_id", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + 
"generated": null, + "name": "secret", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "url", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_created", + "entityType": "columns", + "table": "session_share" + }, + { + "type": "integer", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "time_updated", + "entityType": "columns", + "table": "session_share" + }, + { + "columns": ["active_account_id"], + "tableTo": "account", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "SET NULL", + "nameExplicit": false, + "name": "fk_account_state_active_account_id_account_id_fk", + "entityType": "fks", + "table": "account_state" + }, + { + "columns": ["project_id"], + "tableTo": "project", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_workspace_project_id_project_id_fk", + "entityType": "fks", + "table": "workspace" + }, + { + "columns": ["session_id"], + "tableTo": "session", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_message_session_id_session_id_fk", + "entityType": "fks", + "table": "message" + }, + { + "columns": ["message_id"], + "tableTo": "message", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_part_message_id_message_id_fk", + "entityType": "fks", + "table": "part" + }, + { + "columns": ["project_id"], + "tableTo": "project", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_permission_project_id_project_id_fk", + "entityType": "fks", + "table": "permission" + }, + { + "columns": 
["project_id"], + "tableTo": "project", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_session_project_id_project_id_fk", + "entityType": "fks", + "table": "session" + }, + { + "columns": ["session_id"], + "tableTo": "session", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_todo_session_id_session_id_fk", + "entityType": "fks", + "table": "todo" + }, + { + "columns": ["session_id"], + "tableTo": "session", + "columnsTo": ["id"], + "onUpdate": "NO ACTION", + "onDelete": "CASCADE", + "nameExplicit": false, + "name": "fk_session_share_session_id_session_id_fk", + "entityType": "fks", + "table": "session_share" + }, + { + "columns": ["email", "url"], + "nameExplicit": false, + "name": "control_account_pk", + "entityType": "pks", + "table": "control_account" + }, + { + "columns": ["session_id", "position"], + "nameExplicit": false, + "name": "todo_pk", + "entityType": "pks", + "table": "todo" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "account_state_pk", + "table": "account_state", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "account_pk", + "table": "account", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "workspace_pk", + "table": "workspace", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "project_pk", + "table": "project", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "message_pk", + "table": "message", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "part_pk", + "table": "part", + "entityType": "pks" + }, + { + "columns": ["project_id"], + "nameExplicit": false, + "name": "permission_pk", + "table": "permission", + "entityType": "pks" + }, + { + "columns": ["id"], + "nameExplicit": false, + "name": "session_pk", 
+ "table": "session", + "entityType": "pks" + }, + { + "columns": ["session_id"], + "nameExplicit": false, + "name": "session_share_pk", + "table": "session_share", + "entityType": "pks" + }, + { + "columns": [ + { + "value": "session_id", + "isExpression": false + }, + { + "value": "time_created", + "isExpression": false + }, + { + "value": "id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "message_session_time_created_id_idx", + "entityType": "indexes", + "table": "message" + }, + { + "columns": [ + { + "value": "message_id", + "isExpression": false + }, + { + "value": "id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "part_message_id_id_idx", + "entityType": "indexes", + "table": "part" + }, + { + "columns": [ + { + "value": "session_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "part_session_idx", + "entityType": "indexes", + "table": "part" + }, + { + "columns": [ + { + "value": "project_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "session_project_idx", + "entityType": "indexes", + "table": "session" + }, + { + "columns": [ + { + "value": "workspace_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "session_workspace_idx", + "entityType": "indexes", + "table": "session" + }, + { + "columns": [ + { + "value": "parent_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "session_parent_idx", + "entityType": "indexes", + "table": "session" + }, + { + "columns": [ + { + "value": "session_id", + "isExpression": false + } + ], + "isUnique": false, + "where": null, + "origin": "manual", + "name": "todo_session_idx", + "entityType": "indexes", + "table": "todo" + } + ], + "renames": [] +} diff --git 
a/packages/opencode/package.json b/packages/opencode/package.json index 0a707640b5..b5c917260a 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -10,17 +10,26 @@ "test": "bun test --timeout 30000", "build": "bun run script/build.ts", "dev": "bun run --conditions=browser ./src/index.ts", + "random": "echo 'Random script updated at $(date)' && echo 'Change queued successfully' && echo 'Another change made' && echo 'Yet another change' && echo 'One more change' && echo 'Final change' && echo 'Another final change' && echo 'Yet another final change'", + "clean": "echo 'Cleaning up...' && rm -rf node_modules dist", + "lint": "echo 'Running lint checks...' && bun test --coverage", + "format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts", + "docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;", + "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'", "db": "bun drizzle-kit" }, "bin": { - "altimate": "./bin/altimate", - "altimate-code": "./bin/altimate-code" + "altimate-code": "./bin/altimate-code", + "altimate": "./bin/altimate-code", + "opencode": "./bin/opencode" }, + "randomField": "this-is-a-random-value-12345", "exports": { "./*": "./src/*.ts" }, "devDependencies": { "@babel/core": "7.28.4", + "@effect/language-service": "0.79.0", "@octokit/webhooks-types": "7.6.1", "@opencode-ai/script": "workspace:*", "@parcel/watcher-darwin-arm64": "2.5.1", @@ -29,18 +38,20 @@ "@parcel/watcher-linux-arm64-musl": "2.5.1", "@parcel/watcher-linux-x64-glibc": "2.5.1", "@parcel/watcher-linux-x64-musl": "2.5.1", + "@parcel/watcher-win32-arm64": "2.5.1", "@parcel/watcher-win32-x64": "2.5.1", "@standard-schema/spec": "1.0.0", "@tsconfig/bun": "catalog:", "@types/babel__core": "7.20.5", "@types/bun": "catalog:", "@types/mime-types": "3.0.1", + "@types/semver": "^7.5.8", "@types/turndown": "5.0.5", "@types/yargs": "17.0.33", 
"@types/which": "3.0.4", "@typescript/native-preview": "catalog:", - "drizzle-kit": "1.0.0-beta.12-a5629fb", - "drizzle-orm": "1.0.0-beta.12-a5629fb", + "drizzle-kit": "1.0.0-beta.16-ea816b6", + "drizzle-orm": "1.0.0-beta.16-ea816b6", "typescript": "catalog:", "vscode-languageserver-types": "3.17.5", "why-is-node-running": "3.2.2", @@ -84,8 +95,8 @@ "@opencode-ai/sdk": "workspace:*", "@opencode-ai/util": "workspace:*", "@openrouter/ai-sdk-provider": "1.5.4", - "@opentui/core": "0.1.86", - "@opentui/solid": "0.1.86", + "@opentui/core": "0.1.87", + "@opentui/solid": "0.1.87", "@parcel/watcher": "2.5.1", "@pierre/diffs": "catalog:", "@solid-primitives/event-bus": "1.1.2", @@ -100,7 +111,8 @@ "clipboardy": "4.0.0", "decimal.js": "10.5.0", "diff": "catalog:", - "drizzle-orm": "1.0.0-beta.12-a5629fb", + "drizzle-orm": "1.0.0-beta.16-ea816b6", + "effect": "catalog:", "fuzzysort": "3.1.0", "glob": "13.0.5", "google-auth-library": "10.5.0", @@ -115,6 +127,7 @@ "opentui-spinner": "0.0.6", "partial-json": "0.1.7", "remeda": "catalog:", + "semver": "^7.6.3", "solid-js": "catalog:", "strip-ansi": "7.1.2", "tree-sitter-bash": "0.25.0", @@ -129,6 +142,6 @@ "zod-to-json-schema": "3.24.5" }, "overrides": { - "drizzle-orm": "1.0.0-beta.12-a5629fb" + "drizzle-orm": "1.0.0-beta.16-ea816b6" } } diff --git a/packages/opencode/parsers-config.ts b/packages/opencode/parsers-config.ts index 0b10d8bbe4..aa32650f5b 100644 --- a/packages/opencode/parsers-config.ts +++ b/packages/opencode/parsers-config.ts @@ -215,7 +215,9 @@ export default { { filetype: "clojure", // temporarily using fork to fix issues - wasm: "https://github.com/anomalyco/tree-sitter-clojure/releases/download/v0.0.1/tree-sitter-clojure.wasm", + // altimate_change start — rebranded tree-sitter fork URL + wasm: "https://github.com/AltimateAI/tree-sitter-clojure/releases/download/v0.0.1/tree-sitter-clojure.wasm", + // altimate_change end queries: { highlights: [ 
"https://raw.githubusercontent.com/nvim-treesitter/nvim-treesitter/refs/heads/master/queries/clojure/highlights.scm", diff --git a/packages/opencode/script/build.ts b/packages/opencode/script/build.ts index 1bfe685f39..c6a83eb736 100755 --- a/packages/opencode/script/build.ts +++ b/packages/opencode/script/build.ts @@ -15,21 +15,6 @@ process.chdir(dir) import { Script } from "@opencode-ai/script" import pkg from "../package.json" -// Read engine version from pyproject.toml -const enginePyprojectPath = path.resolve(dir, "../altimate-engine/pyproject.toml") -const enginePyproject = await Bun.file(enginePyprojectPath).text() -const engineVersionMatch = enginePyproject.match(/^version\s*=\s*"([^"]+)"/m) -if (!engineVersionMatch) { - throw new Error("Could not read engine version from altimate-engine/pyproject.toml") -} -const engineVersion = engineVersionMatch[1] -console.log(`Engine version: ${engineVersion}`) - -// Read CHANGELOG.md for bundling -const changelogPath = path.resolve(dir, "../../CHANGELOG.md") -const changelog = fs.existsSync(changelogPath) ? 
await Bun.file(changelogPath).text() : "" -console.log(`Loaded CHANGELOG.md (${changelog.length} chars)`) - const modelsUrl = process.env.OPENCODE_MODELS_URL || "https://models.dev" // Fetch and generate models.dev snapshot const modelsData = process.env.MODELS_DEV_API_JSON @@ -37,7 +22,7 @@ const modelsData = process.env.MODELS_DEV_API_JSON : await fetch(`${modelsUrl}/api.json`).then((x) => x.text()) await Bun.write( path.join(dir, "src/provider/models-snapshot.ts"), - `// Auto-generated by build.ts - do not edit\nexport const snapshot = ${modelsData.trim()} as const\n`, + `// Auto-generated by build.ts - do not edit\nexport const snapshot = ${modelsData} as const\n`, ) console.log("Generated models-snapshot.ts") @@ -66,7 +51,7 @@ const migrations = await Promise.all( Number(match[6]), ) : 0 - return { sql, timestamp } + return { sql, timestamp, name } }), ) console.log(`Loaded ${migrations.length} migrations`) @@ -123,6 +108,10 @@ const allTargets: { arch: "x64", avx2: false, }, + { + os: "win32", + arch: "arm64", + }, { os: "win32", arch: "x64", @@ -134,17 +123,6 @@ const allTargets: { }, ] -// If --targets is provided, filter to only matching OS values -const validOsValues = new Set(allTargets.map(t => t.os)) -const targetsFlag = process.argv.find(a => a.startsWith('--targets='))?.split('=')[1]?.split(',') -if (targetsFlag) { - const invalid = targetsFlag.filter(t => !validOsValues.has(t)) - if (invalid.length > 0) { - console.error(`error: invalid --targets value(s): ${invalid.join(', ')}. Valid values: ${[...validOsValues].join(', ')}`) - process.exit(1) - } -} - const targets = singleFlag ? allTargets.filter((item) => { if (item.os !== process.platform || item.arch !== process.arch) { @@ -164,9 +142,7 @@ const targets = singleFlag return true }) - : targetsFlag - ? 
allTargets.filter(t => targetsFlag.includes(t.os)) - : allTargets + : allTargets await $`rm -rf dist` @@ -189,8 +165,9 @@ for (const item of targets) { console.log(`building ${name}`) await $`mkdir -p dist/${name}/bin` - const opentuiCoreDir = path.dirname(fileURLToPath(import.meta.resolve("@opentui/core"))) - const parserWorker = fs.realpathSync(path.join(opentuiCoreDir, "parser.worker.js")) + const localPath = path.resolve(dir, "node_modules/@opentui/core/parser.worker.js") + const rootPath = path.resolve(dir, "../../node_modules/@opentui/core/parser.worker.js") + const parserWorker = fs.realpathSync(fs.existsSync(localPath) ? localPath : rootPath) const workerPath = "./src/cli/cmd/tui/worker.ts" // Use platform-specific bunfs root path based on target OS @@ -201,36 +178,27 @@ for (const item of targets) { conditions: ["browser"], tsconfig: "./tsconfig.json", plugins: [solidPlugin], - sourcemap: "external", compile: { autoloadBunfig: false, autoloadDotenv: false, autoloadTsconfig: true, autoloadPackageJson: true, target: name.replace(pkg.name, "bun") as any, - outfile: `dist/${name}/bin/altimate`, - execArgv: [`--user-agent=altimate/${Script.version}`, "--use-system-ca", "--"], + outfile: `dist/${name}/bin/opencode`, + execArgv: [`--user-agent=opencode/${Script.version}`, "--use-system-ca", "--"], windows: {}, }, entrypoints: ["./src/index.ts", parserWorker, workerPath], define: { OPENCODE_VERSION: `'${Script.version}'`, - OPENCODE_CHANNEL: `'${Script.channel}'`, - ALTIMATE_ENGINE_VERSION: `'${engineVersion}'`, - OPENCODE_LIBC: item.os === "linux" ? `'${item.abi ?? "glibc"}'` : "undefined", OPENCODE_MIGRATIONS: JSON.stringify(migrations), - OPENCODE_CHANGELOG: JSON.stringify(changelog), OTUI_TREE_SITTER_WORKER_PATH: bunfsRoot + workerRelativePath, + OPENCODE_WORKER_PATH: workerPath, + OPENCODE_CHANNEL: `'${Script.channel}'`, + OPENCODE_LIBC: item.os === "linux" ? `'${item.abi ?? 
"glibc"}'` : "", }, }) - // Create backward-compatible altimate-code alias - if (item.os === "win32") { - await $`cp dist/${name}/bin/altimate.exe dist/${name}/bin/altimate-code.exe`.nothrow() - } else { - await $`ln -sf altimate dist/${name}/bin/altimate-code`.nothrow() - } - await $`rm -rf ./dist/${name}/bin/tui` await Bun.file(`dist/${name}/package.json`).write( JSON.stringify( @@ -249,14 +217,13 @@ for (const item of targets) { if (Script.release) { for (const key of Object.keys(binaries)) { - const archiveName = key.replace(/^@altimateai\//, "") - const archivePath = path.resolve("dist", archiveName) if (key.includes("linux")) { - await $`tar -czf ${archivePath}.tar.gz *`.cwd(`dist/${key}/bin`) + await $`tar -czf ../../${key}.tar.gz *`.cwd(`dist/${key}/bin`) } else { - await $`zip -r ${archivePath}.zip *`.cwd(`dist/${key}/bin`) + await $`zip -r ../../${key}.zip *`.cwd(`dist/${key}/bin`) } } + await $`gh release upload v${Script.version} ./dist/*.zip ./dist/*.tar.gz --clobber --repo ${process.env.GH_REPO}` } export { binaries } diff --git a/packages/opencode/script/seed-e2e.ts b/packages/opencode/script/seed-e2e.ts index ba2155cb69..f34dd051db 100644 --- a/packages/opencode/script/seed-e2e.ts +++ b/packages/opencode/script/seed-e2e.ts @@ -10,17 +10,23 @@ const now = Date.now() const seed = async () => { const { Instance } = await import("../src/project/instance") const { InstanceBootstrap } = await import("../src/project/bootstrap") + const { Config } = await import("../src/config/config") const { Session } = await import("../src/session") - const { Identifier } = await import("../src/id/id") + const { MessageID, PartID } = await import("../src/session/schema") const { Project } = await import("../src/project/project") + const { ModelID, ProviderID } = await import("../src/provider/schema") + const { ToolRegistry } = await import("../src/tool/registry") await Instance.provide({ directory: dir, init: InstanceBootstrap, fn: async () => { + await 
Config.waitForDependencies() + await ToolRegistry.ids() + const session = await Session.create({ title }) - const messageID = Identifier.descending("message") - const partID = Identifier.descending("part") + const messageID = MessageID.ascending() + const partID = PartID.ascending() const message = { id: messageID, sessionID: session.id, @@ -28,8 +34,8 @@ const seed = async () => { time: { created: now }, agent: "build", model: { - providerID, - modelID, + providerID: ProviderID.make(providerID), + modelID: ModelID.make(modelID), }, } const part = { diff --git a/packages/opencode/src/account/account.sql.ts b/packages/opencode/src/account/account.sql.ts new file mode 100644 index 0000000000..35bfd1e3ed --- /dev/null +++ b/packages/opencode/src/account/account.sql.ts @@ -0,0 +1,39 @@ +import { sqliteTable, text, integer, primaryKey } from "drizzle-orm/sqlite-core" + +import { type AccessToken, type AccountID, type OrgID, type RefreshToken } from "./schema" +import { Timestamps } from "../storage/schema.sql" + +export const AccountTable = sqliteTable("account", { + id: text().$type().primaryKey(), + email: text().notNull(), + url: text().notNull(), + access_token: text().$type().notNull(), + refresh_token: text().$type().notNull(), + token_expiry: integer(), + ...Timestamps, +}) + +export const AccountStateTable = sqliteTable("account_state", { + id: integer().primaryKey(), + active_account_id: text() + .$type() + .references(() => AccountTable.id, { onDelete: "set null" }), + active_org_id: text().$type(), +}) + +// LEGACY +export const ControlAccountTable = sqliteTable( + "control_account", + { + email: text().notNull(), + url: text().notNull(), + access_token: text().$type().notNull(), + refresh_token: text().$type().notNull(), + token_expiry: integer(), + active: integer({ mode: "boolean" }) + .notNull() + .$default(() => false), + ...Timestamps, + }, + (table) => [primaryKey({ columns: [table.email, table.url] })], +) diff --git 
a/packages/opencode/src/account/index.ts b/packages/opencode/src/account/index.ts new file mode 100644 index 0000000000..ed4c3d8798 --- /dev/null +++ b/packages/opencode/src/account/index.ts @@ -0,0 +1,41 @@ +import { Effect, Option } from "effect" + +import { + Account as AccountSchema, + type AccountError, + type AccessToken, + AccountID, + AccountService, + OrgID, +} from "./service" + +export { AccessToken, AccountID, OrgID } from "./service" + +import { runtime } from "@/effect/runtime" + +function runSync(f: (service: AccountService.Service) => Effect.Effect) { + return runtime.runSync(AccountService.use(f)) +} + +function runPromise(f: (service: AccountService.Service) => Effect.Effect) { + return runtime.runPromise(AccountService.use(f)) +} + +export namespace Account { + export const Account = AccountSchema + export type Account = AccountSchema + + export function active(): Account | undefined { + return Option.getOrUndefined(runSync((service) => service.active())) + } + + export async function config(accountID: AccountID, orgID: OrgID): Promise | undefined> { + const config = await runPromise((service) => service.config(accountID, orgID)) + return Option.getOrUndefined(config) + } + + export async function token(accountID: AccountID): Promise { + const token = await runPromise((service) => service.token(accountID)) + return Option.getOrUndefined(token) + } +} diff --git a/packages/opencode/src/account/repo.ts b/packages/opencode/src/account/repo.ts new file mode 100644 index 0000000000..5caf1a3b94 --- /dev/null +++ b/packages/opencode/src/account/repo.ts @@ -0,0 +1,160 @@ +import { eq } from "drizzle-orm" +import { Effect, Layer, Option, Schema, ServiceMap } from "effect" + +import { Database } from "@/storage/db" +import { AccountStateTable, AccountTable } from "./account.sql" +import { AccessToken, Account, AccountID, AccountRepoError, OrgID, RefreshToken } from "./schema" + +export type AccountRow = (typeof AccountTable)["$inferSelect"] + +type 
DbClient = Parameters[0] extends (db: infer T) => unknown ? T : never + +const ACCOUNT_STATE_ID = 1 + +export namespace AccountRepo { + export interface Service { + readonly active: () => Effect.Effect, AccountRepoError> + readonly list: () => Effect.Effect + readonly remove: (accountID: AccountID) => Effect.Effect + readonly use: (accountID: AccountID, orgID: Option.Option) => Effect.Effect + readonly getRow: (accountID: AccountID) => Effect.Effect, AccountRepoError> + readonly persistToken: (input: { + accountID: AccountID + accessToken: AccessToken + refreshToken: RefreshToken + expiry: Option.Option + }) => Effect.Effect + readonly persistAccount: (input: { + id: AccountID + email: string + url: string + accessToken: AccessToken + refreshToken: RefreshToken + expiry: number + orgID: Option.Option + }) => Effect.Effect + } +} + +export class AccountRepo extends ServiceMap.Service()("@opencode/AccountRepo") { + static readonly layer: Layer.Layer = Layer.effect( + AccountRepo, + Effect.gen(function* () { + const decode = Schema.decodeUnknownSync(Account) + + const query = (f: (db: DbClient) => A) => + Effect.try({ + try: () => Database.use(f), + catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), + }) + + const tx = (f: (db: DbClient) => A) => + Effect.try({ + try: () => Database.transaction(f), + catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), + }) + + const current = (db: DbClient) => { + const state = db.select().from(AccountStateTable).where(eq(AccountStateTable.id, ACCOUNT_STATE_ID)).get() + if (!state?.active_account_id) return + const account = db.select().from(AccountTable).where(eq(AccountTable.id, state.active_account_id)).get() + if (!account) return + return { ...account, active_org_id: state.active_org_id ?? 
null } + } + + const state = (db: DbClient, accountID: AccountID, orgID: Option.Option) => { + const id = Option.getOrNull(orgID) + return db + .insert(AccountStateTable) + .values({ id: ACCOUNT_STATE_ID, active_account_id: accountID, active_org_id: id }) + .onConflictDoUpdate({ + target: AccountStateTable.id, + set: { active_account_id: accountID, active_org_id: id }, + }) + .run() + } + + const active = Effect.fn("AccountRepo.active")(() => + query((db) => current(db)).pipe(Effect.map((row) => (row ? Option.some(decode(row)) : Option.none()))), + ) + + const list = Effect.fn("AccountRepo.list")(() => + query((db) => + db + .select() + .from(AccountTable) + .all() + .map((row: AccountRow) => decode({ ...row, active_org_id: null })), + ), + ) + + const remove = Effect.fn("AccountRepo.remove")((accountID: AccountID) => + tx((db) => { + db.update(AccountStateTable) + .set({ active_account_id: null, active_org_id: null }) + .where(eq(AccountStateTable.active_account_id, accountID)) + .run() + db.delete(AccountTable).where(eq(AccountTable.id, accountID)).run() + }).pipe(Effect.asVoid), + ) + + const use = Effect.fn("AccountRepo.use")((accountID: AccountID, orgID: Option.Option) => + query((db) => state(db, accountID, orgID)).pipe(Effect.asVoid), + ) + + const getRow = Effect.fn("AccountRepo.getRow")((accountID: AccountID) => + query((db) => db.select().from(AccountTable).where(eq(AccountTable.id, accountID)).get()).pipe( + Effect.map(Option.fromNullishOr), + ), + ) + + const persistToken = Effect.fn("AccountRepo.persistToken")((input) => + query((db) => + db + .update(AccountTable) + .set({ + access_token: input.accessToken, + refresh_token: input.refreshToken, + token_expiry: Option.getOrNull(input.expiry), + }) + .where(eq(AccountTable.id, input.accountID)) + .run(), + ).pipe(Effect.asVoid), + ) + + const persistAccount = Effect.fn("AccountRepo.persistAccount")((input) => + tx((db) => { + db.insert(AccountTable) + .values({ + id: input.id, + email: input.email, + 
url: input.url, + access_token: input.accessToken, + refresh_token: input.refreshToken, + token_expiry: input.expiry, + }) + .onConflictDoUpdate({ + target: AccountTable.id, + set: { + access_token: input.accessToken, + refresh_token: input.refreshToken, + token_expiry: input.expiry, + }, + }) + .run() + void state(db, input.id, input.orgID) + }).pipe(Effect.asVoid), + ) + + return AccountRepo.of({ + active, + list, + remove, + use, + getRow, + persistToken, + persistAccount, + }) + }), + ) +} diff --git a/packages/opencode/src/account/schema.ts b/packages/opencode/src/account/schema.ts new file mode 100644 index 0000000000..9b31c4ba65 --- /dev/null +++ b/packages/opencode/src/account/schema.ts @@ -0,0 +1,91 @@ +import { Schema } from "effect" + +import { withStatics } from "@/util/schema" + +export const AccountID = Schema.String.pipe( + Schema.brand("AccountID"), + withStatics((s) => ({ make: (id: string) => s.makeUnsafe(id) })), +) +export type AccountID = Schema.Schema.Type + +export const OrgID = Schema.String.pipe( + Schema.brand("OrgID"), + withStatics((s) => ({ make: (id: string) => s.makeUnsafe(id) })), +) +export type OrgID = Schema.Schema.Type + +export const AccessToken = Schema.String.pipe( + Schema.brand("AccessToken"), + withStatics((s) => ({ make: (token: string) => s.makeUnsafe(token) })), +) +export type AccessToken = Schema.Schema.Type + +export const RefreshToken = Schema.String.pipe( + Schema.brand("RefreshToken"), + withStatics((s) => ({ make: (token: string) => s.makeUnsafe(token) })), +) +export type RefreshToken = Schema.Schema.Type + +export const DeviceCode = Schema.String.pipe( + Schema.brand("DeviceCode"), + withStatics((s) => ({ make: (code: string) => s.makeUnsafe(code) })), +) +export type DeviceCode = Schema.Schema.Type + +export const UserCode = Schema.String.pipe( + Schema.brand("UserCode"), + withStatics((s) => ({ make: (code: string) => s.makeUnsafe(code) })), +) +export type UserCode = Schema.Schema.Type + +export class Account 
extends Schema.Class("Account")({ + id: AccountID, + email: Schema.String, + url: Schema.String, + active_org_id: Schema.NullOr(OrgID), +}) {} + +export class Org extends Schema.Class("Org")({ + id: OrgID, + name: Schema.String, +}) {} + +export class AccountRepoError extends Schema.TaggedErrorClass()("AccountRepoError", { + message: Schema.String, + cause: Schema.optional(Schema.Defect), +}) {} + +export class AccountServiceError extends Schema.TaggedErrorClass()("AccountServiceError", { + message: Schema.String, + cause: Schema.optional(Schema.Defect), +}) {} + +export type AccountError = AccountRepoError | AccountServiceError + +export class Login extends Schema.Class("Login")({ + code: DeviceCode, + user: UserCode, + url: Schema.String, + server: Schema.String, + expiry: Schema.Duration, + interval: Schema.Duration, +}) {} + +export class PollSuccess extends Schema.TaggedClass()("PollSuccess", { + email: Schema.String, +}) {} + +export class PollPending extends Schema.TaggedClass()("PollPending", {}) {} + +export class PollSlow extends Schema.TaggedClass()("PollSlow", {}) {} + +export class PollExpired extends Schema.TaggedClass()("PollExpired", {}) {} + +export class PollDenied extends Schema.TaggedClass()("PollDenied", {}) {} + +export class PollError extends Schema.TaggedClass()("PollError", { + cause: Schema.Defect, +}) {} + +export const PollResult = Schema.Union([PollSuccess, PollPending, PollSlow, PollExpired, PollDenied, PollError]) +export type PollResult = Schema.Schema.Type diff --git a/packages/opencode/src/account/service.ts b/packages/opencode/src/account/service.ts new file mode 100644 index 0000000000..87e95c8f44 --- /dev/null +++ b/packages/opencode/src/account/service.ts @@ -0,0 +1,359 @@ +import { Clock, Duration, Effect, Layer, Option, Schema, SchemaGetter, ServiceMap } from "effect" +import { FetchHttpClient, HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http" + +import { withTransientReadRetry } from 
"@/util/effect-http-client" +import { AccountRepo, type AccountRow } from "./repo" +import { + type AccountError, + AccessToken, + Account, + AccountID, + DeviceCode, + RefreshToken, + AccountServiceError, + Login, + Org, + OrgID, + PollDenied, + PollError, + PollExpired, + PollPending, + type PollResult, + PollSlow, + PollSuccess, + UserCode, +} from "./schema" + +export * from "./schema" + +export type AccountOrgs = { + account: Account + orgs: readonly Org[] +} + +class RemoteConfig extends Schema.Class("RemoteConfig")({ + config: Schema.Record(Schema.String, Schema.Json), +}) {} + +const DurationFromSeconds = Schema.Number.pipe( + Schema.decodeTo(Schema.Duration, { + decode: SchemaGetter.transform((n) => Duration.seconds(n)), + encode: SchemaGetter.transform((d) => Duration.toSeconds(d)), + }), +) + +class TokenRefresh extends Schema.Class("TokenRefresh")({ + access_token: AccessToken, + refresh_token: RefreshToken, + expires_in: DurationFromSeconds, +}) {} + +class DeviceAuth extends Schema.Class("DeviceAuth")({ + device_code: DeviceCode, + user_code: UserCode, + verification_uri_complete: Schema.String, + expires_in: DurationFromSeconds, + interval: DurationFromSeconds, +}) {} + +class DeviceTokenSuccess extends Schema.Class("DeviceTokenSuccess")({ + access_token: AccessToken, + refresh_token: RefreshToken, + token_type: Schema.Literal("Bearer"), + expires_in: DurationFromSeconds, +}) {} + +class DeviceTokenError extends Schema.Class("DeviceTokenError")({ + error: Schema.String, + error_description: Schema.String, +}) { + toPollResult(): PollResult { + if (this.error === "authorization_pending") return new PollPending() + if (this.error === "slow_down") return new PollSlow() + if (this.error === "expired_token") return new PollExpired() + if (this.error === "access_denied") return new PollDenied() + return new PollError({ cause: this.error }) + } +} + +const DeviceToken = Schema.Union([DeviceTokenSuccess, DeviceTokenError]) + +class User extends 
Schema.Class("User")({ + id: AccountID, + email: Schema.String, +}) {} + +class ClientId extends Schema.Class("ClientId")({ client_id: Schema.String }) {} + +class DeviceTokenRequest extends Schema.Class("DeviceTokenRequest")({ + grant_type: Schema.String, + device_code: DeviceCode, + client_id: Schema.String, +}) {} + +class TokenRefreshRequest extends Schema.Class("TokenRefreshRequest")({ + grant_type: Schema.String, + refresh_token: RefreshToken, + client_id: Schema.String, +}) {} + +const clientId = "opencode-cli" + +const mapAccountServiceError = + (message = "Account service operation failed") => + (effect: Effect.Effect): Effect.Effect => + effect.pipe( + Effect.mapError((cause) => + cause instanceof AccountServiceError ? cause : new AccountServiceError({ message, cause }), + ), + ) + +export namespace AccountService { + export interface Service { + readonly active: () => Effect.Effect, AccountError> + readonly list: () => Effect.Effect + readonly orgsByAccount: () => Effect.Effect + readonly remove: (accountID: AccountID) => Effect.Effect + readonly use: (accountID: AccountID, orgID: Option.Option) => Effect.Effect + readonly orgs: (accountID: AccountID) => Effect.Effect + readonly config: ( + accountID: AccountID, + orgID: OrgID, + ) => Effect.Effect>, AccountError> + readonly token: (accountID: AccountID) => Effect.Effect, AccountError> + readonly login: (url: string) => Effect.Effect + readonly poll: (input: Login) => Effect.Effect + } +} + +export class AccountService extends ServiceMap.Service()("@opencode/Account") { + static readonly layer: Layer.Layer = Layer.effect( + AccountService, + Effect.gen(function* () { + const repo = yield* AccountRepo + const http = yield* HttpClient.HttpClient + const httpRead = withTransientReadRetry(http) + const httpOk = HttpClient.filterStatusOk(http) + const httpReadOk = HttpClient.filterStatusOk(httpRead) + + const executeRead = (request: HttpClientRequest.HttpClientRequest) => + 
httpRead.execute(request).pipe(mapAccountServiceError("HTTP request failed")) + + const executeReadOk = (request: HttpClientRequest.HttpClientRequest) => + httpReadOk.execute(request).pipe(mapAccountServiceError("HTTP request failed")) + + const executeEffectOk = (request: Effect.Effect) => + request.pipe( + Effect.flatMap((req) => httpOk.execute(req)), + mapAccountServiceError("HTTP request failed"), + ) + + // Returns a usable access token for a stored account row, refreshing and + // persisting it when the cached token has expired. + const resolveToken = Effect.fnUntraced(function* (row: AccountRow) { + const now = yield* Clock.currentTimeMillis + if (row.token_expiry && row.token_expiry > now) return row.access_token + + const response = yield* executeEffectOk( + HttpClientRequest.post(`${row.url}/auth/device/token`).pipe( + HttpClientRequest.acceptJson, + HttpClientRequest.schemaBodyJson(TokenRefreshRequest)( + new TokenRefreshRequest({ + grant_type: "refresh_token", + refresh_token: row.refresh_token, + client_id: clientId, + }), + ), + ), + ) + + const parsed = yield* HttpClientResponse.schemaBodyJson(TokenRefresh)(response).pipe( + mapAccountServiceError("Failed to decode response"), + ) + + const expiry = Option.some(now + Duration.toMillis(parsed.expires_in)) + + yield* repo.persistToken({ + accountID: row.id, + accessToken: parsed.access_token, + refreshToken: parsed.refresh_token, + expiry, + }) + + return parsed.access_token + }) + + const resolveAccess = Effect.fnUntraced(function* (accountID: AccountID) { + const maybeAccount = yield* repo.getRow(accountID) + if (Option.isNone(maybeAccount)) return Option.none() + + const account = maybeAccount.value + const accessToken = yield* resolveToken(account) + return Option.some({ account, accessToken }) + }) + + const fetchOrgs = Effect.fnUntraced(function* (url: string, accessToken: AccessToken) { + const response = yield* executeReadOk( + HttpClientRequest.get(`${url}/api/orgs`).pipe( + 
HttpClientRequest.acceptJson, + HttpClientRequest.bearerToken(accessToken), + ), + ) + + return yield* HttpClientResponse.schemaBodyJson(Schema.Array(Org))(response).pipe( + mapAccountServiceError("Failed to decode response"), + ) + }) + + const fetchUser = Effect.fnUntraced(function* (url: string, accessToken: AccessToken) { + const response = yield* executeReadOk( + HttpClientRequest.get(`${url}/api/user`).pipe( + HttpClientRequest.acceptJson, + HttpClientRequest.bearerToken(accessToken), + ), + ) + + return yield* HttpClientResponse.schemaBodyJson(User)(response).pipe( + mapAccountServiceError("Failed to decode response"), + ) + }) + + const token = Effect.fn("AccountService.token")((accountID: AccountID) => + resolveAccess(accountID).pipe(Effect.map(Option.map((r) => r.accessToken))), + ) + + const orgsByAccount = Effect.fn("AccountService.orgsByAccount")(function* () { + const accounts = yield* repo.list() + const [errors, results] = yield* Effect.partition( + accounts, + (account) => orgs(account.id).pipe(Effect.map((orgs) => ({ account, orgs }))), + { concurrency: 3 }, + ) + for (const error of errors) { + yield* Effect.logWarning("failed to fetch orgs for account").pipe( + Effect.annotateLogs({ error: String(error) }), + ) + } + return results + }) + + const orgs = Effect.fn("AccountService.orgs")(function* (accountID: AccountID) { + const resolved = yield* resolveAccess(accountID) + if (Option.isNone(resolved)) return [] + + const { account, accessToken } = resolved.value + + return yield* fetchOrgs(account.url, accessToken) + }) + + const config = Effect.fn("AccountService.config")(function* (accountID: AccountID, orgID: OrgID) { + const resolved = yield* resolveAccess(accountID) + if (Option.isNone(resolved)) return Option.none() + + const { account, accessToken } = resolved.value + + const response = yield* executeRead( + HttpClientRequest.get(`${account.url}/api/config`).pipe( + HttpClientRequest.acceptJson, + 
HttpClientRequest.bearerToken(accessToken), + HttpClientRequest.setHeaders({ "x-org-id": orgID }), + ), + ) + + if (response.status === 404) return Option.none() + + const ok = yield* HttpClientResponse.filterStatusOk(response).pipe(mapAccountServiceError()) + + const parsed = yield* HttpClientResponse.schemaBodyJson(RemoteConfig)(ok).pipe( + mapAccountServiceError("Failed to decode response"), + ) + return Option.some(parsed.config) + }) + + const login = Effect.fn("AccountService.login")(function* (server: string) { + const response = yield* executeEffectOk( + HttpClientRequest.post(`${server}/auth/device/code`).pipe( + HttpClientRequest.acceptJson, + HttpClientRequest.schemaBodyJson(ClientId)(new ClientId({ client_id: clientId })), + ), + ) + + const parsed = yield* HttpClientResponse.schemaBodyJson(DeviceAuth)(response).pipe( + mapAccountServiceError("Failed to decode response"), + ) + return new Login({ + code: parsed.device_code, + user: parsed.user_code, + url: `${server}${parsed.verification_uri_complete}`, + server, + expiry: parsed.expires_in, + interval: parsed.interval, + }) + }) + + const poll = Effect.fn("AccountService.poll")(function* (input: Login) { + const response = yield* executeEffectOk( + HttpClientRequest.post(`${input.server}/auth/device/token`).pipe( + HttpClientRequest.acceptJson, + HttpClientRequest.schemaBodyJson(DeviceTokenRequest)( + new DeviceTokenRequest({ + grant_type: "urn:ietf:params:oauth:grant-type:device_code", + device_code: input.code, + client_id: clientId, + }), + ), + ), + ) + + const parsed = yield* HttpClientResponse.schemaBodyJson(DeviceToken)(response).pipe( + mapAccountServiceError("Failed to decode response"), + ) + + if (parsed instanceof DeviceTokenError) return parsed.toPollResult() + const accessToken = parsed.access_token + + const user = fetchUser(input.server, accessToken) + const orgs = fetchOrgs(input.server, accessToken) + + const [account, remoteOrgs] = yield* Effect.all([user, orgs], { concurrency: 2 }) 
+ + // TODO: When there are multiple orgs, let the user choose + const firstOrgID = remoteOrgs.length > 0 ? Option.some(remoteOrgs[0].id) : Option.none() + + const now = yield* Clock.currentTimeMillis + const expiry = now + Duration.toMillis(parsed.expires_in) + const refreshToken = parsed.refresh_token + + yield* repo.persistAccount({ + id: account.id, + email: account.email, + url: input.server, + accessToken, + refreshToken, + expiry, + orgID: firstOrgID, + }) + + return new PollSuccess({ email: account.email }) + }) + + return AccountService.of({ + active: repo.active, + list: repo.list, + orgsByAccount, + remove: repo.remove, + use: repo.use, + orgs, + config, + token, + login, + poll, + }) + }), + ) + + static readonly defaultLayer = AccountService.layer.pipe( + Layer.provide(AccountRepo.layer), + Layer.provide(FetchHttpClient.layer), + ) +} diff --git a/packages/opencode/src/acp/README.md b/packages/opencode/src/acp/README.md index aab33259bb..4afc28a281 100644 --- a/packages/opencode/src/acp/README.md +++ b/packages/opencode/src/acp/README.md @@ -69,7 +69,7 @@ Add to your Zed configuration (`~/.config/zed/settings.json`): ```json { "agent_servers": { - "OpenCode": { + "Altimate Code": { "command": "opencode", "args": ["acp"] } @@ -158,7 +158,7 @@ Each component has a single responsibility: This makes the codebase maintainable and testable. 
-### Mapping to OpenCode +### Mapping to Altimate Code ACP sessions map cleanly to opencode's internal session model: diff --git a/packages/opencode/src/acp/agent.ts b/packages/opencode/src/acp/agent.ts index 5bc6f7333d..99b0c286cb 100644 --- a/packages/opencode/src/acp/agent.ts +++ b/packages/opencode/src/acp/agent.ts @@ -29,12 +29,13 @@ import { } from "@agentclientprotocol/sdk" import { Log } from "../util/log" -import { pathToFileURL } from "bun" +import { pathToFileURL } from "url" import { Filesystem } from "../util/filesystem" import { Hash } from "../util/hash" import { ACPSessionManager } from "./session" import type { ACPConfig } from "./types" import { Provider } from "../provider/provider" +import { ModelID, ProviderID } from "../provider/schema" import { Agent as AgentModule } from "../agent/agent" import { Installation } from "@/installation" import { MessageV2 } from "@/session/message-v2" @@ -55,8 +56,8 @@ export namespace ACP { async function getContextLimit( sdk: OpencodeClient, - providerID: string, - modelID: string, + providerID: ProviderID, + modelID: ModelID, directory: string, ): Promise { const providers = await sdk.config @@ -96,7 +97,8 @@ export namespace ACP { if (!lastAssistant) return const msg = lastAssistant.info - const size = await getContextLimit(sdk, msg.providerID, msg.modelID, directory) + if (!msg.providerID || !msg.modelID) return + const size = await getContextLimit(sdk, ProviderID.make(msg.providerID), ModelID.make(msg.modelID), directory) if (!size) { // Cannot calculate usage without known context size @@ -137,7 +139,7 @@ export namespace ACP { private eventAbort = new AbortController() private eventStarted = false private bashSnapshots = new Map() - private pendingEmitted = new Set() + private toolStarts = new Set() private permissionQueues = new Map>() private permissionOptions: PermissionOption[] = [ { optionId: "once", kind: "allow_once", name: "Allow once" }, @@ -269,68 +271,16 @@ export namespace ACP { const session 
= this.sessionManager.tryGet(part.sessionID) if (!session) return const sessionId = session.id - const directory = session.cwd - - const message = await this.sdk.session - .message( - { - sessionID: part.sessionID, - messageID: part.messageID, - directory, - }, - { throwOnError: true }, - ) - .then((x) => x.data) - .catch((error) => { - log.error("unexpected error when fetching message", { error }) - return undefined - }) - - if (!message || message.info.role !== "assistant") return if (part.type === "tool") { + await this.toolStart(sessionId, part) + switch (part.state.status) { case "pending": this.bashSnapshots.delete(part.callID) - this.pendingEmitted.add(part.callID) - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call", - toolCallId: part.callID, - title: part.tool, - kind: toToolKind(part.tool), - status: "pending", - locations: [], - rawInput: {}, - }, - }) - .catch((error) => { - log.error("failed to send tool pending to ACP", { error }) - }) return case "running": - if (!this.pendingEmitted.has(part.callID)) { - this.pendingEmitted.add(part.callID) - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call", - toolCallId: part.callID, - title: part.tool, - kind: toToolKind(part.tool), - status: "pending", - locations: [], - rawInput: {}, - }, - }) - .catch((error) => { - log.error("failed to send synthetic tool pending to ACP", { error }) - }) - } const output = this.bashOutput(part) const content: ToolCallContent[] = [] if (output) { @@ -385,6 +335,8 @@ export namespace ACP { return case "completed": { + this.toolStarts.delete(part.callID) + this.bashSnapshots.delete(part.callID) const kind = toToolKind(part.tool) const content: ToolCallContent[] = [ { @@ -464,6 +416,8 @@ export namespace ACP { return } case "error": + this.toolStarts.delete(part.callID) + this.bashSnapshots.delete(part.callID) await this.connection .sessionUpdate({ sessionId, @@ -485,6 +439,7 @@ export 
namespace ACP { ], rawOutput: { error: part.state.error, + metadata: part.state.metadata, }, }, }) @@ -566,18 +521,18 @@ export namespace ACP { log.info("initialize", { protocolVersion: params.protocolVersion }) const authMethod: AuthMethod = { - description: "Run `altimate auth login` in the terminal", - name: "Login with altimate", - id: "altimate-code-login", + description: "Run `opencode auth login` in the terminal", + name: "Login with opencode", + id: "opencode-login", } // If client supports terminal-auth capability, use that instead. if (params.clientCapabilities?._meta?.["terminal-auth"] === true) { authMethod._meta = { "terminal-auth": { - command: "altimate", + command: "opencode", args: ["auth", "login"], - label: "Altimate CLI Login", + label: "Altimate Code Login", }, } } @@ -602,7 +557,7 @@ export namespace ACP { }, authMethods: [authMethod], agentInfo: { - name: "Altimate CLI", + name: "Altimate Code", version: Installation.VERSION, }, } @@ -637,7 +592,7 @@ export namespace ACP { } } catch (e) { const error = MessageV2.fromError(e, { - providerID: this.config.defaultModel?.providerID ?? "unknown", + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), }) if (LoadAPIKeyError.isInstance(error)) { throw RequestError.authRequired() @@ -683,8 +638,8 @@ export namespace ACP { if (lastUser?.role === "user") { result.models.currentModelId = `${lastUser.model.providerID}/${lastUser.model.modelID}` this.sessionManager.setModel(sessionId, { - providerID: lastUser.model.providerID, - modelID: lastUser.model.modelID, + providerID: ProviderID.make(lastUser.model.providerID), + modelID: ModelID.make(lastUser.model.modelID), }) if (result.modes?.availableModes.some((m) => m.id === lastUser.agent)) { result.modes.currentModeId = lastUser.agent @@ -702,7 +657,7 @@ export namespace ACP { return result } catch (e) { const error = MessageV2.fromError(e, { - providerID: this.config.defaultModel?.providerID ?? 
"unknown", + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), }) if (LoadAPIKeyError.isInstance(error)) { throw RequestError.authRequired() @@ -747,7 +702,7 @@ export namespace ACP { return response } catch (e) { const error = MessageV2.fromError(e, { - providerID: this.config.defaultModel?.providerID ?? "unknown", + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), }) if (LoadAPIKeyError.isInstance(error)) { throw RequestError.authRequired() @@ -812,7 +767,7 @@ export namespace ACP { return mode } catch (e) { const error = MessageV2.fromError(e, { - providerID: this.config.defaultModel?.providerID ?? "unknown", + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? "unknown"), }) if (LoadAPIKeyError.isInstance(error)) { throw RequestError.authRequired() @@ -843,7 +798,7 @@ export namespace ACP { return result } catch (e) { const error = MessageV2.fromError(e, { - providerID: this.config.defaultModel?.providerID ?? "unknown", + providerID: ProviderID.make(this.config.defaultModel?.providerID ?? 
"unknown"), }) if (LoadAPIKeyError.isInstance(error)) { throw RequestError.authRequired() @@ -859,45 +814,22 @@ export namespace ACP { for (const part of message.parts) { if (part.type === "tool") { + await this.toolStart(sessionId, part) switch (part.state.status) { case "pending": - this.pendingEmitted.add(part.callID) - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call", - toolCallId: part.callID, - title: part.tool, - kind: toToolKind(part.tool), - status: "pending", - locations: [], - rawInput: {}, - }, - }) - .catch((err) => { - log.error("failed to send tool pending to ACP", { error: err }) - }) + this.bashSnapshots.delete(part.callID) break case "running": - if (!this.pendingEmitted.has(part.callID)) { - this.pendingEmitted.add(part.callID) - await this.connection - .sessionUpdate({ - sessionId, - update: { - sessionUpdate: "tool_call", - toolCallId: part.callID, - title: part.tool, - kind: toToolKind(part.tool), - status: "pending", - locations: [], - rawInput: {}, - }, - }) - .catch((err) => { - log.error("failed to send synthetic tool pending to ACP", { error: err }) - }) + const output = this.bashOutput(part) + const runningContent: ToolCallContent[] = [] + if (output) { + runningContent.push({ + type: "content", + content: { + type: "text", + text: output, + }, + }) } await this.connection .sessionUpdate({ @@ -910,6 +842,7 @@ export namespace ACP { title: part.tool, locations: toLocations(part.tool, part.state.input), rawInput: part.state.input, + ...(runningContent.length > 0 && { content: runningContent }), }, }) .catch((err) => { @@ -917,6 +850,8 @@ export namespace ACP { }) break case "completed": + this.toolStarts.delete(part.callID) + this.bashSnapshots.delete(part.callID) const kind = toToolKind(part.tool) const content: ToolCallContent[] = [ { @@ -995,6 +930,8 @@ export namespace ACP { }) break case "error": + this.toolStarts.delete(part.callID) + this.bashSnapshots.delete(part.callID) await 
this.connection .sessionUpdate({ sessionId, @@ -1016,6 +953,7 @@ export namespace ACP { ], rawOutput: { error: part.state.error, + metadata: part.state.metadata, }, }, }) @@ -1045,7 +983,7 @@ export namespace ACP { } } else if (part.type === "file") { // Replay file attachments as appropriate ACP content blocks. - // Altimate CLI stores files internally as { type: "file", url, filename, mime }. + // Altimate Code stores files internally as { type: "file", url, filename, mime }. // We convert these back to ACP blocks based on the URL scheme and MIME type: // - file:// URLs → resource_link // - data: URLs with image/* → image block @@ -1142,6 +1080,35 @@ export namespace ACP { } } + private bashOutput(part: ToolPart) { + if (part.tool !== "bash") return + if (!("metadata" in part.state) || !part.state.metadata || typeof part.state.metadata !== "object") return + const output = part.state.metadata["output"] + if (typeof output !== "string") return + return output + } + + private async toolStart(sessionId: string, part: ToolPart) { + if (this.toolStarts.has(part.callID)) return + this.toolStarts.add(part.callID) + await this.connection + .sessionUpdate({ + sessionId, + update: { + sessionUpdate: "tool_call", + toolCallId: part.callID, + title: part.tool, + kind: toToolKind(part.tool), + status: "pending", + locations: [], + rawInput: {}, + }, + }) + .catch((error) => { + log.error("failed to send tool pending to ACP", { error }) + }) + } + private async loadAvailableModes(directory: string): Promise { const agents = await this.config.sdk.app .agents( @@ -1511,14 +1478,6 @@ export namespace ACP { { throwOnError: true }, ) } - - private bashOutput(part: ToolPart) { - if (part.tool !== "bash") return - if (!("metadata" in part.state) || !part.state.metadata || typeof part.state.metadata !== "object") return - const output = part.state.metadata["output"] - if (typeof output !== "string") return - return output - } } function toToolKind(toolName: string): ToolKind { @@ 
-1568,7 +1527,7 @@ export namespace ACP { } } - async function defaultModel(config: ACPConfig, cwd?: string) { + async function defaultModel(config: ACPConfig, cwd?: string): Promise<{ providerID: ProviderID; modelID: ModelID }> { const sdk = config.sdk const configured = config.defaultModel if (configured) return configured @@ -1580,11 +1539,7 @@ export namespace ACP { .then((resp) => { const cfg = resp.data if (!cfg || !cfg.model) return undefined - const parsed = Provider.parseModel(cfg.model) - return { - providerID: parsed.providerID, - modelID: parsed.modelID, - } + return Provider.parseModel(cfg.model) }) .catch((error) => { log.error("failed to load user config for default model", { error }) @@ -1606,16 +1561,16 @@ export namespace ACP { if (specified && !providers.length) return specified - const altimateCodeProvider = providers.find((p) => p.id === "altimate-code") - if (altimateCodeProvider) { - if (altimateCodeProvider.models["big-pickle"]) { - return { providerID: "altimate-code", modelID: "big-pickle" } + const opencodeProvider = providers.find((p) => p.id === "opencode") + if (opencodeProvider) { + if (opencodeProvider.models["big-pickle"]) { + return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } } - const [best] = Provider.sort(Object.values(altimateCodeProvider.models)) + const [best] = Provider.sort(Object.values(opencodeProvider.models)) if (best) { return { - providerID: best.providerID, - modelID: best.id, + providerID: ProviderID.make(best.providerID), + modelID: ModelID.make(best.id), } } } @@ -1624,14 +1579,14 @@ export namespace ACP { const [best] = Provider.sort(models) if (best) { return { - providerID: best.providerID, - modelID: best.id, + providerID: ProviderID.make(best.providerID), + modelID: ModelID.make(best.id), } } if (specified) return specified - return { providerID: "altimate-code", modelID: "big-pickle" } + return { providerID: ProviderID.opencode, modelID: ModelID.make("big-pickle") } } function 
parseUri( @@ -1694,7 +1649,7 @@ export namespace ACP { function modelVariantsFromProviders( providers: Array<{ id: string; models: Record }> }>, - model: { providerID: string; modelID: string }, + model: { providerID: ProviderID; modelID: ModelID }, ): string[] { const provider = providers.find((entry) => entry.id === model.providerID) if (!provider) return [] @@ -1709,7 +1664,10 @@ export namespace ACP { ): ModelOption[] { const includeVariants = options.includeVariants ?? false return providers.flatMap((provider) => { - const models = Provider.sort(Object.values(provider.models) as any) + const unsorted: Array<{ id: string; name: string; variants?: Record }> = Object.values( + provider.models, + ) + const models = Provider.sort(unsorted) return models.flatMap((model) => { const base: ModelOption = { modelId: `${provider.id}/${model.id}`, @@ -1727,7 +1685,7 @@ export namespace ACP { } function formatModelIdWithVariant( - model: { providerID: string; modelID: string }, + model: { providerID: ProviderID; modelID: ModelID }, variant: string | undefined, availableVariants: string[], includeVariant: boolean, @@ -1738,12 +1696,12 @@ export namespace ACP { } function buildVariantMeta(input: { - model: { providerID: string; modelID: string } + model: { providerID: ProviderID; modelID: ModelID } variant?: string availableVariants: string[] }) { return { - "altimate-code": { + opencode: { modelId: `${input.model.providerID}/${input.model.modelID}`, variant: input.variant ?? 
null, availableVariants: input.availableVariants, @@ -1754,7 +1712,7 @@ export namespace ACP { function parseModelSelection( modelId: string, providers: Array<{ id: string; models: Record }> }>, - ): { model: { providerID: string; modelID: string }; variant?: string } { + ): { model: { providerID: ProviderID; modelID: ModelID }; variant?: string } { const parsed = Provider.parseModel(modelId) const provider = providers.find((p) => p.id === parsed.providerID) if (!provider) { @@ -1774,7 +1732,7 @@ export namespace ACP { const baseModelInfo = provider.models[baseModelId] if (baseModelInfo?.variants && candidateVariant in baseModelInfo.variants) { return { - model: { providerID: parsed.providerID, modelID: baseModelId }, + model: { providerID: parsed.providerID, modelID: ModelID.make(baseModelId) }, variant: candidateVariant, } } diff --git a/packages/opencode/src/acp/types.ts b/packages/opencode/src/acp/types.ts index de8ac50812..2c3e886bc1 100644 --- a/packages/opencode/src/acp/types.ts +++ b/packages/opencode/src/acp/types.ts @@ -1,5 +1,6 @@ import type { McpServer } from "@agentclientprotocol/sdk" import type { OpencodeClient } from "@opencode-ai/sdk/v2" +import type { ProviderID, ModelID } from "../provider/schema" export interface ACPSessionState { id: string @@ -7,8 +8,8 @@ export interface ACPSessionState { mcpServers: McpServer[] createdAt: Date model?: { - providerID: string - modelID: string + providerID: ProviderID + modelID: ModelID } variant?: string modeId?: string @@ -17,7 +18,7 @@ export interface ACPSessionState { export interface ACPConfig { sdk: OpencodeClient defaultModel?: { - providerID: string - modelID: string + providerID: ProviderID + modelID: ModelID } } diff --git a/packages/opencode/src/agent/agent.ts b/packages/opencode/src/agent/agent.ts index 6eb16f80fa..543cf4bde1 100644 --- a/packages/opencode/src/agent/agent.ts +++ b/packages/opencode/src/agent/agent.ts @@ -1,6 +1,7 @@ import { Config } from "../config/config" import z from "zod" 
import { Provider } from "../provider/provider" +import { ModelID, ProviderID } from "../provider/schema" import { generateObject, streamObject, type ModelMessage } from "ai" import { SystemPrompt } from "../session/system" import { Instance } from "../project/instance" @@ -41,8 +42,8 @@ export namespace Agent { permission: PermissionNext.Ruleset, model: z .object({ - modelID: z.string(), - providerID: z.string(), + modelID: ModelID.zod, + providerID: ProviderID.zod, }) .optional(), variant: z.string().optional(), @@ -415,7 +416,7 @@ export namespace Agent { return primaryVisible.name } - export async function generate(input: { description: string; model?: { providerID: string; modelID: string } }) { + export async function generate(input: { description: string; model?: { providerID: ProviderID; modelID: ModelID } }) { const cfg = await Config.get() const defaultModel = input.model ?? (await Provider.defaultModel()) const model = await Provider.getModel(defaultModel.providerID, defaultModel.modelID) diff --git a/packages/opencode/src/altimate/telemetry/index.ts b/packages/opencode/src/altimate/telemetry/index.ts index 4d6a5dd40b..7c5f193b4b 100644 --- a/packages/opencode/src/altimate/telemetry/index.ts +++ b/packages/opencode/src/altimate/telemetry/index.ts @@ -1,4 +1,4 @@ -import { Control } from "@/control" +import { Account } from "@/account" import { Config } from "@/config/config" import { Installation } from "@/installation" import { Log } from "@/util/log" @@ -233,7 +233,7 @@ export namespace Telemetry { duration_ms: number cost: number compactions: number - outcome: "completed" | "abandoned" | "error" + outcome: "completed" | "abandoned" | "aborted" | "error" } | { type: "error_recovered" @@ -426,7 +426,7 @@ export namespace Telemetry { } appInsights = cfg try { - const account = Control.account() + const account = Account.active() if (account) { userEmail = createHash("sha256").update(account.email.toLowerCase().trim()).digest("hex") } diff --git 
a/packages/opencode/src/auth/index.ts b/packages/opencode/src/auth/index.ts index 80253a665e..79e9e615d2 100644 --- a/packages/opencode/src/auth/index.ts +++ b/packages/opencode/src/auth/index.ts @@ -1,9 +1,13 @@ -import path from "path" -import { Global } from "../global" +import { Effect } from "effect" import z from "zod" -import { Filesystem } from "../util/filesystem" +import { runtime } from "@/effect/runtime" +import * as S from "./service" -export const OAUTH_DUMMY_KEY = "opencode-oauth-dummy-key" +export { OAUTH_DUMMY_KEY } from "./service" + +function runPromise(f: (service: S.AuthService.Service) => Effect.Effect) { + return runtime.runPromise(S.AuthService.use(f)) +} export namespace Auth { export const Oauth = z @@ -35,39 +39,19 @@ export namespace Auth { export const Info = z.discriminatedUnion("type", [Oauth, Api, WellKnown]).meta({ ref: "Auth" }) export type Info = z.infer - const filepath = path.join(Global.Path.data, "auth.json") - export async function get(providerID: string) { - const auth = await all() - return auth[providerID] + return runPromise((service) => service.get(providerID)) } export async function all(): Promise> { - const data = await Filesystem.readJson>(filepath).catch(() => ({})) - return Object.entries(data).reduce( - (acc, [key, value]) => { - const parsed = Info.safeParse(value) - if (!parsed.success) return acc - acc[key] = parsed.data - return acc - }, - {} as Record, - ) + return runPromise((service) => service.all()) } export async function set(key: string, info: Info) { - const normalized = key.replace(/\/+$/, "") - const data = await all() - if (normalized !== key) delete data[key] - delete data[normalized + "/"] - await Filesystem.writeJson(filepath, { ...data, [normalized]: info }, 0o600) + return runPromise((service) => service.set(key, info)) } export async function remove(key: string) { - const normalized = key.replace(/\/+$/, "") - const data = await all() - delete data[key] - delete data[normalized] - await 
Filesystem.writeJson(filepath, data, 0o600) + return runPromise((service) => service.remove(key)) } } diff --git a/packages/opencode/src/auth/service.ts b/packages/opencode/src/auth/service.ts new file mode 100644 index 0000000000..100a132b87 --- /dev/null +++ b/packages/opencode/src/auth/service.ts @@ -0,0 +1,101 @@ +import path from "path" +import { Effect, Layer, Record, Result, Schema, ServiceMap } from "effect" +import { Global } from "../global" +import { Filesystem } from "../util/filesystem" + +export const OAUTH_DUMMY_KEY = "opencode-oauth-dummy-key" + +export class Oauth extends Schema.Class("OAuth")({ + type: Schema.Literal("oauth"), + refresh: Schema.String, + access: Schema.String, + expires: Schema.Number, + accountId: Schema.optional(Schema.String), + enterpriseUrl: Schema.optional(Schema.String), +}) {} + +export class Api extends Schema.Class("ApiAuth")({ + type: Schema.Literal("api"), + key: Schema.String, +}) {} + +export class WellKnown extends Schema.Class("WellKnownAuth")({ + type: Schema.Literal("wellknown"), + key: Schema.String, + token: Schema.String, +}) {} + +export const Info = Schema.Union([Oauth, Api, WellKnown]) +export type Info = Schema.Schema.Type + +export class AuthServiceError extends Schema.TaggedErrorClass()("AuthServiceError", { + message: Schema.String, + cause: Schema.optional(Schema.Defect), +}) {} + +const file = path.join(Global.Path.data, "auth.json") + +const fail = (message: string) => (cause: unknown) => new AuthServiceError({ message, cause }) + +export namespace AuthService { + export interface Service { + readonly get: (providerID: string) => Effect.Effect + readonly all: () => Effect.Effect, AuthServiceError> + readonly set: (key: string, info: Info) => Effect.Effect + readonly remove: (key: string) => Effect.Effect + } +} + +export class AuthService extends ServiceMap.Service()("@opencode/Auth") { + static readonly layer = Layer.effect( + AuthService, + Effect.gen(function* () { + const decode = 
Schema.decodeUnknownOption(Info) + + const all = Effect.fn("AuthService.all")(() => + Effect.tryPromise({ + try: async () => { + const data = await Filesystem.readJson>(file).catch(() => ({})) + return Record.filterMap(data, (value) => Result.fromOption(decode(value), () => undefined)) + }, + catch: fail("Failed to read auth data"), + }), + ) + + const get = Effect.fn("AuthService.get")(function* (providerID: string) { + return (yield* all())[providerID] + }) + + const set = Effect.fn("AuthService.set")(function* (key: string, info: Info) { + const norm = key.replace(/\/+$/, "") + const data = yield* all() + if (norm !== key) delete data[key] + delete data[norm + "/"] + yield* Effect.tryPromise({ + try: () => Filesystem.writeJson(file, { ...data, [norm]: info }, 0o600), + catch: fail("Failed to write auth data"), + }) + }) + + const remove = Effect.fn("AuthService.remove")(function* (key: string) { + const norm = key.replace(/\/+$/, "") + const data = yield* all() + delete data[key] + delete data[norm] + yield* Effect.tryPromise({ + try: () => Filesystem.writeJson(file, data, 0o600), + catch: fail("Failed to write auth data"), + }) + }) + + return AuthService.of({ + get, + all, + set, + remove, + }) + }), + ) + + static readonly defaultLayer = AuthService.layer +} diff --git a/packages/opencode/src/bun/index.ts b/packages/opencode/src/bun/index.ts index e3bddcc226..d6c4538259 100644 --- a/packages/opencode/src/bun/index.ts +++ b/packages/opencode/src/bun/index.ts @@ -4,7 +4,6 @@ import { Log } from "../util/log" import path from "path" import { Filesystem } from "../util/filesystem" import { NamedError } from "@opencode-ai/util/error" -import { text } from "node:stream/consumers" import { Lock } from "../util/lock" import { PackageRegistry } from "./registry" import { proxied } from "@/util/proxied" @@ -13,32 +12,29 @@ import { Process } from "../util/process" export namespace BunProc { const log = Log.create({ service: "bun" }) - export async function run(cmd: 
string[], options?: Process.Options) { + export async function run(cmd: string[], options?: Process.RunOptions) { + const full = [which(), ...cmd] log.info("running", { - cmd: [which(), ...cmd], + cmd: full, ...options, }) - const result = Process.spawn([which(), ...cmd], { - ...options, - stdout: "pipe", - stderr: "pipe", + const result = await Process.run(full, { + cwd: options?.cwd, + abort: options?.abort, + kill: options?.kill, + timeout: options?.timeout, + nothrow: options?.nothrow, env: { ...process.env, ...options?.env, BUN_BE_BUN: "1", }, }) - const code = await result.exited - const stdout = result.stdout ? await text(result.stdout) : undefined - const stderr = result.stderr ? await text(result.stderr) : undefined log.info("done", { - code, - stdout, - stderr, + code: result.code, + stdout: result.stdout.toString(), + stderr: result.stderr.toString(), }) - if (code !== 0) { - throw new Error(`Command failed with exit code ${code}`) - } return result } diff --git a/packages/opencode/src/bun/registry.ts b/packages/opencode/src/bun/registry.ts index a85a6c989c..e43e20e6c5 100644 --- a/packages/opencode/src/bun/registry.ts +++ b/packages/opencode/src/bun/registry.ts @@ -1,5 +1,4 @@ -import { semver } from "bun" -import { text } from "node:stream/consumers" +import semver from "semver" import { Log } from "../util/log" import { Process } from "../util/process" @@ -11,26 +10,21 @@ export namespace PackageRegistry { } export async function info(pkg: string, field: string, cwd?: string): Promise { - const result = Process.spawn([which(), "info", pkg, field], { + const { code, stdout, stderr } = await Process.run([which(), "info", pkg, field], { cwd, - stdout: "pipe", - stderr: "pipe", env: { ...process.env, BUN_BE_BUN: "1", }, + nothrow: true, }) - const code = await result.exited - const stdout = result.stdout ? await text(result.stdout) : "" - const stderr = result.stderr ? 
await text(result.stderr) : "" - if (code !== 0) { - log.warn("bun info failed", { pkg, field, code, stderr }) + log.warn("bun info failed", { pkg, field, code, stderr: stderr.toString() }) return null } - const value = stdout.trim() + const value = stdout.toString().trim() if (!value) return null return value } @@ -45,6 +39,6 @@ export namespace PackageRegistry { const isRange = /[\s^~*xX<>|=]/.test(cachedVersion) if (isRange) return !semver.satisfies(latestVersion, cachedVersion) - return semver.order(cachedVersion, latestVersion) === -1 + return semver.lt(cachedVersion, latestVersion) } } diff --git a/packages/opencode/src/cli/cmd/account.ts b/packages/opencode/src/cli/cmd/account.ts new file mode 100644 index 0000000000..8ad42c5eb1 --- /dev/null +++ b/packages/opencode/src/cli/cmd/account.ts @@ -0,0 +1,219 @@ +import { cmd } from "./cmd" +import { Duration, Effect, Match, Option } from "effect" +import { UI } from "../ui" +import { runtime } from "@/effect/runtime" +import { AccountID, AccountService, OrgID, PollExpired, type PollResult } from "@/account/service" +import { type AccountError } from "@/account/schema" +import * as Prompt from "../effect/prompt" +import open from "open" + +const openBrowser = (url: string) => Effect.promise(() => open(url).catch(() => undefined)) + +const println = (msg: string) => Effect.sync(() => UI.println(msg)) + +const loginEffect = Effect.fn("login")(function* (url: string) { + const service = yield* AccountService + + yield* Prompt.intro("Log in") + const login = yield* service.login(url) + + yield* Prompt.log.info("Go to: " + login.url) + yield* Prompt.log.info("Enter code: " + login.user) + yield* openBrowser(login.url) + + const s = Prompt.spinner() + yield* s.start("Waiting for authorization...") + + const poll = (wait: Duration.Duration): Effect.Effect => + Effect.gen(function* () { + yield* Effect.sleep(wait) + const result = yield* service.poll(login) + if (result._tag === "PollPending") return yield* poll(wait) + 
if (result._tag === "PollSlow") return yield* poll(Duration.sum(wait, Duration.seconds(5))) + return result + }) + + const result = yield* poll(login.interval).pipe( + Effect.timeout(login.expiry), + Effect.catchTag("TimeoutError", () => Effect.succeed(new PollExpired())), + ) + + yield* Match.valueTags(result, { + PollSuccess: (r) => + Effect.gen(function* () { + yield* s.stop("Logged in as " + r.email) + yield* Prompt.outro("Done") + }), + PollExpired: () => s.stop("Device code expired", 1), + PollDenied: () => s.stop("Authorization denied", 1), + PollError: (r) => s.stop("Error: " + String(r.cause), 1), + PollPending: () => s.stop("Unexpected state", 1), + PollSlow: () => s.stop("Unexpected state", 1), + }) +}) + +const logoutEffect = Effect.fn("logout")(function* (email?: string) { + const service = yield* AccountService + const accounts = yield* service.list() + if (accounts.length === 0) return yield* println("Not logged in") + + if (email) { + const match = accounts.find((a) => a.email === email) + if (!match) return yield* println("Account not found: " + email) + yield* service.remove(match.id) + yield* Prompt.outro("Logged out from " + email) + return + } + + const active = yield* service.active() + const activeID = Option.map(active, (a) => a.id) + + yield* Prompt.intro("Log out") + + const opts = accounts.map((a) => { + const isActive = Option.isSome(activeID) && activeID.value === a.id + const server = UI.Style.TEXT_DIM + a.url + UI.Style.TEXT_NORMAL + return { + value: a, + label: isActive ? 
`${a.email} ${server}` + UI.Style.TEXT_DIM + " (active)" : `${a.email} ${server}`, + } + }) + + const selected = yield* Prompt.select({ message: "Select account to log out", options: opts }) + if (Option.isNone(selected)) return + + yield* service.remove(selected.value.id) + yield* Prompt.outro("Logged out from " + selected.value.email) +}) + +interface OrgChoice { + orgID: OrgID + accountID: AccountID + label: string +} + +const switchEffect = Effect.fn("switch")(function* () { + const service = yield* AccountService + + const groups = yield* service.orgsByAccount() + if (groups.length === 0) return yield* println("Not logged in") + + const active = yield* service.active() + const activeOrgID = Option.flatMap(active, (a) => Option.fromNullishOr(a.active_org_id)) + + const opts = groups.flatMap((group) => + group.orgs.map((org) => { + const isActive = Option.isSome(activeOrgID) && activeOrgID.value === org.id + return { + value: { orgID: org.id, accountID: group.account.id, label: org.name }, + label: isActive + ? 
`${org.name} (${group.account.email})` + UI.Style.TEXT_DIM + " (active)" + : `${org.name} (${group.account.email})`, + } + }), + ) + if (opts.length === 0) return yield* println("No orgs found") + + yield* Prompt.intro("Switch org") + + const selected = yield* Prompt.select({ message: "Select org", options: opts }) + if (Option.isNone(selected)) return + + const choice = selected.value + yield* service.use(choice.accountID, Option.some(choice.orgID)) + yield* Prompt.outro("Switched to " + choice.label) +}) + +const orgsEffect = Effect.fn("orgs")(function* () { + const service = yield* AccountService + + const groups = yield* service.orgsByAccount() + if (groups.length === 0) return yield* println("No accounts found") + if (!groups.some((group) => group.orgs.length > 0)) return yield* println("No orgs found") + + const active = yield* service.active() + const activeOrgID = Option.flatMap(active, (a) => Option.fromNullishOr(a.active_org_id)) + + for (const group of groups) { + for (const org of group.orgs) { + const isActive = Option.isSome(activeOrgID) && activeOrgID.value === org.id + const dot = isActive ? UI.Style.TEXT_SUCCESS + "●" + UI.Style.TEXT_NORMAL : " " + const name = isActive ? 
UI.Style.TEXT_HIGHLIGHT_BOLD + org.name + UI.Style.TEXT_NORMAL : org.name + const email = UI.Style.TEXT_DIM + group.account.email + UI.Style.TEXT_NORMAL + const id = UI.Style.TEXT_DIM + org.id + UI.Style.TEXT_NORMAL + yield* println(` ${dot} ${name} ${email} ${id}`) + } + } +}) + +export const LoginCommand = cmd({ + command: "login ", + describe: false, + builder: (yargs) => + yargs.positional("url", { + describe: "server URL", + type: "string", + demandOption: true, + }), + async handler(args) { + UI.empty() + await runtime.runPromise(loginEffect(args.url)) + }, +}) + +export const LogoutCommand = cmd({ + command: "logout [email]", + describe: false, + builder: (yargs) => + yargs.positional("email", { + describe: "account email to log out from", + type: "string", + }), + async handler(args) { + UI.empty() + await runtime.runPromise(logoutEffect(args.email)) + }, +}) + +export const SwitchCommand = cmd({ + command: "switch", + describe: false, + async handler() { + UI.empty() + await runtime.runPromise(switchEffect()) + }, +}) + +export const OrgsCommand = cmd({ + command: "orgs", + describe: false, + async handler() { + UI.empty() + await runtime.runPromise(orgsEffect()) + }, +}) + +export const ConsoleCommand = cmd({ + command: "console", + describe: false, + builder: (yargs) => + yargs + .command({ + ...LoginCommand, + describe: "log in to console", + }) + .command({ + ...LogoutCommand, + describe: "log out from console", + }) + .command({ + ...SwitchCommand, + describe: "switch active org", + }) + .command({ + ...OrgsCommand, + describe: "list orgs", + }) + .demandCommand(), + async handler() {}, +}) diff --git a/packages/opencode/src/cli/cmd/debug/agent.ts b/packages/opencode/src/cli/cmd/debug/agent.ts index fe30034859..297a7ec021 100644 --- a/packages/opencode/src/cli/cmd/debug/agent.ts +++ b/packages/opencode/src/cli/cmd/debug/agent.ts @@ -4,7 +4,7 @@ import { Agent } from "../../../agent/agent" import { Provider } from "../../../provider/provider" import { 
Session } from "../../../session" import type { MessageV2 } from "../../../session/message-v2" -import { Identifier } from "../../../id/id" +import { MessageID, PartID } from "../../../session/schema" import { ToolRegistry } from "../../../tool/registry" import { Instance } from "../../../project/instance" import { PermissionNext } from "../../../permission/next" @@ -113,7 +113,7 @@ function parseToolParams(input?: string) { async function createToolContext(agent: Agent.Info) { const session = await Session.create({ title: `Debug tool run (${agent.name})` }) - const messageID = Identifier.ascending("message") + const messageID = MessageID.ascending() const model = agent.model ?? (await Provider.defaultModel()) const now = Date.now() const message: MessageV2.Assistant = { @@ -150,7 +150,7 @@ async function createToolContext(agent: Agent.Info) { return { sessionID: session.id, messageID, - callID: Identifier.ascending("part"), + callID: PartID.ascending(), agent: agent.name, abort: new AbortController().signal, messages: [], diff --git a/packages/opencode/src/cli/cmd/export.ts b/packages/opencode/src/cli/cmd/export.ts index 20c95bf87e..4088b4818d 100644 --- a/packages/opencode/src/cli/cmd/export.ts +++ b/packages/opencode/src/cli/cmd/export.ts @@ -1,5 +1,6 @@ import type { Argv } from "yargs" import { Session } from "../../session" +import { SessionID } from "../../session/schema" import { cmd } from "./cmd" import { bootstrap } from "../bootstrap" import { UI } from "../ui" @@ -17,7 +18,7 @@ export const ExportCommand = cmd({ }, handler: async (args) => { await bootstrap(process.cwd(), async () => { - let sessionID = args.sessionID + let sessionID = args.sessionID ? SessionID.make(args.sessionID) : undefined process.stderr.write(`Exporting session: ${sessionID ?? 
"latest"}\n`) if (!sessionID) { @@ -58,7 +59,7 @@ export const ExportCommand = cmd({ throw new UI.CancelledError() } - sessionID = selectedSession as string + sessionID = selectedSession prompts.outro("Exporting session...", { output: process.stderr, @@ -67,7 +68,7 @@ export const ExportCommand = cmd({ try { const sessionInfo = await Session.get(sessionID!) - const messages = await Session.messages({ sessionID: sessionID! }) + const messages = await Session.messages({ sessionID: sessionInfo.id }) const exportData = { info: sessionInfo, diff --git a/packages/opencode/src/cli/cmd/github.ts b/packages/opencode/src/cli/cmd/github.ts index 88b8c4f65e..4ae8c5632c 100644 --- a/packages/opencode/src/cli/cmd/github.ts +++ b/packages/opencode/src/cli/cmd/github.ts @@ -22,13 +22,15 @@ import { ModelsDev } from "../../provider/models" import { Instance } from "@/project/instance" import { bootstrap } from "../bootstrap" import { Session } from "../../session" -import { Identifier } from "../../id/id" +import type { SessionID } from "../../session/schema" +import { MessageID, PartID } from "../../session/schema" import { Provider } from "../../provider/provider" import { Bus } from "../../bus" import { MessageV2 } from "../../session/message-v2" import { SessionPrompt } from "@/session/prompt" -import { $ } from "bun" import { setTimeout as sleep } from "node:timers/promises" +import { Process } from "@/util/process" +import { git } from "@/util/git" type GitHubAuthor = { login: string @@ -255,7 +257,7 @@ export const GithubInstallCommand = cmd({ } // Get repo info - const info = (await $`git remote get-url origin`.quiet().nothrow().text()).trim() + const info = (await git(["remote", "get-url", "origin"], { cwd: Instance.worktree })).text().trim() const parsed = parseGitHubRemote(info) if (!parsed) { prompts.log.error(`Could not find git repository. 
Please run this command from a git repository.`) @@ -480,7 +482,7 @@ export const GithubRunCommand = cmd({ let octoRest: Octokit let octoGraph: typeof graphql let gitConfig: string - let session: { id: string; title: string; version: string } + let session: { id: SessionID; title: string; version: string } let shareId: string | undefined let exitCode = 0 type PromptFiles = Awaited>["promptFiles"] @@ -493,6 +495,26 @@ export const GithubRunCommand = cmd({ ? "pr_review" : "issue" : undefined + const gitText = async (args: string[]) => { + const result = await git(args, { cwd: Instance.worktree }) + if (result.exitCode !== 0) { + throw new Process.RunFailedError(["git", ...args], result.exitCode, result.stdout, result.stderr) + } + return result.text().trim() + } + const gitRun = async (args: string[]) => { + const result = await git(args, { cwd: Instance.worktree }) + if (result.exitCode !== 0) { + throw new Process.RunFailedError(["git", ...args], result.exitCode, result.stdout, result.stderr) + } + return result + } + const gitStatus = (args: string[]) => git(args, { cwd: Instance.worktree }) + const commitChanges = async (summary: string, actor?: string) => { + const args = ["commit", "-m", summary] + if (actor) args.push("-m", `Co-authored-by: ${actor} <${actor}@users.noreply.github.com>`) + await gitRun(args) + } try { if (useGithubToken) { @@ -553,7 +575,7 @@ export const GithubRunCommand = cmd({ } const branchPrefix = isWorkflowDispatchEvent ? 
"dispatch" : "schedule" const branch = await checkoutNewBranch(branchPrefix) - const head = (await $`git rev-parse HEAD`).stdout.toString().trim() + const head = await gitText(["rev-parse", "HEAD"]) const response = await chat(userPrompt, promptFiles) const { dirty, uncommittedChanges, switched } = await branchIsDirty(head, branch) if (switched) { @@ -587,7 +609,7 @@ export const GithubRunCommand = cmd({ // Local PR if (prData.headRepository.nameWithOwner === prData.baseRepository.nameWithOwner) { await checkoutLocalBranch(prData) - const head = (await $`git rev-parse HEAD`).stdout.toString().trim() + const head = await gitText(["rev-parse", "HEAD"]) const dataPrompt = buildPromptDataForPR(prData) const response = await chat(`${userPrompt}\n\n${dataPrompt}`, promptFiles) const { dirty, uncommittedChanges, switched } = await branchIsDirty(head, prData.headRefName) @@ -605,7 +627,7 @@ export const GithubRunCommand = cmd({ // Fork PR else { const forkBranch = await checkoutForkBranch(prData) - const head = (await $`git rev-parse HEAD`).stdout.toString().trim() + const head = await gitText(["rev-parse", "HEAD"]) const dataPrompt = buildPromptDataForPR(prData) const response = await chat(`${userPrompt}\n\n${dataPrompt}`, promptFiles) const { dirty, uncommittedChanges, switched } = await branchIsDirty(head, forkBranch) @@ -624,7 +646,7 @@ export const GithubRunCommand = cmd({ // Issue else { const branch = await checkoutNewBranch("issue") - const head = (await $`git rev-parse HEAD`).stdout.toString().trim() + const head = await gitText(["rev-parse", "HEAD"]) const issueData = await fetchIssue() const dataPrompt = buildPromptDataForIssue(issueData) const response = await chat(`${userPrompt}\n\n${dataPrompt}`, promptFiles) @@ -658,7 +680,7 @@ export const GithubRunCommand = cmd({ exitCode = 1 console.error(e instanceof Error ? 
e.message : String(e)) let msg = e - if (e instanceof $.ShellError) { + if (e instanceof Process.RunFailedError) { msg = e.stderr.toString() } else if (e instanceof Error) { msg = e.message @@ -913,7 +935,7 @@ export const GithubRunCommand = cmd({ const result = await SessionPrompt.prompt({ sessionID: session.id, - messageID: Identifier.ascending("message"), + messageID: MessageID.ascending(), variant, model: { providerID, @@ -922,13 +944,13 @@ export const GithubRunCommand = cmd({ // agent is omitted - server will use default_agent from config or fall back to "build" parts: [ { - id: Identifier.ascending("part"), + id: PartID.ascending(), type: "text", text: message, }, ...files.flatMap((f) => [ { - id: Identifier.ascending("part"), + id: PartID.ascending(), type: "file" as const, mime: f.mime, url: `data:${f.mime};base64,${f.content}`, @@ -967,7 +989,7 @@ export const GithubRunCommand = cmd({ console.log("Requesting summary from agent...") const summary = await SessionPrompt.prompt({ sessionID: session.id, - messageID: Identifier.ascending("message"), + messageID: MessageID.ascending(), variant, model: { providerID, @@ -976,7 +998,7 @@ export const GithubRunCommand = cmd({ tools: { "*": false }, // Disable all tools to force text response parts: [ { - id: Identifier.ascending("part"), + id: PartID.ascending(), type: "text", text: "Summarize the actions (tool calls & reasoning) you did for the user in 1-2 sentences.", }, @@ -1049,29 +1071,29 @@ export const GithubRunCommand = cmd({ const config = "http.https://github.com/.extraheader" // actions/checkout@v6 no longer stores credentials in .git/config, // so this may not exist - use nothrow() to handle gracefully - const ret = await $`git config --local --get ${config}`.nothrow() + const ret = await gitStatus(["config", "--local", "--get", config]) if (ret.exitCode === 0) { gitConfig = ret.stdout.toString().trim() - await $`git config --local --unset-all ${config}` + await gitRun(["config", "--local", 
"--unset-all", config]) } const newCredentials = Buffer.from(`x-access-token:${appToken}`, "utf8").toString("base64") - await $`git config --local ${config} "AUTHORIZATION: basic ${newCredentials}"` - await $`git config --global user.name "${AGENT_USERNAME}"` - await $`git config --global user.email "${AGENT_USERNAME}@users.noreply.github.com"` + await gitRun(["config", "--local", config, `AUTHORIZATION: basic ${newCredentials}`]) + await gitRun(["config", "--global", "user.name", AGENT_USERNAME]) + await gitRun(["config", "--global", "user.email", `${AGENT_USERNAME}@users.noreply.github.com`]) } async function restoreGitConfig() { if (gitConfig === undefined) return const config = "http.https://github.com/.extraheader" - await $`git config --local ${config} "${gitConfig}"` + await gitRun(["config", "--local", config, gitConfig]) } async function checkoutNewBranch(type: "issue" | "schedule" | "dispatch") { console.log("Checking out new branch...") const branch = generateBranchName(type) - await $`git checkout -b ${branch}` + await gitRun(["checkout", "-b", branch]) return branch } @@ -1081,8 +1103,8 @@ export const GithubRunCommand = cmd({ const branch = pr.headRefName const depth = Math.max(pr.commits.totalCount, 20) - await $`git fetch origin --depth=${depth} ${branch}` - await $`git checkout ${branch}` + await gitRun(["fetch", "origin", `--depth=${depth}`, branch]) + await gitRun(["checkout", branch]) } async function checkoutForkBranch(pr: GitHubPullRequest) { @@ -1092,9 +1114,9 @@ export const GithubRunCommand = cmd({ const localBranch = generateBranchName("pr") const depth = Math.max(pr.commits.totalCount, 20) - await $`git remote add fork https://github.com/${pr.headRepository.nameWithOwner}.git` - await $`git fetch fork --depth=${depth} ${remoteBranch}` - await $`git checkout -b ${localBranch} fork/${remoteBranch}` + await gitRun(["remote", "add", "fork", `https://github.com/${pr.headRepository.nameWithOwner}.git`]) + await gitRun(["fetch", "fork", 
`--depth=${depth}`, remoteBranch]) + await gitRun(["checkout", "-b", localBranch, `fork/${remoteBranch}`]) return localBranch } @@ -1115,28 +1137,23 @@ export const GithubRunCommand = cmd({ async function pushToNewBranch(summary: string, branch: string, commit: boolean, isSchedule: boolean) { console.log("Pushing to new branch...") if (commit) { - await $`git add .` + await gitRun(["add", "."]) if (isSchedule) { - // No co-author for scheduled events - the schedule is operating as the repo - await $`git commit -m "${summary}"` + await commitChanges(summary) } else { - await $`git commit -m "${summary} - -Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"` + await commitChanges(summary, actor) } } - await $`git push -u origin ${branch}` + await gitRun(["push", "-u", "origin", branch]) } async function pushToLocalBranch(summary: string, commit: boolean) { console.log("Pushing to local branch...") if (commit) { - await $`git add .` - await $`git commit -m "${summary} - -Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"` + await gitRun(["add", "."]) + await commitChanges(summary, actor) } - await $`git push` + await gitRun(["push"]) } async function pushToForkBranch(summary: string, pr: GitHubPullRequest, commit: boolean) { @@ -1145,30 +1162,28 @@ Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"` const remoteBranch = pr.headRefName if (commit) { - await $`git add .` - await $`git commit -m "${summary} - -Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"` + await gitRun(["add", "."]) + await commitChanges(summary, actor) } - await $`git push fork HEAD:${remoteBranch}` + await gitRun(["push", "fork", `HEAD:${remoteBranch}`]) } async function branchIsDirty(originalHead: string, expectedBranch: string) { console.log("Checking if branch is dirty...") // Detect if the agent switched branches during chat (e.g. created // its own branch, committed, and possibly pushed/created a PR). 
- const current = (await $`git rev-parse --abbrev-ref HEAD`).stdout.toString().trim() + const current = await gitText(["rev-parse", "--abbrev-ref", "HEAD"]) if (current !== expectedBranch) { console.log(`Branch changed during chat: expected ${expectedBranch}, now on ${current}`) return { dirty: true, uncommittedChanges: false, switched: true } } - const ret = await $`git status --porcelain` + const ret = await gitStatus(["status", "--porcelain"]) const status = ret.stdout.toString().trim() if (status.length > 0) { return { dirty: true, uncommittedChanges: true, switched: false } } - const head = (await $`git rev-parse HEAD`).stdout.toString().trim() + const head = await gitText(["rev-parse", "HEAD"]) return { dirty: head !== originalHead, uncommittedChanges: false, @@ -1180,11 +1195,11 @@ Co-authored-by: ${actor} <${actor}@users.noreply.github.com>"` // Falls back to fetching from origin when local refs are missing // (common in shallow clones from actions/checkout). async function hasNewCommits(base: string, head: string) { - const result = await $`git rev-list --count ${base}..${head}`.nothrow() + const result = await gitStatus(["rev-list", "--count", `${base}..${head}`]) if (result.exitCode !== 0) { console.log(`rev-list failed, fetching origin/${base}...`) - await $`git fetch origin ${base} --depth=1`.nothrow() - const retry = await $`git rev-list --count origin/${base}..${head}`.nothrow() + await gitStatus(["fetch", "origin", base, "--depth=1"]) + const retry = await gitStatus(["rev-list", "--count", `origin/${base}..${head}`]) if (retry.exitCode !== 0) return true // assume dirty if we can't tell return parseInt(retry.stdout.toString().trim()) > 0 } diff --git a/packages/opencode/src/cli/cmd/import.ts b/packages/opencode/src/cli/cmd/import.ts index a2e498cef6..078ffb5d74 100644 --- a/packages/opencode/src/cli/cmd/import.ts +++ b/packages/opencode/src/cli/cmd/import.ts @@ -1,6 +1,7 @@ import type { Argv } from "yargs" import type { Session as SDKSession, 
Message, Part } from "@opencode-ai/sdk/v2" import { Session } from "../../session" +import { MessageV2 } from "../../session/message-v2" import { cmd } from "./cmd" import { bootstrap } from "../bootstrap" import { Database } from "../../storage/db" @@ -10,7 +11,7 @@ import { ShareNext } from "../../share/share-next" import { EOL } from "os" import { Filesystem } from "../../util/filesystem" -/** Discriminated union returned by the ShareNext API (GET /api/share/:id/data) */ +/** Discriminated union returned by the ShareNext API (GET /api/shares/:id/data) */ export type ShareData = | { type: "session"; data: SDKSession } | { type: "message"; data: Message } @@ -24,6 +25,14 @@ export function parseShareUrl(url: string): string | null { return match ? match[1] : null } +export function shouldAttachShareAuthHeaders(shareUrl: string, accountBaseUrl: string): boolean { + try { + return new URL(shareUrl).origin === new URL(accountBaseUrl).origin + } catch { + return false + } +} + /** * Transform ShareNext API response (flat array) into the nested structure for local file storage. * @@ -78,7 +87,7 @@ export const ImportCommand = cmd({ await bootstrap(process.cwd(), async () => { let exportData: | { - info: Session.Info + info: SDKSession messages: Array<{ info: Message parts: Part[] @@ -97,8 +106,21 @@ export const ImportCommand = cmd({ return } - const baseUrl = await ShareNext.url() - const response = await fetch(`${baseUrl}/api/share/${slug}/data`) + const parsed = new URL(args.file) + const baseUrl = parsed.origin + const req = await ShareNext.request() + const headers = shouldAttachShareAuthHeaders(args.file, req.baseUrl) ? 
req.headers : {} + + const dataPath = req.api.data(slug) + let response = await fetch(`${baseUrl}${dataPath}`, { + headers, + }) + + if (!response.ok && dataPath !== `/api/share/${slug}/data`) { + response = await fetch(`${baseUrl}/api/share/${slug}/data`, { + headers, + }) + } if (!response.ok) { process.stdout.write(`Failed to fetch share data: ${response.statusText}`) @@ -131,7 +153,11 @@ export const ImportCommand = cmd({ return } - const row = { ...Session.toRow(exportData.info), project_id: Instance.project.id } + const info = Session.Info.parse({ + ...exportData.info, + projectID: Instance.project.id, + }) + const row = Session.toRow(info) Database.use((db) => db .insert(SessionTable) @@ -141,28 +167,32 @@ export const ImportCommand = cmd({ ) for (const msg of exportData.messages) { + const msgInfo = MessageV2.Info.parse(msg.info) + const { id, sessionID: _, ...msgData } = msgInfo Database.use((db) => db .insert(MessageTable) .values({ - id: msg.info.id, - session_id: exportData.info.id, - time_created: msg.info.time?.created ?? Date.now(), - data: msg.info, + id, + session_id: row.id, + time_created: msgInfo.time?.created ?? 
Date.now(), + data: msgData, }) .onConflictDoNothing() .run(), ) for (const part of msg.parts) { + const partInfo = MessageV2.Part.parse(part) + const { id: partId, sessionID: _s, messageID, ...partData } = partInfo Database.use((db) => db .insert(PartTable) .values({ - id: part.id, - message_id: msg.info.id, - session_id: exportData.info.id, - data: part, + id: partId, + message_id: messageID, + session_id: row.id, + data: partData, }) .onConflictDoNothing() .run(), diff --git a/packages/opencode/src/cli/cmd/models.ts b/packages/opencode/src/cli/cmd/models.ts index 156dae91c6..8395d4628e 100644 --- a/packages/opencode/src/cli/cmd/models.ts +++ b/packages/opencode/src/cli/cmd/models.ts @@ -1,6 +1,7 @@ import type { Argv } from "yargs" import { Instance } from "../../project/instance" import { Provider } from "../../provider/provider" +import { ProviderID } from "../../provider/schema" import { ModelsDev } from "../../provider/models" import { cmd } from "./cmd" import { UI } from "../ui" @@ -36,7 +37,7 @@ export const ModelsCommand = cmd({ async fn() { const providers = await Provider.list() - function printModels(providerID: string, verbose?: boolean) { + function printModels(providerID: ProviderID, verbose?: boolean) { const provider = providers[providerID] const sortedModels = Object.entries(provider.models).sort(([a], [b]) => a.localeCompare(b)) for (const [modelID, model] of sortedModels) { @@ -56,7 +57,7 @@ export const ModelsCommand = cmd({ return } - printModels(args.provider, args.verbose) + printModels(ProviderID.make(args.provider), args.verbose) return } @@ -69,7 +70,7 @@ export const ModelsCommand = cmd({ }) for (const providerID of providerIDs) { - printModels(providerID, args.verbose) + printModels(ProviderID.make(providerID), args.verbose) } }, }) diff --git a/packages/opencode/src/cli/cmd/pr.ts b/packages/opencode/src/cli/cmd/pr.ts index 3c89ad0c29..ea61354741 100644 --- a/packages/opencode/src/cli/cmd/pr.ts +++ 
b/packages/opencode/src/cli/cmd/pr.ts @@ -1,11 +1,12 @@ import { UI } from "../ui" import { cmd } from "./cmd" import { Instance } from "@/project/instance" -import { $ } from "bun" +import { Process } from "@/util/process" +import { git } from "@/util/git" export const PrCommand = cmd({ command: "pr ", - describe: "fetch and checkout a GitHub PR branch, then run altimate", + describe: "fetch and checkout a GitHub PR branch, then run opencode", builder: (yargs) => yargs.positional("number", { type: "number", @@ -27,21 +28,35 @@ export const PrCommand = cmd({ UI.println(`Fetching and checking out PR #${prNumber}...`) // Use gh pr checkout with custom branch name - const result = await $`gh pr checkout ${prNumber} --branch ${localBranchName} --force`.nothrow() + const result = await Process.run( + ["gh", "pr", "checkout", `${prNumber}`, "--branch", localBranchName, "--force"], + { + nothrow: true, + }, + ) - if (result.exitCode !== 0) { + if (result.code !== 0) { UI.error(`Failed to checkout PR #${prNumber}. 
Make sure you have gh CLI installed and authenticated.`) process.exit(1) } // Fetch PR info for fork handling and session link detection - const prInfoResult = - await $`gh pr view ${prNumber} --json headRepository,headRepositoryOwner,isCrossRepository,headRefName,body`.nothrow() + const prInfoResult = await Process.text( + [ + "gh", + "pr", + "view", + `${prNumber}`, + "--json", + "headRepository,headRepositoryOwner,isCrossRepository,headRefName,body", + ], + { nothrow: true }, + ) let sessionId: string | undefined - if (prInfoResult.exitCode === 0) { - const prInfoText = prInfoResult.text() + if (prInfoResult.code === 0) { + const prInfoText = prInfoResult.text if (prInfoText.trim()) { const prInfo = JSON.parse(prInfoText) @@ -52,28 +67,34 @@ export const PrCommand = cmd({ const remoteName = forkOwner // Check if remote already exists - const remotes = (await $`git remote`.nothrow().text()).trim() + const remotes = (await git(["remote"], { cwd: Instance.worktree })).text().trim() if (!remotes.split("\n").includes(remoteName)) { - await $`git remote add ${remoteName} https://github.com/${forkOwner}/${forkName}.git`.nothrow() + await git(["remote", "add", remoteName, `https://github.com/${forkOwner}/${forkName}.git`], { + cwd: Instance.worktree, + }) UI.println(`Added fork remote: ${remoteName}`) } // Set upstream to the fork so pushes go there const headRefName = prInfo.headRefName - await $`git branch --set-upstream-to=${remoteName}/${headRefName} ${localBranchName}`.nothrow() + await git(["branch", `--set-upstream-to=${remoteName}/${headRefName}`, localBranchName], { + cwd: Instance.worktree, + }) } - // Check for altimate-code session link in PR body + // Check for opencode session link in PR body if (prInfo && prInfo.body) { const sessionMatch = prInfo.body.match(/https:\/\/opncd\.ai\/s\/([a-zA-Z0-9_-]+)/) if (sessionMatch) { const sessionUrl = sessionMatch[0] - UI.println(`Found altimate-code session: ${sessionUrl}`) + UI.println(`Found opencode session: 
${sessionUrl}`) UI.println(`Importing session...`) - const importResult = await $`altimate import ${sessionUrl}`.nothrow() - if (importResult.exitCode === 0) { - const importOutput = importResult.text().trim() + const importResult = await Process.text(["opencode", "import", sessionUrl], { + nothrow: true, + }) + if (importResult.code === 0) { + const importOutput = importResult.text.trim() // Extract session ID from the output (format: "Imported session: ") const sessionIdMatch = importOutput.match(/Imported session: ([a-zA-Z0-9_-]+)/) if (sessionIdMatch) { @@ -88,23 +109,23 @@ export const PrCommand = cmd({ UI.println(`Successfully checked out PR #${prNumber} as branch '${localBranchName}'`) UI.println() - UI.println("Starting altimate...") + UI.println("Starting opencode...") UI.println() - // Launch altimate TUI with session ID if available + // Launch opencode TUI with session ID if available const { spawn } = await import("child_process") - const altimateCodeArgs = sessionId ? ["-s", sessionId] : [] - const altimateCodeProcess = spawn("altimate", altimateCodeArgs, { + const opencodeArgs = sessionId ? 
["-s", sessionId] : [] + const opencodeProcess = spawn("opencode", opencodeArgs, { stdio: "inherit", cwd: process.cwd(), }) await new Promise((resolve, reject) => { - altimateCodeProcess.on("exit", (code) => { + opencodeProcess.on("exit", (code) => { if (code === 0) resolve() - else reject(new Error(`altimate exited with code ${code}`)) + else reject(new Error(`opencode exited with code ${code}`)) }) - altimateCodeProcess.on("error", reject) + opencodeProcess.on("error", reject) }) }, }) diff --git a/packages/opencode/src/cli/cmd/auth.ts b/packages/opencode/src/cli/cmd/providers.ts similarity index 64% rename from packages/opencode/src/cli/cmd/auth.ts rename to packages/opencode/src/cli/cmd/providers.ts index c156086c3c..fa8ced6ce8 100644 --- a/packages/opencode/src/cli/cmd/auth.ts +++ b/packages/opencode/src/cli/cmd/providers.ts @@ -10,21 +10,24 @@ import { Config } from "../../config/config" import { Global } from "../../global" import { Plugin } from "../../plugin" import { Instance } from "../../project/instance" -import { Telemetry } from "../../telemetry" import type { Hooks } from "@opencode-ai/plugin" import { Process } from "../../util/process" import { text } from "node:stream/consumers" -import { setTimeout as sleep } from "node:timers/promises" type PluginAuth = NonNullable -/** - * Handle plugin-based authentication flow. - * Returns true if auth was handled, false if it should fall through to default handling. - */ -async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string): Promise { +async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string, methodName?: string): Promise { let index = 0 - if (plugin.auth.methods.length > 1) { + if (methodName) { + const match = plugin.auth.methods.findIndex((x) => x.label.toLowerCase() === methodName.toLowerCase()) + if (match === -1) { + prompts.log.error( + `Unknown method "${methodName}" for ${provider}. 
Available: ${plugin.auth.methods.map((x) => x.label).join(", ")}`, + ) + process.exit(1) + } + index = match + } else if (plugin.auth.methods.length > 1) { const method = await prompts.select({ message: "Login method", options: [ @@ -39,8 +42,7 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string): } const method = plugin.auth.methods[index] - // Handle prompts for all auth types - await sleep(10) + await new Promise((r) => setTimeout(r, 10)) const inputs: Record = {} if (method.prompts) { for (const prompt of method.prompts) { @@ -82,15 +84,6 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string): const result = await authorize.callback() if (result.type === "failed") { spinner.stop("Failed to authorize", 1) - Telemetry.track({ - type: "auth_login", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: provider, - method: "oauth", - status: "error", - error: "OAuth auto authorization failed", - }) } if (result.type === "success") { const saveProvider = result.provider ?? provider @@ -111,14 +104,6 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string): }) } spinner.stop("Login successful") - Telemetry.track({ - type: "auth_login", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: saveProvider, - method: "oauth", - status: "success", - }) } } @@ -131,15 +116,6 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string): const result = await authorize.callback(code) if (result.type === "failed") { prompts.log.error("Failed to authorize") - Telemetry.track({ - type: "auth_login", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: provider, - method: "oauth", - status: "error", - error: "OAuth code authorization failed", - }) } if (result.type === "success") { const saveProvider = result.provider ?? 
provider @@ -160,14 +136,6 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string): }) } prompts.log.success("Login successful") - Telemetry.track({ - type: "auth_login", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: saveProvider, - method: "oauth", - status: "success", - }) } } @@ -180,15 +148,6 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string): const result = await method.authorize(inputs) if (result.type === "failed") { prompts.log.error("Failed to authorize") - Telemetry.track({ - type: "auth_login", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: provider, - method: "api_key", - status: "error", - error: "API key authorization failed", - }) } if (result.type === "success") { const saveProvider = result.provider ?? provider @@ -197,14 +156,6 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string): key: result.key, }) prompts.log.success("Login successful") - Telemetry.track({ - type: "auth_login", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: saveProvider, - method: "api_key", - status: "success", - }) } prompts.outro("Done") return true @@ -214,11 +165,6 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string): return false } -/** - * Build a deduplicated list of plugin-registered auth providers that are not - * already present in models.dev, respecting enabled/disabled provider lists. - * Pure function with no side effects; safe to test without mocking. 
- */ export function resolvePluginProviders(input: { hooks: Hooks[] existingProviders: Record @@ -246,19 +192,20 @@ export function resolvePluginProviders(input: { return result } -export const AuthCommand = cmd({ - command: "auth", - describe: "manage credentials", +export const ProvidersCommand = cmd({ + command: "providers", + aliases: ["auth"], + describe: "manage AI providers and credentials", builder: (yargs) => - yargs.command(AuthLoginCommand).command(AuthLogoutCommand).command(AuthListCommand).demandCommand(), + yargs.command(ProvidersListCommand).command(ProvidersLoginCommand).command(ProvidersLogoutCommand).demandCommand(), async handler() {}, }) -export const AuthListCommand = cmd({ +export const ProvidersListCommand = cmd({ command: "list", aliases: ["ls"], - describe: "list providers", - async handler() { + describe: "list providers and credentials", + async handler(_args) { UI.empty() const authPath = path.join(Global.Path.data, "auth.json") const homedir = os.homedir() @@ -274,7 +221,6 @@ export const AuthListCommand = cmd({ prompts.outro(`${results.length} credentials`) - // Environment variables section const activeEnvVars: Array<{ provider: string; envVar: string }> = [] for (const [providerID, provider] of Object.entries(database)) { @@ -301,14 +247,25 @@ export const AuthListCommand = cmd({ }, }) -export const AuthLoginCommand = cmd({ +export const ProvidersLoginCommand = cmd({ command: "login [url]", describe: "log in to a provider", builder: (yargs) => - yargs.positional("url", { - describe: "altimate auth provider", - type: "string", - }), + yargs + .positional("url", { + describe: "opencode auth provider", + type: "string", + }) + .option("provider", { + alias: ["p"], + describe: "provider id or name to log in to (skips provider selection)", + type: "string", + }) + .option("method", { + alias: ["m"], + describe: "login method label (skips method selection)", + type: "string", + }), async handler(args) { await Instance.provide({ directory: 
process.cwd(), @@ -316,56 +273,29 @@ export const AuthLoginCommand = cmd({ UI.empty() prompts.intro("Add credential") if (args.url) { - const wellknown = await fetch(`${args.url}/.well-known/altimate-code`).then((x) => x.json() as any) - const raw = wellknown?.auth?.command - if (!Array.isArray(raw) || !raw.every((c: unknown) => typeof c === 'string')) { - prompts.log.warn('Invalid auth command from server') - prompts.outro('Done') - return - } - const cmd = raw as string[] - const confirm = await prompts.confirm({ - message: `The server requests to run: ${cmd.join(" ")}. Allow?`, + const url = args.url.replace(/\/+$/, "") + const wellknown = await fetch(`${url}/.well-known/opencode`).then((x) => x.json() as any) + prompts.log.info(`Running \`${wellknown.auth.command.join(" ")}\``) + const proc = Process.spawn(wellknown.auth.command, { + stdout: "pipe", }) - if (prompts.isCancel(confirm) || !confirm) { - prompts.log.warn("Aborted.") + if (!proc.stdout) { + prompts.log.error("Failed") prompts.outro("Done") return } - const proc = Bun.spawn({ - cmd, - stdout: "pipe", - }) - const exit = await proc.exited + const [exit, token] = await Promise.all([proc.exited, text(proc.stdout)]) if (exit !== 0) { prompts.log.error("Failed") - Telemetry.track({ - type: "auth_login", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: args.url!, - method: "api_key", - status: "error", - error: "Well-known auth command failed", - }) prompts.outro("Done") return } - const token = await new Response(proc.stdout).text() - await Auth.set(args.url, { + await Auth.set(url, { type: "wellknown", key: wellknown.auth.env, token: token.trim(), }) - prompts.log.success("Logged into " + args.url) - Telemetry.track({ - type: "auth_login", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: args.url!, - method: "api_key", - status: "success", - }) + prompts.log.success("Logged into " + url) prompts.outro("Done") 
return } @@ -387,11 +317,11 @@ export const AuthLoginCommand = cmd({ }) const priority: Record = { - "altimate-code": 0, - anthropic: 1, + opencode: 0, + openai: 1, "github-copilot": 2, - openai: 3, - google: 4, + google: 3, + anthropic: 4, openrouter: 5, vercel: 6, } @@ -402,65 +332,80 @@ export const AuthLoginCommand = cmd({ enabled, providerNames: Object.fromEntries(Object.entries(config.provider ?? {}).map(([id, p]) => [id, p.name])), }) - let provider = await prompts.autocomplete({ - message: "Select provider", - maxItems: 8, - options: [ - ...pipe( - providers, - values(), - sortBy( - (x) => priority[x.id] ?? 99, - (x) => x.name ?? x.id, - ), - map((x) => ({ - label: x.name, - value: x.id, - hint: { - "altimate-code": "recommended", - anthropic: "Claude Max or API key", - openai: "ChatGPT Plus/Pro or API key", - }[x.id], - })), + const options = [ + ...pipe( + providers, + values(), + sortBy( + (x) => priority[x.id] ?? 99, + (x) => x.name ?? x.id, ), - ...pluginProviders.map((x) => ({ + map((x) => ({ label: x.name, value: x.id, - hint: "plugin", + hint: { + opencode: "recommended", + anthropic: "API key", + openai: "ChatGPT Plus/Pro or API key", + }[x.id], })), - { - value: "other", - label: "Other", - }, - ], - }) - - if (prompts.isCancel(provider)) throw new UI.CancelledError() + ), + ...pluginProviders.map((x) => ({ + label: x.name, + value: x.id, + hint: "plugin", + })), + ] + + let provider: string + if (args.provider) { + const input = args.provider + const byID = options.find((x) => x.value === input) + const byName = options.find((x) => x.label.toLowerCase() === input.toLowerCase()) + const match = byID ?? 
byName + if (!match) { + prompts.log.error(`Unknown provider "${input}"`) + process.exit(1) + } + provider = match.value + } else { + const selected = await prompts.autocomplete({ + message: "Select provider", + maxItems: 8, + options: [ + ...options, + { + value: "other", + label: "Other", + }, + ], + }) + if (prompts.isCancel(selected)) throw new UI.CancelledError() + provider = selected as string + } const plugin = await Plugin.list().then((x) => x.findLast((x) => x.auth?.provider === provider)) if (plugin && plugin.auth) { - const handled = await handlePluginAuth({ auth: plugin.auth }, provider) + const handled = await handlePluginAuth({ auth: plugin.auth }, provider, args.method) if (handled) return } if (provider === "other") { - provider = await prompts.text({ + const custom = await prompts.text({ message: "Enter provider id", validate: (x) => (x && x.match(/^[0-9a-z-]+$/) ? undefined : "a-z, 0-9 and hyphens only"), }) - if (prompts.isCancel(provider)) throw new UI.CancelledError() - provider = provider.replace(/^@ai-sdk\//, "") - if (prompts.isCancel(provider)) throw new UI.CancelledError() + if (prompts.isCancel(custom)) throw new UI.CancelledError() + provider = custom.replace(/^@ai-sdk\//, "") - // Check if a plugin provides auth for this custom provider const customPlugin = await Plugin.list().then((x) => x.findLast((x) => x.auth?.provider === provider)) if (customPlugin && customPlugin.auth) { - const handled = await handlePluginAuth({ auth: customPlugin.auth }, provider) + const handled = await handlePluginAuth({ auth: customPlugin.auth }, provider, args.method) if (handled) return } prompts.log.warn( - `This only stores a credential for ${provider} - you will need configure it in altimate-code.json, check the docs for examples.`, + `This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`, ) } @@ -469,13 +414,13 @@ export const AuthLoginCommand = cmd({ "Amazon Bedrock authentication 
priority:\n" + " 1. Bearer token (AWS_BEARER_TOKEN_BEDROCK or /connect)\n" + " 2. AWS credential chain (profile, access keys, IAM roles, EKS IRSA)\n\n" + - "Configure via altimate-code.json options (profile, region, endpoint) or\n" + + "Configure via opencode.json options (profile, region, endpoint) or\n" + "AWS environment variables (AWS_PROFILE, AWS_REGION, AWS_ACCESS_KEY_ID, AWS_WEB_IDENTITY_TOKEN_FILE).", ) } - if (provider === "altimate-code") { - prompts.log.info("Create an api key at https://altimate-code.dev/auth") + if (provider === "opencode") { + prompts.log.info("Create an api key at https://altimate.ai/auth") } if (provider === "vercel") { @@ -484,7 +429,7 @@ export const AuthLoginCommand = cmd({ if (["cloudflare", "cloudflare-ai-gateway"].includes(provider)) { prompts.log.info( - "Cloudflare AI Gateway can be configured with CLOUDFLARE_GATEWAY_ID, CLOUDFLARE_ACCOUNT_ID, and CLOUDFLARE_API_TOKEN environment variables. Read more: https://altimate-code.dev/docs/providers/#cloudflare-ai-gateway", + "Cloudflare AI Gateway can be configured with CLOUDFLARE_GATEWAY_ID, CLOUDFLARE_ACCOUNT_ID, and CLOUDFLARE_API_TOKEN environment variables. 
Read more: https://altimate.ai/docs/providers/#cloudflare-ai-gateway", ) } @@ -497,14 +442,6 @@ export const AuthLoginCommand = cmd({ type: "api", key, }) - Telemetry.track({ - type: "auth_login", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: provider, - method: "api_key", - status: "success", - }) prompts.outro("Done") }, @@ -512,10 +449,10 @@ export const AuthLoginCommand = cmd({ }, }) -export const AuthLogoutCommand = cmd({ +export const ProvidersLogoutCommand = cmd({ command: "logout", describe: "log out from a configured provider", - async handler() { + async handler(_args) { UI.empty() const credentials = await Auth.all().then((x) => Object.entries(x)) prompts.intro("Remove credential") @@ -533,12 +470,6 @@ export const AuthLogoutCommand = cmd({ }) if (prompts.isCancel(providerID)) throw new UI.CancelledError() await Auth.remove(providerID) - Telemetry.track({ - type: "auth_logout", - timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", - provider_id: providerID, - }) prompts.outro("Logout successful") }, }) diff --git a/packages/opencode/src/cli/cmd/run.ts b/packages/opencode/src/cli/cmd/run.ts index 5d9472f550..820b3c5902 100644 --- a/packages/opencode/src/cli/cmd/run.ts +++ b/packages/opencode/src/cli/cmd/run.ts @@ -1,6 +1,6 @@ import type { Argv } from "yargs" import path from "path" -import { pathToFileURL } from "bun" +import { pathToFileURL } from "url" import { UI } from "../ui" import { cmd } from "./cmd" import { Flag } from "../../flag/flag" @@ -307,6 +307,11 @@ export const RunCommand = cmd({ type: "string", describe: "attach to a running altimate server (e.g., http://localhost:4096)", }) + .option("password", { + alias: ["p"], + type: "string", + describe: "basic auth password (defaults to OPENCODE_SERVER_PASSWORD)", + }) .option("dir", { type: "string", describe: "directory to run in, path on remote server if attaching", @@ -703,14 +708,21 @@ You are speaking to a 
non-technical business executive. Follow these rules stric } if (args.attach) { - const sdk = createOpencodeClient({ baseUrl: args.attach, directory }) + const headers = (() => { + const password = args.password ?? process.env.OPENCODE_SERVER_PASSWORD + if (!password) return undefined + const username = process.env.OPENCODE_SERVER_USERNAME ?? "opencode" + const auth = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}` + return { Authorization: auth } + })() + const sdk = createOpencodeClient({ baseUrl: args.attach, directory, headers }) return await execute(sdk) } await bootstrap(process.cwd(), async () => { const fetchFn = (async (input: RequestInfo | URL, init?: RequestInit) => { const request = new Request(input, init) - return Server.App().fetch(request) + return Server.Default().fetch(request) }) as typeof globalThis.fetch const sdk = createOpencodeClient({ baseUrl: "http://altimate-code.internal", fetch: fetchFn }) await execute(sdk) diff --git a/packages/opencode/src/cli/cmd/session.ts b/packages/opencode/src/cli/cmd/session.ts index 84840392a6..8acd7480c9 100644 --- a/packages/opencode/src/cli/cmd/session.ts +++ b/packages/opencode/src/cli/cmd/session.ts @@ -1,6 +1,7 @@ import type { Argv } from "yargs" import { cmd } from "./cmd" import { Session } from "../../session" +import { SessionID } from "../../session/schema" import { bootstrap } from "../bootstrap" import { UI } from "../ui" import { Locale } from "../../util/locale" @@ -57,13 +58,14 @@ export const SessionDeleteCommand = cmd({ }, handler: async (args) => { await bootstrap(process.cwd(), async () => { + const sessionID = SessionID.make(args.sessionID) try { - await Session.get(args.sessionID) + await Session.get(sessionID) } catch { UI.error(`Session not found: ${args.sessionID}`) process.exit(1) } - await Session.remove(args.sessionID) + await Session.remove(sessionID) UI.println(UI.Style.TEXT_SUCCESS_BOLD + `Session ${args.sessionID} deleted` + UI.Style.TEXT_NORMAL) }) }, diff 
--git a/packages/opencode/src/cli/cmd/trajectory.ts b/packages/opencode/src/cli/cmd/trajectory.ts new file mode 100644 index 0000000000..7b81d18993 --- /dev/null +++ b/packages/opencode/src/cli/cmd/trajectory.ts @@ -0,0 +1,483 @@ +import type { Argv } from "yargs" +import { cmd } from "./cmd" +import { Session } from "../../session" +import { SessionID } from "../../session/schema" +import { bootstrap } from "../bootstrap" +import { UI } from "../ui" +import { Locale } from "../../util/locale" +import { EOL } from "os" +import type { MessageV2 } from "../../session/message-v2" + +export const TrajectoryCommand = cmd({ + command: "trajectory", + describe: "inspect agent execution trajectories", + builder: (yargs: Argv) => + yargs + .command(TrajectoryListCommand) + .command(TrajectoryShowCommand) + .command(TrajectoryExportCommand) + .demandCommand(), + async handler() {}, +}) + +const TrajectoryListCommand = cmd({ + command: "list", + describe: "list recent sessions with trajectory stats", + builder: (yargs: Argv) => { + return yargs + .option("max-count", { + alias: "n", + describe: "limit to N most recent sessions", + type: "number", + default: 20, + }) + .option("format", { + describe: "output format", + type: "string", + choices: ["table", "json"], + default: "table", + }) + }, + handler: async (args) => { + await bootstrap(process.cwd(), async () => { + const sessions = [...Session.list({ roots: true, limit: args.maxCount })] + + if (sessions.length === 0) { + console.log("No sessions found.") + return + } + + const summaries = await Promise.all(sessions.map((s) => buildSessionSummary(s))) + + if (args.format === "json") { + console.log(JSON.stringify(summaries, null, 2)) + } else { + printTrajectoryTable(summaries) + } + }) + }, +}) + +const TrajectoryShowCommand = cmd({ + command: "show ", + describe: "show detailed trajectory for a session", + builder: (yargs: Argv) => { + return yargs + .positional("sessionID", { + describe: "session ID to inspect", + 
type: "string", + demandOption: true, + }) + .option("verbose", { + alias: "v", + describe: "show full tool call inputs/outputs", + type: "boolean", + default: false, + }) + }, + handler: async (args) => { + await bootstrap(process.cwd(), async () => { + const sid = SessionID.make(args.sessionID) + let session: Session.Info + try { + session = await Session.get(sid) + } catch { + UI.error(`Session not found: ${args.sessionID}`) + process.exit(1) + } + + const messages = await Session.messages({ sessionID: sid }) + printTrajectoryDetail(session, messages, args.verbose) + }) + }, +}) + +const TrajectoryExportCommand = cmd({ + command: "export ", + describe: "export session trajectory as structured JSON", + builder: (yargs: Argv) => { + return yargs.positional("sessionID", { + describe: "session ID to export", + type: "string", + demandOption: true, + }) + }, + handler: async (args) => { + await bootstrap(process.cwd(), async () => { + const sid = SessionID.make(args.sessionID) + let session: Session.Info + try { + session = await Session.get(sid) + } catch { + UI.error(`Session not found: ${args.sessionID}`) + process.exit(1) + } + + const messages = await Session.messages({ sessionID: sid }) + const trajectory = buildTrajectoryExport(session, messages) + process.stdout.write(JSON.stringify(trajectory, null, 2)) + process.stdout.write(EOL) + }) + }, +}) + +// --- Helpers --- + +interface SessionSummary { + id: string + title: string + agent: string + model: string + duration_ms: number + cost: number + tool_calls: number + generations: number + outcome: "completed" | "error" | "in-progress" + updated: number +} + +async function buildSessionSummary(session: Session.Info): Promise { + const messages = await Session.messages({ sessionID: session.id }) + + let agent = "" + let model = "" + let cost = 0 + let toolCalls = 0 + let generations = 0 + let hadError = false + let startTime = session.time.created + let endTime = session.time.updated + + for (const msg of 
messages) { + if (msg.info.role === "user" && !agent) { + const userMsg = msg.info as MessageV2.User + agent = userMsg.agent ?? "" + if (userMsg.model) model = `${userMsg.model.providerID}/${userMsg.model.modelID}` + } + if (msg.info.role === "assistant") { + const assistantMsg = msg.info as MessageV2.Assistant + cost += assistantMsg.cost || 0 + generations++ + if (assistantMsg.error) hadError = true + } + for (const part of msg.parts) { + if (part.type === "tool") toolCalls++ + } + } + + return { + id: session.id, + title: session.title, + agent, + model, + duration_ms: endTime - startTime, + cost, + tool_calls: toolCalls, + generations, + outcome: hadError ? "error" : "completed", + updated: session.time.updated, + } +} + +function printTrajectoryTable(summaries: SessionSummary[]) { + const idW = 12 + const titleW = 30 + const agentW = 10 + const costW = 8 + const toolsW = 6 + const gensW = 5 + const durW = 10 + const outcomeW = 10 + + const header = [ + "ID".padEnd(idW), + "Title".padEnd(titleW), + "Agent".padEnd(agentW), + "Cost".padStart(costW), + "Tools".padStart(toolsW), + "Gens".padStart(gensW), + "Duration".padStart(durW), + "Status".padEnd(outcomeW), + ].join(" ") + + console.log(header) + console.log("-".repeat(header.length)) + + for (const s of summaries) { + const line = [ + s.id.slice(-idW).padEnd(idW), + Locale.truncate(s.title, titleW).padEnd(titleW), + (s.agent || "-").slice(0, agentW).padEnd(agentW), + `$${s.cost.toFixed(2)}`.padStart(costW), + String(s.tool_calls).padStart(toolsW), + String(s.generations).padStart(gensW), + formatDuration(s.duration_ms).padStart(durW), + s.outcome.padEnd(outcomeW), + ].join(" ") + console.log(line) + } +} + +function printTrajectoryDetail( + session: Session.Info, + messages: MessageV2.WithParts[], + verbose: boolean, +) { + console.log(`Session: ${session.id}`) + console.log(`Title: ${session.title}`) + console.log(`Created: ${new Date(session.time.created).toISOString()}`) + console.log(`Updated: ${new 
Date(session.time.updated).toISOString()}`) + console.log("") + + let stepIndex = 0 + let totalCost = 0 + let totalToolCalls = 0 + + for (const msg of messages) { + if (msg.info.role === "user") { + const userMsg = msg.info as MessageV2.User + const textParts = msg.parts.filter( + (p): p is MessageV2.TextPart => p.type === "text" && !p.synthetic, + ) + if (textParts.length > 0) { + console.log(`--- User (agent: ${userMsg.agent || "unknown"}) ---`) + for (const p of textParts) { + const text = verbose ? p.text : Locale.truncate(p.text, 200) + console.log(` ${text}`) + } + console.log("") + } + } + + if (msg.info.role === "assistant") { + stepIndex++ + const assistantMsg = msg.info as MessageV2.Assistant + totalCost += assistantMsg.cost || 0 + + const tokens = assistantMsg.tokens + const tokenStr = tokens + ? `in:${tokens.input} out:${tokens.output} cache_r:${tokens.cache?.read || 0} cache_w:${tokens.cache?.write || 0}` + : "n/a" + + console.log( + `--- Step ${stepIndex} (model: ${assistantMsg.modelID}, cost: $${(assistantMsg.cost || 0).toFixed(4)}, tokens: ${tokenStr}) ---`, + ) + + if (assistantMsg.summary) { + console.log(" [COMPACTION SUMMARY]") + } + + // Show text parts + const textParts = msg.parts.filter((p): p is MessageV2.TextPart => p.type === "text") + for (const p of textParts) { + const text = verbose ? p.text : Locale.truncate(p.text, 300) + console.log(` ${text}`) + } + + // Show tool calls + const toolParts = msg.parts.filter((p): p is MessageV2.ToolPart => p.type === "tool") + for (const tool of toolParts) { + totalToolCalls++ + const status = tool.state.status + const duration = + tool.state.status === "completed" || tool.state.status === "error" + ? tool.state.time.end - tool.state.time.start + : 0 + + console.log(` [TOOL] ${tool.tool} (${status}, ${formatDuration(duration)})`) + + if (verbose) { + const input = + tool.state.status === "completed" || + tool.state.status === "running" || + tool.state.status === "error" + ? 
tool.state.input + : null + if (input) { + console.log(` Input: ${JSON.stringify(input, null, 2).split("\n").join("\n ")}`) + } + if (tool.state.status === "completed" && tool.state.output) { + const output = Locale.truncate(tool.state.output, 500) + console.log(` Output: ${output}`) + } + if (tool.state.status === "error" && tool.state.error) { + console.log(` Error: ${tool.state.error}`) + } + } + } + + if (assistantMsg.error) { + console.log(` [ERROR] ${JSON.stringify(assistantMsg.error)}`) + } + + console.log("") + } + } + + console.log("=".repeat(60)) + console.log(`Total steps: ${stepIndex}`) + console.log(`Total tool calls: ${totalToolCalls}`) + console.log(`Total cost: $${totalCost.toFixed(4)}`) + console.log(`Duration: ${formatDuration(session.time.updated - session.time.created)}`) +} + +interface TrajectoryExport { + version: "1.0" + session: { + id: string + title: string + agent: string + model: { id: string; provider: string } + started_at: number + ended_at: number + duration_ms: number + total_cost: number + total_tokens: { + input: number + output: number + reasoning: number + cache_read: number + cache_write: number + } + } + steps: Array<{ + index: number + generation: { + model_id: string + provider_id: string + finish_reason: string | undefined + tokens: { + input: number + output: number + reasoning: number + cache_read: number + cache_write: number + } + cost: number + } + text: string | undefined + tool_calls: Array<{ + name: string + input: unknown + output: string | undefined + status: string + error: string | undefined + duration_ms: number + }> + is_summary: boolean + }> + errors: Array<{ + step: number + error: unknown + }> +} + +function buildTrajectoryExport( + session: Session.Info, + messages: MessageV2.WithParts[], +): TrajectoryExport { + let agent = "" + let model = { id: "", provider: "" } + let totalCost = 0 + const totalTokens = { input: 0, output: 0, reasoning: 0, cache_read: 0, cache_write: 0 } + const steps: 
TrajectoryExport["steps"] = [] + const errors: TrajectoryExport["errors"] = [] + + let stepIndex = 0 + + for (const msg of messages) { + if (msg.info.role === "user" && !agent) { + const userMsg = msg.info as MessageV2.User + agent = userMsg.agent ?? "" + if (userMsg.model) { + model = { id: userMsg.model.modelID, provider: userMsg.model.providerID } + } + } + + if (msg.info.role === "assistant") { + stepIndex++ + const a = msg.info as MessageV2.Assistant + totalCost += a.cost || 0 + + if (a.tokens) { + totalTokens.input += a.tokens.input || 0 + totalTokens.output += a.tokens.output || 0 + totalTokens.reasoning += a.tokens.reasoning || 0 + totalTokens.cache_read += a.tokens.cache?.read || 0 + totalTokens.cache_write += a.tokens.cache?.write || 0 + } + + const textParts = msg.parts + .filter((p): p is MessageV2.TextPart => p.type === "text") + .map((p) => p.text) + .join("\n") + + const toolCalls = msg.parts + .filter((p): p is MessageV2.ToolPart => p.type === "tool") + .map((tool) => ({ + name: tool.tool, + input: + tool.state.status === "completed" || + tool.state.status === "running" || + tool.state.status === "error" + ? tool.state.input + : null, + output: tool.state.status === "completed" ? tool.state.output : undefined, + status: tool.state.status, + error: tool.state.status === "error" ? tool.state.error : undefined, + duration_ms: + tool.state.status === "completed" || tool.state.status === "error" + ? tool.state.time.end - tool.state.time.start + : 0, + })) + + steps.push({ + index: stepIndex, + generation: { + model_id: a.modelID, + provider_id: a.providerID, + finish_reason: a.finish, + tokens: { + input: a.tokens?.input || 0, + output: a.tokens?.output || 0, + reasoning: a.tokens?.reasoning || 0, + cache_read: a.tokens?.cache?.read || 0, + cache_write: a.tokens?.cache?.write || 0, + }, + cost: a.cost || 0, + }, + text: textParts || undefined, + tool_calls: toolCalls, + is_summary: a.summary ?? 
false, + }) + + if (a.error) { + errors.push({ step: stepIndex, error: a.error }) + } + } + } + + return { + version: "1.0", + session: { + id: session.id, + title: session.title, + agent, + model, + started_at: session.time.created, + ended_at: session.time.updated, + duration_ms: session.time.updated - session.time.created, + total_cost: totalCost, + total_tokens: totalTokens, + }, + steps, + errors, + } +} + +function formatDuration(ms: number): string { + if (ms < 1000) return `${ms}ms` + if (ms < 60_000) return `${(ms / 1000).toFixed(1)}s` + if (ms < 3_600_000) return `${Math.floor(ms / 60_000)}m ${Math.floor((ms % 60_000) / 1000)}s` + return `${Math.floor(ms / 3_600_000)}h ${Math.floor((ms % 3_600_000) / 60_000)}m` +} diff --git a/packages/opencode/src/cli/cmd/tui/app.tsx b/packages/opencode/src/cli/cmd/tui/app.tsx index 9d94bddc3e..10d12aefb1 100644 --- a/packages/opencode/src/cli/cmd/tui/app.tsx +++ b/packages/opencode/src/cli/cmd/tui/app.tsx @@ -20,6 +20,7 @@ import { DialogHelp } from "./ui/dialog-help" import { CommandProvider, useCommandDialog } from "@tui/component/dialog-command" import { DialogAgent } from "@tui/component/dialog-agent" import { DialogSessionList } from "@tui/component/dialog-session-list" +import { DialogWorkspaceList } from "@tui/component/dialog-workspace-list" import { KeybindProvider } from "@tui/context/keybind" import { ThemeProvider, useTheme } from "@tui/context/theme" import { Home } from "@tui/routes/home" @@ -35,11 +36,11 @@ import { TuiEvent } from "./event" import { KVProvider, useKV } from "./context/kv" import { Provider } from "@/provider/provider" import { ArgsProvider, useArgs, type Args } from "./context/args" -import { TuiConfigProvider } from "@tui/context/tui-config" -import type { TuiConfig } from "@/config/tui" import open from "open" import { writeHeapSnapshot } from "v8" import { PromptRefProvider, usePromptRef } from "./context/prompt" +import { TuiConfigProvider } from "./context/tui-config" +import { 
TuiConfig } from "@/config/tui" async function getTerminalBackgroundColor(): Promise<"dark" | "light"> { // can't set raw mode if not a TTY @@ -106,12 +107,11 @@ import type { EventSource } from "./context/sdk" export function tui(input: { url: string args: Args + config: TuiConfig.Info directory?: string fetch?: typeof fetch headers?: RequestInit["headers"] events?: EventSource - tuiConfig?: TuiConfig.Info - onExit?: () => Promise }) { // promise to prevent immediate exit return new Promise(async (resolve) => { @@ -119,7 +119,6 @@ export function tui(input: { win32DisableProcessedInput() const mode = await getTerminalBackgroundColor() - const tuiConfig = input.tuiConfig ?? {} // Re-clear after getTerminalBackgroundColor() — setRawMode(false) restores // the original console mode which re-enables ENABLE_PROCESSED_INPUT. @@ -127,7 +126,6 @@ export function tui(input: { const onExit = async () => { unguard?.() - await input.onExit?.() resolve() } @@ -142,37 +140,37 @@ export function tui(input: { - - - + + + - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + - - - + + + @@ -267,14 +265,14 @@ function App() { if (!terminalTitleEnabled() || Flag.OPENCODE_DISABLE_TERMINAL_TITLE) return if (route.data.type === "home") { - renderer.setTerminalTitle("Altimate CLI") + renderer.setTerminalTitle("Altimate Code") return } if (route.data.type === "session") { const session = sync.session.get(route.data.sessionID) if (!session || SessionApi.isDefaultTitle(session.title)) { - renderer.setTerminalTitle("Altimate CLI") + renderer.setTerminalTitle("Altimate Code") return } @@ -374,6 +372,22 @@ function App() { dialog.replace(() => ) }, }, + ...(Flag.OPENCODE_EXPERIMENTAL_WORKSPACES + ? 
[ + { + title: "Manage workspaces", + value: "workspace.list", + category: "Workspace", + suggested: true, + slash: { + name: "workspaces", + }, + onSelect: () => { + dialog.replace(() => ) + }, + }, + ] + : []), { title: "New session", suggested: route.data.type === "session", @@ -388,9 +402,12 @@ function App() { const current = promptRef.current // Don't require focus - if there's any text, preserve it const currentPrompt = current?.current?.input ? current.current : undefined + const workspaceID = + route.data.type === "session" ? sync.session.get(route.data.sessionID)?.workspaceID : undefined route.navigate({ type: "home", initialPrompt: currentPrompt, + workspaceID, }) dialog.clear() }, @@ -516,7 +533,7 @@ function App() { { title: "View status", keybind: "status_view", - value: "altimate-code.status", + value: "opencode.status", slash: { name: "status", }, @@ -561,7 +578,7 @@ function App() { title: "Open docs", value: "docs.open", onSelect: () => { - open("https://crispy-adventure-6lj1ey3.pages.github.io/").catch(() => {}) + open("https://altimate.ai/docs").catch(() => {}) dialog.clear() }, category: "System", @@ -660,20 +677,6 @@ function App() { }, ]) - createEffect(() => { - const currentModel = local.model.current() - if (!currentModel) return - if (currentModel.providerID === "openrouter" && !kv.get("openrouter_warning", false)) { - untrack(() => { - DialogAlert.show( - dialog, - "Warning", - "While openrouter is a convenient way to access LLMs your request will often be routed to subpar providers that do not work well in our testing.\n\nFor reliable access to models check out Altimate CLI Zen\nhttps://altimate-code.dev/zen", - ).then(() => kv.set("openrouter_warning", true)) - }) - } - }) - sdk.event.on(TuiEvent.CommandExecute.type, (evt) => { command.trigger(evt.properties.command) }) @@ -730,7 +733,7 @@ function App() { toast.show({ variant: "info", title: "Update Available", - message: `Altimate CLI v${evt.properties.version} is available. 
Run 'altimate-code upgrade' to update manually.`, + message: `Altimate Code v${evt.properties.version} is available. Run 'opencode upgrade' to update manually.`, duration: 10000, }) }) @@ -785,7 +788,7 @@ function ErrorComponent(props: { }) const [copied, setCopied] = createSignal(false) - const issueURL = new URL("https://github.com/altimate/altimate-code/issues/new?template=bug-report.yml") + const issueURL = new URL("https://github.com/AltimateAI/altimate-code/issues/new?template=bug-report.yml") // Choose safe fallback colors per mode since theme context may not be available const isLight = props.mode === "light" @@ -807,7 +810,7 @@ function ErrorComponent(props: { ) } - issueURL.searchParams.set("altimate-code-version", Installation.VERSION) + issueURL.searchParams.set("opencode-version", Installation.VERSION) const copyIssueURL = () => { Clipboard.copy(issueURL.toString()).then(() => { diff --git a/packages/opencode/src/cli/cmd/tui/attach.ts b/packages/opencode/src/cli/cmd/tui/attach.ts index fe5eb04972..1dc4c6d525 100644 --- a/packages/opencode/src/cli/cmd/tui/attach.ts +++ b/packages/opencode/src/cli/cmd/tui/attach.ts @@ -70,6 +70,7 @@ export const AttachCommand = cmd({ sessionID: args.session, fork: args.fork, }, + config: {}, directory, headers, }) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-provider.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-provider.tsx index a5601a938d..3425fba31d 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-provider.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-provider.tsx @@ -36,7 +36,7 @@ export function createDialogProviderOptions() { value: provider.id, description: { opencode: "(Recommended)", - anthropic: "(Claude Max or API key)", + anthropic: "(API key)", openai: "(ChatGPT Plus/Pro or API key)", "opencode-go": "Low cost subscription for everyone", }[provider.id], @@ -220,7 +220,7 @@ function ApiMethod(props: ApiMethodProps) { opencode: ( - OpenCode Zen gives 
you access to all the best coding models at the cheapest prices with a single API + Altimate Code Zen gives you access to all the best coding models at the cheapest prices with a single API key. @@ -231,7 +231,7 @@ function ApiMethod(props: ApiMethodProps) { "opencode-go": ( - OpenCode Go is a $10 per month subscription that provides reliable access to popular open coding models + Altimate Code Go is a $10 per month subscription that provides reliable access to popular open coding models with generous usage limits. diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-list.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-list.tsx new file mode 100644 index 0000000000..b11ad6a734 --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-workspace-list.tsx @@ -0,0 +1,326 @@ +import { useDialog } from "@tui/ui/dialog" +import { DialogSelect } from "@tui/ui/dialog-select" +import { useRoute } from "@tui/context/route" +import { useSync } from "@tui/context/sync" +import { createEffect, createMemo, createSignal, onMount } from "solid-js" +import type { Session } from "@opencode-ai/sdk/v2" +import { useSDK } from "../context/sdk" +import { useToast } from "../ui/toast" +import { useKeybind } from "../context/keybind" +import { DialogSessionList } from "./workspace/dialog-session-list" +import { createOpencodeClient } from "@opencode-ai/sdk/v2" + +async function openWorkspace(input: { + dialog: ReturnType + route: ReturnType + sdk: ReturnType + sync: ReturnType + toast: ReturnType + workspaceID: string + forceCreate?: boolean +}) { + const cacheSession = (session: Session) => { + input.sync.set( + "session", + [...input.sync.data.session.filter((item) => item.id !== session.id), session].toSorted((a, b) => + a.id.localeCompare(b.id), + ), + ) + } + + const client = createOpencodeClient({ + baseUrl: input.sdk.url, + fetch: input.sdk.fetch, + directory: input.sync.data.path.directory || input.sdk.directory, + 
experimental_workspaceID: input.workspaceID, + }) + const listed = input.forceCreate ? undefined : await client.session.list({ roots: true, limit: 1 }) + const session = listed?.data?.[0] + if (session?.id) { + cacheSession(session) + input.route.navigate({ + type: "session", + sessionID: session.id, + }) + input.dialog.clear() + return + } + let created: Session | undefined + while (!created) { + const result = await client.session.create({ workspaceID: input.workspaceID }).catch(() => undefined) + if (!result) { + input.toast.show({ + message: "Failed to open workspace", + variant: "error", + }) + return + } + if (result.response.status >= 500 && result.response.status < 600) { + await Bun.sleep(1000) + continue + } + if (!result.data) { + input.toast.show({ + message: "Failed to open workspace", + variant: "error", + }) + return + } + created = result.data + } + cacheSession(created) + input.route.navigate({ + type: "session", + sessionID: created.id, + }) + input.dialog.clear() +} + +function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) => Promise }) { + const dialog = useDialog() + const sync = useSync() + const sdk = useSDK() + const toast = useToast() + const [creating, setCreating] = createSignal() + + onMount(() => { + dialog.setSize("medium") + }) + + const options = createMemo(() => { + const type = creating() + if (type) { + return [ + { + title: `Creating ${type} workspace...`, + value: "creating" as const, + description: "This can take a while for remote environments", + }, + ] + } + return [ + { + title: "Worktree", + value: "worktree" as const, + description: "Create a local git worktree", + }, + ] + }) + + const createWorkspace = async (type: string) => { + if (creating()) return + setCreating(type) + + const result = await sdk.client.experimental.workspace.create({ type, branch: null }).catch((err) => { + console.log(err) + return undefined + }) + console.log(JSON.stringify(result, null, 2)) + const workspace = result?.data + if 
(!workspace) { + setCreating(undefined) + toast.show({ + message: "Failed to create workspace", + variant: "error", + }) + return + } + await sync.workspace.sync() + await props.onSelect(workspace.id) + setCreating(undefined) + } + + return ( + { + if (option.value === "creating") return + void createWorkspace(option.value) + }} + /> + ) +} + +export function DialogWorkspaceList() { + const dialog = useDialog() + const route = useRoute() + const sync = useSync() + const sdk = useSDK() + const toast = useToast() + const keybind = useKeybind() + const [toDelete, setToDelete] = createSignal() + const [counts, setCounts] = createSignal>({}) + + const open = (workspaceID: string, forceCreate?: boolean) => + openWorkspace({ + dialog, + route, + sdk, + sync, + toast, + workspaceID, + forceCreate, + }) + + async function selectWorkspace(workspaceID: string) { + if (workspaceID === "__local__") { + if (localCount() > 0) { + dialog.replace(() => ) + return + } + route.navigate({ + type: "home", + }) + dialog.clear() + return + } + const count = counts()[workspaceID] + if (count && count > 0) { + dialog.replace(() => ) + return + } + + if (count === 0) { + await open(workspaceID) + return + } + const client = createOpencodeClient({ + baseUrl: sdk.url, + fetch: sdk.fetch, + directory: sync.data.path.directory || sdk.directory, + experimental_workspaceID: workspaceID, + }) + const listed = await client.session.list({ roots: true, limit: 1 }).catch(() => undefined) + if (listed?.data?.length) { + dialog.replace(() => ) + return + } + await open(workspaceID) + } + + const currentWorkspaceID = createMemo(() => { + if (route.data.type === "session") { + return sync.session.get(route.data.sessionID)?.workspaceID ?? 
"__local__" + } + return "__local__" + }) + + const localCount = createMemo( + () => sync.data.session.filter((session) => !session.workspaceID && !session.parentID).length, + ) + + let run = 0 + createEffect(() => { + const workspaces = sync.data.workspaceList + const next = ++run + if (!workspaces.length) { + setCounts({}) + return + } + setCounts(Object.fromEntries(workspaces.map((workspace) => [workspace.id, undefined]))) + void Promise.all( + workspaces.map(async (workspace) => { + const client = createOpencodeClient({ + baseUrl: sdk.url, + fetch: sdk.fetch, + directory: sync.data.path.directory || sdk.directory, + experimental_workspaceID: workspace.id, + }) + const result = await client.session.list({ roots: true }).catch(() => undefined) + return [workspace.id, result ? (result.data?.length ?? 0) : null] as const + }), + ).then((entries) => { + if (run !== next) return + setCounts(Object.fromEntries(entries)) + }) + }) + + const options = createMemo(() => [ + { + title: "Local", + value: "__local__", + category: "Workspace", + description: "Use the local machine", + footer: `${localCount()} session${localCount() === 1 ? "" : "s"}`, + }, + ...sync.data.workspaceList.map((workspace) => { + const count = counts()[workspace.id] + return { + title: + toDelete() === workspace.id + ? `Delete ${workspace.id}? Press ${keybind.print("session_delete")} again` + : workspace.id, + value: workspace.id, + category: workspace.type, + description: workspace.branch ? `Branch ${workspace.branch}` : undefined, + footer: + count === undefined + ? "Loading sessions..." + : count === null + ? "Sessions unavailable" + : `${count} session${count === 1 ? 
"" : "s"}`, + } + }), + { + title: "+ New workspace", + value: "__create__", + category: "Actions", + description: "Create a new workspace", + }, + ]) + + onMount(() => { + dialog.setSize("large") + void sync.workspace.sync() + }) + + return ( + { + setToDelete(undefined) + }} + onSelect={(option) => { + setToDelete(undefined) + if (option.value === "__create__") { + dialog.replace(() => open(workspaceID, true)} />) + return + } + void selectWorkspace(option.value) + }} + keybind={[ + { + keybind: keybind.all.session_delete?.[0], + title: "delete", + onTrigger: async (option) => { + if (option.value === "__create__" || option.value === "__local__") return + if (toDelete() !== option.value) { + setToDelete(option.value) + return + } + const result = await sdk.client.experimental.workspace.remove({ id: option.value }).catch(() => undefined) + setToDelete(undefined) + if (result?.error) { + toast.show({ + message: "Failed to delete workspace", + variant: "error", + }) + return + } + if (currentWorkspaceID() === option.value) { + route.navigate({ + type: "home", + }) + } + await sync.workspace.sync() + }, + }, + ]} + /> + ) +} diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx index d63c248fb8..c85426cc24 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/index.tsx @@ -9,7 +9,7 @@ import { EmptyBorder } from "@tui/component/border" import { useSDK } from "@tui/context/sdk" import { useRoute } from "@tui/context/route" import { useSync } from "@tui/context/sync" -import { Identifier } from "@/id/id" +import { MessageID, PartID } from "@/session/schema" import { createStore, produce } from "solid-js/store" import { useKeybind } from "@tui/context/keybind" import { usePromptHistory, type PromptInfo } from "./history" @@ -37,6 +37,7 @@ import { DialogSkill } from "../dialog-skill" export type PromptProps = { 
sessionID?: string + workspaceID?: string visible?: boolean disabled?: boolean onSubmit?: () => void @@ -539,13 +540,28 @@ export function Prompt(props: PromptProps) { promptModelWarning() return } - const sessionID = props.sessionID - ? props.sessionID - : await (async () => { - const sessionID = await sdk.client.session.create({}).then((x) => x.data!.id) - return sessionID - })() - const messageID = Identifier.ascending("message") + + let sessionID = props.sessionID + if (sessionID == null) { + const res = await sdk.client.session.create({ + workspaceID: props.workspaceID, + }) + + if (res.error) { + console.log("Creating a session failed:", res.error) + + toast.show({ + message: "Creating a session failed. Open console for more details.", + variant: "error", + }) + + return + } + + sessionID = res.data.id + } + + const messageID = MessageID.ascending() let inputText = store.prompt.input // Expand pasted text inline before submitting @@ -608,7 +624,7 @@ export function Prompt(props: PromptProps) { parts: nonTextParts .filter((x) => x.type === "file") .map((x) => ({ - id: Identifier.ascending("part"), + id: PartID.ascending(), ...x, })), }) @@ -623,12 +639,12 @@ export function Prompt(props: PromptProps) { variant, parts: [ { - id: Identifier.ascending("part"), + id: PartID.ascending(), type: "text", text: inputText, }, ...nonTextParts.map((x) => ({ - id: Identifier.ascending("part"), + id: PartID.ascending(), ...x, })), ], diff --git a/packages/opencode/src/cli/cmd/tui/component/workspace/dialog-session-list.tsx b/packages/opencode/src/cli/cmd/tui/component/workspace/dialog-session-list.tsx new file mode 100644 index 0000000000..326f094a56 --- /dev/null +++ b/packages/opencode/src/cli/cmd/tui/component/workspace/dialog-session-list.tsx @@ -0,0 +1,151 @@ +import { useDialog } from "@tui/ui/dialog" +import { DialogSelect } from "@tui/ui/dialog-select" +import { useRoute } from "@tui/context/route" +import { useSync } from "@tui/context/sync" +import { createMemo, 
createSignal, createResource, onMount, Show } from "solid-js" +import { Locale } from "@/util/locale" +import { useKeybind } from "../../context/keybind" +import { useTheme } from "../../context/theme" +import { useSDK } from "../../context/sdk" +import { DialogSessionRename } from "../dialog-session-rename" +import { useKV } from "../../context/kv" +import { createDebouncedSignal } from "../../util/signal" +import { Spinner } from "../spinner" +import { useToast } from "../../ui/toast" + +export function DialogSessionList(props: { workspaceID?: string; localOnly?: boolean } = {}) { + const dialog = useDialog() + const route = useRoute() + const sync = useSync() + const keybind = useKeybind() + const { theme } = useTheme() + const sdk = useSDK() + const kv = useKV() + const toast = useToast() + const [toDelete, setToDelete] = createSignal() + const [search, setSearch] = createDebouncedSignal("", 150) + + const [listed, listedActions] = createResource( + () => props.workspaceID, + async (workspaceID) => { + if (!workspaceID) return undefined + const result = await sdk.client.session.list({ roots: true }) + return result.data ?? [] + }, + ) + + const [searchResults] = createResource(search, async (query) => { + if (!query || props.localOnly) return undefined + const result = await sdk.client.session.list({ + search: query, + limit: 30, + ...(props.workspaceID ? { roots: true } : {}), + }) + return result.data ?? [] + }) + + const currentSessionID = createMemo(() => (route.data.type === "session" ? route.data.sessionID : undefined)) + + const sessions = createMemo(() => { + if (searchResults()) return searchResults()! + if (props.workspaceID) return listed() ?? 
[] + if (props.localOnly) return sync.data.session.filter((session) => !session.workspaceID) + return sync.data.session + }) + + const options = createMemo(() => { + const today = new Date().toDateString() + return sessions() + .filter((x) => { + if (x.parentID !== undefined) return false + if (props.workspaceID && listed()) return true + if (props.workspaceID) return x.workspaceID === props.workspaceID + if (props.localOnly) return !x.workspaceID + return true + }) + .toSorted((a, b) => b.time.updated - a.time.updated) + .map((x) => { + const date = new Date(x.time.updated) + let category = date.toDateString() + if (category === today) { + category = "Today" + } + const isDeleting = toDelete() === x.id + const status = sync.data.session_status?.[x.id] + const isWorking = status?.type === "busy" + return { + title: isDeleting ? `Press ${keybind.print("session_delete")} again to confirm` : x.title, + bg: isDeleting ? theme.error : undefined, + value: x.id, + category, + footer: Locale.time(x.time.updated), + gutter: isWorking ? 
: undefined, + } + }) + }) + + onMount(() => { + dialog.setSize("large") + }) + + return ( + { + setToDelete(undefined) + }} + onSelect={(option) => { + route.navigate({ + type: "session", + sessionID: option.value, + }) + dialog.clear() + }} + keybind={[ + { + keybind: keybind.all.session_delete?.[0], + title: "delete", + onTrigger: async (option) => { + if (toDelete() === option.value) { + const deleted = await sdk.client.session + .delete({ + sessionID: option.value, + }) + .then(() => true) + .catch(() => false) + setToDelete(undefined) + if (!deleted) { + toast.show({ + message: "Failed to delete session", + variant: "error", + }) + return + } + if (props.workspaceID) { + listedActions.mutate((sessions) => sessions?.filter((session) => session.id !== option.value)) + return + } + sync.set( + "session", + sync.data.session.filter((session) => session.id !== option.value), + ) + return + } + setToDelete(option.value) + }, + }, + { + keybind: keybind.all.session_rename?.[0], + title: "rename", + onTrigger: async (option) => { + dialog.replace(() => ) + }, + }, + ]} + /> + ) +} diff --git a/packages/opencode/src/cli/cmd/tui/context/exit.tsx b/packages/opencode/src/cli/cmd/tui/context/exit.tsx index a6f775913a..3ed4ae3d2c 100644 --- a/packages/opencode/src/cli/cmd/tui/context/exit.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/exit.tsx @@ -15,6 +15,7 @@ export const { use: useExit, provider: ExitProvider } = createSimpleContext({ init: (input: { onExit?: () => Promise }) => { const renderer = useRenderer() let message: string | undefined + let task: Promise | undefined const store = { set: (value?: string) => { const prev = message @@ -29,20 +30,24 @@ export const { use: useExit, provider: ExitProvider } = createSimpleContext({ get: () => message, } const exit: Exit = Object.assign( - async (reason?: unknown) => { - // Reset window title before destroying renderer - renderer.setTerminalTitle("") - renderer.destroy() - win32FlushInputBuffer() - if (reason) { - 
const formatted = FormatError(reason) ?? FormatUnknownError(reason) - if (formatted) { - process.stderr.write(formatted + "\n") + (reason?: unknown) => { + if (task) return task + task = (async () => { + // Reset window title before destroying renderer + renderer.setTerminalTitle("") + renderer.destroy() + win32FlushInputBuffer() + if (reason) { + const formatted = FormatError(reason) ?? FormatUnknownError(reason) + if (formatted) { + process.stderr.write(formatted + "\n") + } } - } - const text = store.get() - if (text) process.stdout.write(text + "\n") - await input.onExit?.() + const text = store.get() + if (text) process.stdout.write(text + "\n") + await input.onExit?.() + })() + return task }, { message: store, diff --git a/packages/opencode/src/cli/cmd/tui/context/route.tsx b/packages/opencode/src/cli/cmd/tui/context/route.tsx index 358461921b..e96cd2c3a4 100644 --- a/packages/opencode/src/cli/cmd/tui/context/route.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/route.tsx @@ -5,6 +5,7 @@ import type { PromptInfo } from "../component/prompt/history" export type HomeRoute = { type: "home" initialPrompt?: PromptInfo + workspaceID?: string } export type SessionRoute = { diff --git a/packages/opencode/src/cli/cmd/tui/context/sdk.tsx b/packages/opencode/src/cli/cmd/tui/context/sdk.tsx index 7fa7e05c3d..2403a4e938 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sdk.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sdk.tsx @@ -5,6 +5,7 @@ import { batch, onCleanup, onMount } from "solid-js" export type EventSource = { on: (handler: (event: Event) => void) => () => void + setWorkspace?: (workspaceID?: string) => void } export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ @@ -17,13 +18,21 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ events?: EventSource }) => { const abort = new AbortController() - const sdk = createOpencodeClient({ - baseUrl: props.url, - signal: abort.signal, - directory: 
props.directory, - fetch: props.fetch, - headers: props.headers, - }) + let workspaceID: string | undefined + let sse: AbortController | undefined + + function createSDK() { + return createOpencodeClient({ + baseUrl: props.url, + signal: abort.signal, + directory: props.directory, + fetch: props.fetch, + headers: props.headers, + experimental_workspaceID: workspaceID, + }) + } + + let sdk = createSDK() const emitter = createGlobalEmitter<{ [key in Event["type"]]: Extract @@ -61,41 +70,56 @@ export const { use: useSDK, provider: SDKProvider } = createSimpleContext({ flush() } - onMount(async () => { - // If an event source is provided, use it instead of SSE - if (props.events) { - const unsub = props.events.on(handleEvent) - onCleanup(unsub) - return - } + function startSSE() { + sse?.abort() + const ctrl = new AbortController() + sse = ctrl + ;(async () => { + while (true) { + if (abort.signal.aborted || ctrl.signal.aborted) break + const events = await sdk.event.subscribe({}, { signal: ctrl.signal }) - // Fall back to SSE - while (true) { - if (abort.signal.aborted) break - const events = await sdk.event.subscribe( - {}, - { - signal: abort.signal, - }, - ) + for await (const event of events.stream) { + if (ctrl.signal.aborted) break + handleEvent(event) + } - for await (const event of events.stream) { - handleEvent(event) + if (timer) clearTimeout(timer) + if (queue.length > 0) flush() } + })().catch(() => {}) + } - // Flush any remaining events - if (timer) clearTimeout(timer) - if (queue.length > 0) { - flush() - } + onMount(() => { + if (props.events) { + const unsub = props.events.on(handleEvent) + onCleanup(unsub) + } else { + startSSE() } }) onCleanup(() => { abort.abort() + sse?.abort() if (timer) clearTimeout(timer) }) - return { client: sdk, event: emitter, url: props.url } + return { + get client() { + return sdk + }, + directory: props.directory, + event: emitter, + fetch: props.fetch ?? 
fetch, + setWorkspace(next?: string) { + if (workspaceID === next) return + workspaceID = next + sdk = createSDK() + props.events?.setWorkspace?.(next) + if (!props.events) startSSE() + }, + url: props.url, + } }, }) diff --git a/packages/opencode/src/cli/cmd/tui/context/sync.tsx b/packages/opencode/src/cli/cmd/tui/context/sync.tsx index 78a6e24b40..3b296a927a 100644 --- a/packages/opencode/src/cli/cmd/tui/context/sync.tsx +++ b/packages/opencode/src/cli/cmd/tui/context/sync.tsx @@ -28,6 +28,7 @@ import { useArgs } from "./args" import { batch, onMount } from "solid-js" import { Log } from "@/util/log" import type { Path } from "@opencode-ai/sdk" +import type { Workspace } from "@opencode-ai/sdk/v2" export const { use: useSync, provider: SyncProvider } = createSimpleContext({ name: "Sync", @@ -73,6 +74,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ formatter: FormatterStatus[] vcs: VcsInfo | undefined path: Path + workspaceList: Workspace[] }>({ provider_next: { all: [], @@ -100,10 +102,17 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ formatter: [], vcs: undefined, path: { state: "", config: "", worktree: "", directory: "" }, + workspaceList: [], }) const sdk = useSDK() + async function syncWorkspaces() { + const result = await sdk.client.experimental.workspace.list().catch(() => undefined) + if (!result?.data) return + setStore("workspaceList", reconcile(result.data)) + } + sdk.event.listen((e) => { const event = e.details switch (event.type) { @@ -336,11 +345,6 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ break } - case "mcp.tools.changed": { - sdk.client.mcp.status().then((x) => setStore("mcp", reconcile(x.data!))).catch(() => {}) - break - } - case "vcs.branch.updated": { setStore("vcs", { branch: event.properties.branch }) break @@ -404,26 +408,21 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ }) .then(() => { if 
(store.status !== "complete") setStore("status", "partial") - // non-blocking — each request catches errors individually so one - // failure doesn't prevent the others from populating the store. - const safe = (p: Promise) => p.catch((e) => { - Log.Default.warn("non-blocking sync request failed", { - error: e instanceof Error ? e.message : String(e), - }) - }) + // non-blocking Promise.all([ - ...(args.continue ? [] : [safe(sessionListPromise.then((sessions) => setStore("session", reconcile(sessions))))]), - safe(sdk.client.command.list().then((x) => setStore("command", reconcile(x.data ?? [])))), - safe(sdk.client.lsp.status().then((x) => setStore("lsp", reconcile(x.data!)))), - safe(sdk.client.mcp.status().then((x) => setStore("mcp", reconcile(x.data!)))), - safe(sdk.client.experimental.resource.list().then((x) => setStore("mcp_resource", reconcile(x.data ?? {})))), - safe(sdk.client.formatter.status().then((x) => setStore("formatter", reconcile(x.data!)))), - safe(sdk.client.session.status().then((x) => { + ...(args.continue ? [] : [sessionListPromise.then((sessions) => setStore("session", reconcile(sessions)))]), + sdk.client.command.list().then((x) => setStore("command", reconcile(x.data ?? []))), + sdk.client.lsp.status().then((x) => setStore("lsp", reconcile(x.data!))), + sdk.client.mcp.status().then((x) => setStore("mcp", reconcile(x.data!))), + sdk.client.experimental.resource.list().then((x) => setStore("mcp_resource", reconcile(x.data ?? {}))), + sdk.client.formatter.status().then((x) => setStore("formatter", reconcile(x.data!))), + sdk.client.session.status().then((x) => { setStore("session_status", reconcile(x.data!)) - })), - safe(sdk.client.provider.auth().then((x) => setStore("provider_auth", reconcile(x.data ?? 
{})))), - safe(sdk.client.vcs.get().then((x) => setStore("vcs", reconcile(x.data)))), - safe(sdk.client.path.get().then((x) => setStore("path", reconcile(x.data!)))), + }), + sdk.client.provider.auth().then((x) => setStore("provider_auth", reconcile(x.data ?? {}))), + sdk.client.vcs.get().then((x) => setStore("vcs", reconcile(x.data))), + sdk.client.path.get().then((x) => setStore("path", reconcile(x.data!))), + syncWorkspaces(), ]).then(() => { setStore("status", "complete") }) @@ -492,6 +491,12 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({ fullSyncedSessions.add(sessionID) }, }, + workspace: { + get(workspaceID: string) { + return store.workspaceList.find((workspace) => workspace.id === workspaceID) + }, + sync: syncWorkspaces, + }, bootstrap, } return result diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/altimate-code.json b/packages/opencode/src/cli/cmd/tui/context/theme/altimate-code.json index 8f585a4509..cd3b05f62d 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/altimate-code.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/altimate-code.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkStep1": "#0a0a0a", "darkStep2": "#141414", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/aura.json b/packages/opencode/src/cli/cmd/tui/context/theme/aura.json index e7798d5203..2a030093a0 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/aura.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/aura.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkBg": "#0f0f0f", "darkBgPanel": "#15141b", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/ayu.json b/packages/opencode/src/cli/cmd/tui/context/theme/ayu.json index a42fce4c4e..6a6069e173 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/ayu.json 
+++ b/packages/opencode/src/cli/cmd/tui/context/theme/ayu.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkBg": "#0B0E14", "darkBgAlt": "#0D1017", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/carbonfox.json b/packages/opencode/src/cli/cmd/tui/context/theme/carbonfox.json index b91de1fea9..87666b2c30 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/carbonfox.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/carbonfox.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "bg0": "#0d0d0d", "bg1": "#161616", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin-frappe.json b/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin-frappe.json index 79e56ee9af..de9060e5c9 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin-frappe.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin-frappe.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "frappeRosewater": "#f2d5cf", "frappeFlamingo": "#eebebe", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin-macchiato.json b/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin-macchiato.json index 6d9827dfe8..71ea8eacbb 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin-macchiato.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin-macchiato.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "macRosewater": "#f4dbd6", "macFlamingo": "#f0c6c6", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin.json b/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin.json index d0fa6a11d9..03b03b10ea 100644 --- 
a/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/catppuccin.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "lightRosewater": "#dc8a78", "lightFlamingo": "#dd7878", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/cobalt2.json b/packages/opencode/src/cli/cmd/tui/context/theme/cobalt2.json index 2967eae58d..746e541808 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/cobalt2.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/cobalt2.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "background": "#193549", "backgroundAlt": "#122738", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/cursor.json b/packages/opencode/src/cli/cmd/tui/context/theme/cursor.json index ab518dbe7e..6f09b64e14 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/cursor.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/cursor.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkBg": "#181818", "darkPanel": "#141414", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/dracula.json b/packages/opencode/src/cli/cmd/tui/context/theme/dracula.json index c837a0b582..75c01bfa31 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/dracula.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/dracula.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "background": "#282a36", "currentLine": "#44475a", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/everforest.json b/packages/opencode/src/cli/cmd/tui/context/theme/everforest.json index 62dfb31ba8..7ceed1bcb3 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/everforest.json +++ 
b/packages/opencode/src/cli/cmd/tui/context/theme/everforest.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkStep1": "#2d353b", "darkStep2": "#333c43", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/flexoki.json b/packages/opencode/src/cli/cmd/tui/context/theme/flexoki.json index e525705dd1..405ebf25db 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/flexoki.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/flexoki.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "black": "#100F0F", "base950": "#1C1B1A", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/github.json b/packages/opencode/src/cli/cmd/tui/context/theme/github.json index 99a80879e1..e65a1a5ea8 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/github.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/github.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkBg": "#0d1117", "darkBgAlt": "#010409", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/gruvbox.json b/packages/opencode/src/cli/cmd/tui/context/theme/gruvbox.json index dcae302581..e31c7626c3 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/gruvbox.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/gruvbox.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkBg0": "#282828", "darkBg1": "#3c3836", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/kanagawa.json b/packages/opencode/src/cli/cmd/tui/context/theme/kanagawa.json index 91a784014a..cad1c598de 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/kanagawa.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/kanagawa.json @@ -1,5 +1,5 @@ { - "$schema": 
"https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "sumiInk0": "#1F1F28", "sumiInk1": "#2A2A37", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/lucent-orng.json b/packages/opencode/src/cli/cmd/tui/context/theme/lucent-orng.json index 036dedf2ef..356886e7a7 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/lucent-orng.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/lucent-orng.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkStep6": "#3c3c3c", "darkStep11": "#808080", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/material.json b/packages/opencode/src/cli/cmd/tui/context/theme/material.json index c3a1068085..c095c3dec6 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/material.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/material.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkBg": "#263238", "darkBgAlt": "#1e272c", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/matrix.json b/packages/opencode/src/cli/cmd/tui/context/theme/matrix.json index 3549462845..ba894012f2 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/matrix.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/matrix.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "matrixInk0": "#0a0e0a", "matrixInk1": "#0e130d", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/mercury.json b/packages/opencode/src/cli/cmd/tui/context/theme/mercury.json index dfd4f35298..812056f47f 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/mercury.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/mercury.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": 
"https://altimate.ai/theme.json", "defs": { "purple-800": "#3442a6", "purple-700": "#465bd1", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/monokai.json b/packages/opencode/src/cli/cmd/tui/context/theme/monokai.json index 09637a1e2d..1b32e3d2ce 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/monokai.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/monokai.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "background": "#272822", "backgroundAlt": "#1e1f1c", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/nightowl.json b/packages/opencode/src/cli/cmd/tui/context/theme/nightowl.json index 24c74733dd..9a67d52921 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/nightowl.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/nightowl.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "nightOwlBg": "#011627", "nightOwlFg": "#d6deeb", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/nord.json b/packages/opencode/src/cli/cmd/tui/context/theme/nord.json index 4a525382a3..2a49fede75 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/nord.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/nord.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "nord0": "#2E3440", "nord1": "#3B4252", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/one-dark.json b/packages/opencode/src/cli/cmd/tui/context/theme/one-dark.json index 73b24e9292..da3c9f5227 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/one-dark.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/one-dark.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkBg": "#282c34", "darkBgAlt": "#21252b", diff 
--git a/packages/opencode/src/cli/cmd/tui/context/theme/opencode.json b/packages/opencode/src/cli/cmd/tui/context/theme/opencode.json index 8f585a4509..cd3b05f62d 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/opencode.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/opencode.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkStep1": "#0a0a0a", "darkStep2": "#141414", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/orng.json b/packages/opencode/src/cli/cmd/tui/context/theme/orng.json index 1fc602f2c8..5d24aa8236 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/orng.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/orng.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkStep1": "#0a0a0a", "darkStep2": "#141414", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/osaka-jade.json b/packages/opencode/src/cli/cmd/tui/context/theme/osaka-jade.json index 1c9de92af6..5c0cb9c86d 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/osaka-jade.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/osaka-jade.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkBg0": "#111c18", "darkBg1": "#1a2520", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/palenight.json b/packages/opencode/src/cli/cmd/tui/context/theme/palenight.json index 79f7c59e85..7afb0d9d0a 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/palenight.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/palenight.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "background": "#292d3e", "backgroundAlt": "#1e2132", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/rosepine.json 
b/packages/opencode/src/cli/cmd/tui/context/theme/rosepine.json index 444cdbd135..bc0c7ce96b 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/rosepine.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/rosepine.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "base": "#191724", "surface": "#1f1d2e", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/solarized.json b/packages/opencode/src/cli/cmd/tui/context/theme/solarized.json index e4de113674..67cdec5d19 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/solarized.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/solarized.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "base03": "#002b36", "base02": "#073642", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/synthwave84.json b/packages/opencode/src/cli/cmd/tui/context/theme/synthwave84.json index d25bf3b49d..89b7dae8b1 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/synthwave84.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/synthwave84.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "background": "#262335", "backgroundAlt": "#1e1a29", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/tokyonight.json b/packages/opencode/src/cli/cmd/tui/context/theme/tokyonight.json index 1c9503a420..7519e11713 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/tokyonight.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/tokyonight.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "darkStep1": "#1a1b26", "darkStep2": "#1e2030", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/vercel.json b/packages/opencode/src/cli/cmd/tui/context/theme/vercel.json 
index 86b965b10b..550a92cff4 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/vercel.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/vercel.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "background100": "#0A0A0A", "background200": "#000000", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/vesper.json b/packages/opencode/src/cli/cmd/tui/context/theme/vesper.json index 758c8f20c1..e1adb6481c 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/vesper.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/vesper.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "vesperBg": "#101010", "vesperFg": "#FFF", diff --git a/packages/opencode/src/cli/cmd/tui/context/theme/zenburn.json b/packages/opencode/src/cli/cmd/tui/context/theme/zenburn.json index c4475923bb..04c9919e61 100644 --- a/packages/opencode/src/cli/cmd/tui/context/theme/zenburn.json +++ b/packages/opencode/src/cli/cmd/tui/context/theme/zenburn.json @@ -1,5 +1,5 @@ { - "$schema": "https://opencode.ai/theme.json", + "$schema": "https://altimate.ai/theme.json", "defs": { "bg": "#3f3f3f", "bgAlt": "#4f4f4f", diff --git a/packages/opencode/src/cli/cmd/tui/event.ts b/packages/opencode/src/cli/cmd/tui/event.ts index 9466ae54f2..b2e4b92c55 100644 --- a/packages/opencode/src/cli/cmd/tui/event.ts +++ b/packages/opencode/src/cli/cmd/tui/event.ts @@ -1,5 +1,6 @@ import { BusEvent } from "@/bus/bus-event" import { Bus } from "@/bus" +import { SessionID } from "@/session/schema" import z from "zod" export const TuiEvent = { @@ -42,7 +43,7 @@ export const TuiEvent = { SessionSelect: BusEvent.define( "tui.session.select", z.object({ - sessionID: z.string().regex(/^ses/).describe("Session ID to navigate to"), + sessionID: SessionID.zod.describe("Session ID to navigate to"), }), ), } diff --git 
a/packages/opencode/src/cli/cmd/tui/routes/home.tsx b/packages/opencode/src/cli/cmd/tui/routes/home.tsx index c011f6c624..e76e165b26 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/home.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/home.tsx @@ -1,5 +1,5 @@ import { Prompt, type PromptRef } from "@tui/component/prompt" -import { createMemo, Match, onMount, Show, Switch } from "solid-js" +import { createEffect, createMemo, Match, on, onMount, Show, Switch } from "solid-js" import { useTheme } from "@tui/context/theme" import { useKeybind } from "@tui/context/keybind" import { Logo } from "../component/logo" @@ -14,6 +14,7 @@ import { usePromptRef } from "../context/prompt" import { Installation } from "@/installation" import { useKV } from "../context/kv" import { useCommandDialog } from "../component/dialog-command" +import { useLocal } from "../context/local" // TODO: what is the best way to do this? let once = false @@ -76,6 +77,7 @@ export function Home() { let prompt: PromptRef const args = useArgs() + const local = useLocal() onMount(() => { if (once) return if (route.initialPrompt) { @@ -84,9 +86,21 @@ export function Home() { } else if (args.prompt) { prompt.set({ input: args.prompt, parts: [] }) once = true - prompt.submit() } }) + + // Wait for sync and model store to be ready before auto-submitting --prompt + createEffect( + on( + () => sync.ready && local.model.ready, + (ready) => { + if (!ready) return + if (!args.prompt) return + if (prompt.current?.input !== args.prompt) return + prompt.submit() + }, + ), + ) const directory = useDirectory() const keybind = useKeybind() @@ -107,6 +121,7 @@ export function Home() { promptRef.set(r) }} hint={Hint} + workspaceID={route.workspaceID} /> diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/header.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/header.tsx index 0c5ea9a857..f64dbe533a 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/header.tsx +++ 
b/packages/opencode/src/cli/cmd/tui/routes/session/header.tsx @@ -7,6 +7,7 @@ import { SplitBorder } from "@tui/component/border" import type { AssistantMessage, Session } from "@opencode-ai/sdk/v2" import { useCommandDialog } from "@tui/component/dialog-command" import { useKeybind } from "../../context/keybind" +import { Flag } from "@/flag/flag" import { useTerminalDimensions } from "@opentui/solid" const Title = (props: { session: Accessor }) => { @@ -29,6 +30,17 @@ const ContextInfo = (props: { context: Accessor; cost: Acces ) } +const WorkspaceInfo = (props: { workspace: Accessor }) => { + const { theme } = useTheme() + return ( + + + {props.workspace()} + + + ) +} + export function Header() { const route = useRouteData("session") const sync = useSync() @@ -59,6 +71,14 @@ export function Header() { return result }) + const workspace = createMemo(() => { + const id = session()?.workspaceID + if (!id) return "Workspace local" + const info = sync.workspace.get(id) + if (!info) return `Workspace ${id}` + return `Workspace ${id} (${info.type})` + }) + const { theme } = useTheme() const keybind = useKeybind() const command = useCommandDialog() @@ -83,9 +103,19 @@ export function Header() { - - Subagent session - + {Flag.OPENCODE_EXPERIMENTAL_WORKSPACES ? ( + + + Subagent session + + + + ) : ( + + Subagent session + + )} + @@ -124,7 +154,14 @@ export function Header() { - + {Flag.OPENCODE_EXPERIMENTAL_WORKSPACES ? 
( + <box flexDirection="column"> + <Title session={session} /> + <WorkspaceInfo workspace={workspace} /> + </box> + ) : ( + <Title session={session} /> + )} <ContextInfo context={context} cost={cost} /> </box> </Match> diff --git a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx index c49850df9c..ab5883f8d8 100644 --- a/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx +++ b/packages/opencode/src/cli/cmd/tui/routes/session/index.tsx @@ -182,6 +182,12 @@ export function Session() { return new CustomSpeedScroll(3) }) + createEffect(() => { + if (session()?.workspaceID) { + sdk.setWorkspace(session()?.workspaceID) + } + }) + createEffect(async () => { await sync.session .sync(route.sessionID) @@ -377,7 +383,12 @@ export function Session() { sessionID: route.sessionID, }) .then((res) => copy(res.data!.share!.url)) - .catch(() => toast.show({ message: "Failed to share session", variant: "error" })) + .catch((error) => { + toast.show({ + message: error instanceof Error ? error.message : "Failed to share session", + variant: "error", + }) + }) dialog.clear() }, }, @@ -480,7 +491,12 @@ export function Session() { sessionID: route.sessionID, }) .then(() => toast.show({ message: "Session unshared successfully", variant: "success" })) - .catch(() => toast.show({ message: "Failed to unshare session", variant: "error" })) + .catch((error) => { + toast.show({ + message: error instanceof Error ? 
error.message : "Failed to unshare session", + variant: "error", + }) + }) dialog.clear() }, }, diff --git a/packages/opencode/src/cli/cmd/tui/thread.ts b/packages/opencode/src/cli/cmd/tui/thread.ts index 4d767ca5b5..6e787c7afd 100644 --- a/packages/opencode/src/cli/cmd/tui/thread.ts +++ b/packages/opencode/src/cli/cmd/tui/thread.ts @@ -5,13 +5,15 @@ import { type rpc } from "./worker" import path from "path" import { fileURLToPath } from "url" import { UI } from "@/cli/ui" -import { iife } from "@/util/iife" import { Log } from "@/util/log" +import { withTimeout } from "@/util/timeout" import { withNetworkOptions, resolveNetworkOptions } from "@/cli/network" import { Filesystem } from "@/util/filesystem" import type { Event } from "@opencode-ai/sdk/v2" import type { EventSource } from "./context/sdk" import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32" +import { TuiConfig } from "@/config/tui" +import { Instance } from "@/project/instance" declare global { const OPENCODE_WORKER_PATH: string @@ -40,17 +42,34 @@ function createWorkerFetch(client: RpcClient): typeof fetch { function createEventSource(client: RpcClient): EventSource { return { on: (handler) => client.on<Event>("event", handler), + setWorkspace: (workspaceID) => { + void client.call("setWorkspace", { workspaceID }) + }, } } +async function target() { + if (typeof OPENCODE_WORKER_PATH !== "undefined") return OPENCODE_WORKER_PATH + const dist = new URL("./cli/cmd/tui/worker.js", import.meta.url) + if (await Filesystem.exists(fileURLToPath(dist))) return dist + return new URL("./worker.ts", import.meta.url) +} + +async function input(value?: string) { + const piped = process.stdin.isTTY ? 
undefined : await Bun.stdin.text() + if (!value) return piped + if (!piped) return value + return piped + "\n" + value +} + export const TuiThreadCommand = cmd({ command: "$0 [project]", - describe: "start altimate tui", + describe: "start opencode tui", builder: (yargs) => withNetworkOptions(yargs) .positional("project", { type: "string", - describe: "path to start altimate in", + describe: "path to start opencode in", }) .option("model", { type: "string", @@ -94,98 +113,110 @@ export const TuiThreadCommand = cmd({ return } - // Resolve relative paths against PWD to preserve behavior when using --cwd flag - const baseCwd = process.env.PWD ?? process.cwd() - const cwd = args.project ? path.resolve(baseCwd, args.project) : process.cwd() - const localWorker = new URL("./worker.ts", import.meta.url) - const distWorker = new URL("./cli/cmd/tui/worker.js", import.meta.url) - const workerPath = await iife(async () => { - if (typeof OPENCODE_WORKER_PATH !== "undefined") return OPENCODE_WORKER_PATH - if (await Filesystem.exists(fileURLToPath(distWorker))) return distWorker - return localWorker - }) + // Resolve relative --project paths from PWD, then use the real cwd after + // chdir so the thread and worker share the same directory key. + const root = Filesystem.resolve(process.env.PWD ?? process.cwd()) + const next = args.project + ? Filesystem.resolve(path.isAbsolute(args.project) ? 
args.project : path.join(root, args.project)) + : Filesystem.resolve(process.cwd()) + const file = await target() try { - process.chdir(cwd) - } catch (e) { - UI.error("Failed to change directory to " + cwd) + process.chdir(next) + } catch { + UI.error("Failed to change directory to " + next) return } + const cwd = Filesystem.resolve(process.cwd()) - const worker = new Worker(workerPath, { + const worker = new Worker(file, { env: Object.fromEntries( Object.entries(process.env).filter((entry): entry is [string, string] => entry[1] !== undefined), ), }) worker.onerror = (e) => { - Log.Default.error(e.message, { error: e.error?.stack ?? e.error ?? String(e) }) + Log.Default.error(e) } + const client = Rpc.client<typeof rpc>(worker) - process.on("uncaughtException", (e) => { + const error = (e: unknown) => { Log.Default.error(e) - }) - process.on("unhandledRejection", (e) => { - Log.Default.error(e) - }) - process.on("SIGUSR2", async () => { - await client.call("reload", undefined) - }) + } + const reload = () => { + client.call("reload", undefined).catch((err) => { + Log.Default.warn("worker reload failed", { + error: err instanceof Error ? err.message : String(err), + }) + }) + } + process.on("uncaughtException", error) + process.on("unhandledRejection", error) + process.on("SIGUSR2", reload) + + let stopped = false + const stop = async () => { + if (stopped) return + stopped = true + process.off("uncaughtException", error) + process.off("unhandledRejection", error) + process.off("SIGUSR2", reload) + await withTimeout(client.call("shutdown", undefined), 5000).catch((error) => { + Log.Default.warn("worker shutdown failed", { + error: error instanceof Error ? error.message : String(error), + }) + }) + worker.terminate() + } - const prompt = await iife(async () => { - const piped = !process.stdin.isTTY ? await Bun.stdin.text() : undefined - if (!args.prompt) return piped - return piped ? 
piped + "\n" + args.prompt : args.prompt + const prompt = await input(args.prompt) + const config = await Instance.provide({ + directory: cwd, + fn: () => TuiConfig.get(), }) - // Check if server should be started (port or hostname explicitly set in CLI or config) - const networkOpts = await resolveNetworkOptions(args) - const shouldStartServer = + const network = await resolveNetworkOptions(args) + const external = process.argv.includes("--port") || process.argv.includes("--hostname") || process.argv.includes("--mdns") || - networkOpts.mdns || - networkOpts.port !== 0 || - networkOpts.hostname !== "127.0.0.1" - - let url: string - let customFetch: typeof fetch | undefined - let events: EventSource | undefined - - if (shouldStartServer) { - // Start HTTP server for external access - const server = await client.call("server", networkOpts) - url = server.url - } else { - // Use direct RPC communication (no HTTP) - url = "http://altimate-code.internal" - customFetch = createWorkerFetch(client) - events = createEventSource(client) - } - - const tuiConfig = await client.call("tuiConfig", undefined) - - const tuiPromise = tui({ - url, - fetch: customFetch, - events, - tuiConfig, - args: { - continue: args.continue, - sessionID: args.session, - agent: args.agent, - model: args.model, - prompt, - fork: args.fork, - }, - onExit: async () => { - await client.call("shutdown", undefined) - }, - }) + network.mdns || + network.port !== 0 || + network.hostname !== "127.0.0.1" + + const transport = external + ? 
{ + url: (await client.call("server", network)).url, + fetch: undefined, + events: undefined, + } + : { + url: "http://opencode.internal", + fetch: createWorkerFetch(client), + events: createEventSource(client), + } setTimeout(() => { client.call("checkUpgrade", { directory: cwd }).catch(() => {}) - }, 1000) + }, 1000).unref?.() - await tuiPromise + try { + await tui({ + url: transport.url, + config, + directory: cwd, + fetch: transport.fetch, + events: transport.events, + args: { + continue: args.continue, + sessionID: args.session, + agent: args.agent, + model: args.model, + prompt, + fork: args.fork, + }, + }) + } finally { + await stop() + } } finally { unguard?.() } diff --git a/packages/opencode/src/cli/cmd/tui/ui/dialog-export-options.tsx b/packages/opencode/src/cli/cmd/tui/ui/dialog-export-options.tsx index 1e8d09bb0b..d29fe05ee9 100644 --- a/packages/opencode/src/cli/cmd/tui/ui/dialog-export-options.tsx +++ b/packages/opencode/src/cli/cmd/tui/ui/dialog-export-options.tsx @@ -56,7 +56,7 @@ export function DialogExportOptions(props: DialogExportOptionsProps) { setStore("active", order[nextIndex]) evt.preventDefault() } - if (evt.name === "space") { + if (evt.name === "space" || evt.name === " ") { if (store.active === "thinking") setStore("thinking", !store.thinking) if (store.active === "toolDetails") setStore("toolDetails", !store.toolDetails) if (store.active === "assistantMetadata") setStore("assistantMetadata", !store.assistantMetadata) diff --git a/packages/opencode/src/cli/cmd/tui/util/clipboard.ts b/packages/opencode/src/cli/cmd/tui/util/clipboard.ts index 412ec654ff..85e13d3133 100644 --- a/packages/opencode/src/cli/cmd/tui/util/clipboard.ts +++ b/packages/opencode/src/cli/cmd/tui/util/clipboard.ts @@ -1,9 +1,9 @@ -import { $ } from "bun" import { platform, release } from "os" import clipboardy from "clipboardy" import { lazy } from "../../../../util/lazy.js" import { tmpdir } from "os" import path from "path" +import fs from "fs/promises" import { 
Filesystem } from "../../../../util/filesystem" import { Process } from "../../../../util/process" import { which } from "../../../../util/which" @@ -34,23 +34,38 @@ export namespace Clipboard { if (os === "darwin") { const tmpfile = path.join(tmpdir(), "opencode-clipboard.png") try { - await $`osascript -e 'set imageData to the clipboard as "PNGf"' -e 'set fileRef to open for access POSIX file "${tmpfile}" with write permission' -e 'set eof fileRef to 0' -e 'write imageData to fileRef' -e 'close access fileRef'` - .nothrow() - .quiet() + await Process.run( + [ + "osascript", + "-e", + 'set imageData to the clipboard as "PNGf"', + "-e", + `set fileRef to open for access POSIX file "${tmpfile}" with write permission`, + "-e", + "set eof fileRef to 0", + "-e", + "write imageData to fileRef", + "-e", + "close access fileRef", + ], + { nothrow: true }, + ) const buffer = await Filesystem.readBytes(tmpfile) return { data: buffer.toString("base64"), mime: "image/png" } } catch { } finally { - await $`rm -f "${tmpfile}"`.nothrow().quiet() + await fs.rm(tmpfile, { force: true }).catch(() => {}) } } if (os === "win32" || release().includes("WSL")) { const script = "Add-Type -AssemblyName System.Windows.Forms; $img = [System.Windows.Forms.Clipboard]::GetImage(); if ($img) { $ms = New-Object System.IO.MemoryStream; $img.Save($ms, [System.Drawing.Imaging.ImageFormat]::Png); [System.Convert]::ToBase64String($ms.ToArray()) }" - const base64 = await $`powershell.exe -NonInteractive -NoProfile -command "${script}"`.nothrow().text() - if (base64) { - const imageBuffer = Buffer.from(base64.trim(), "base64") + const base64 = await Process.text(["powershell.exe", "-NonInteractive", "-NoProfile", "-command", script], { + nothrow: true, + }) + if (base64.text) { + const imageBuffer = Buffer.from(base64.text.trim(), "base64") if (imageBuffer.length > 0) { return { data: imageBuffer.toString("base64"), mime: "image/png" } } @@ -58,13 +73,15 @@ export namespace Clipboard { } if (os === 
"linux") { - const wayland = await $`wl-paste -t image/png`.nothrow().arrayBuffer() - if (wayland && wayland.byteLength > 0) { - return { data: Buffer.from(wayland).toString("base64"), mime: "image/png" } + const wayland = await Process.run(["wl-paste", "-t", "image/png"], { nothrow: true }) + if (wayland.stdout.byteLength > 0) { + return { data: Buffer.from(wayland.stdout).toString("base64"), mime: "image/png" } } - const x11 = await $`xclip -selection clipboard -t image/png -o`.nothrow().arrayBuffer() - if (x11 && x11.byteLength > 0) { - return { data: Buffer.from(x11).toString("base64"), mime: "image/png" } + const x11 = await Process.run(["xclip", "-selection", "clipboard", "-t", "image/png", "-o"], { + nothrow: true, + }) + if (x11.stdout.byteLength > 0) { + return { data: Buffer.from(x11.stdout).toString("base64"), mime: "image/png" } } } @@ -81,7 +98,7 @@ export namespace Clipboard { console.log("clipboard: using osascript") return async (text: string) => { const escaped = text.replace(/\\/g, "\\\\").replace(/"/g, '\\"') - await $`osascript -e 'set the clipboard to "${escaped}"'`.nothrow().quiet() + await Process.run(["osascript", "-e", `set the clipboard to "${escaped}"`], { nothrow: true }) } } diff --git a/packages/opencode/src/cli/cmd/tui/worker.ts b/packages/opencode/src/cli/cmd/tui/worker.ts index 78ca34b0cc..408350c520 100644 --- a/packages/opencode/src/cli/cmd/tui/worker.ts +++ b/packages/opencode/src/cli/cmd/tui/worker.ts @@ -10,7 +10,6 @@ import { GlobalBus } from "@/bus/global" import { createOpencodeClient, type Event } from "@opencode-ai/sdk/v2" import type { BunWebSocketData } from "hono/bun" import { Flag } from "@/flag/flag" -import { Telemetry } from "@/telemetry" import { setTimeout as sleep } from "node:timers/promises" await Log.init({ @@ -34,9 +33,6 @@ process.on("uncaughtException", (e) => { }) }) -// Initialize telemetry early so MCP/engine events are captured before session starts -Telemetry.init().catch(() => {}) - // Subscribe to 
global events and forward them via RPC GlobalBus.on("event", (event) => { Rpc.emit("global.event", event) @@ -48,7 +44,7 @@ const eventStream = { abort: undefined as AbortController | undefined, } -const startEventStream = (directory: string) => { +const startEventStream = (input: { directory: string; workspaceID?: string }) => { if (eventStream.abort) eventStream.abort.abort() const abort = new AbortController() eventStream.abort = abort @@ -58,12 +54,13 @@ const startEventStream = (directory: string) => { const request = new Request(input, init) const auth = getAuthorizationHeader() if (auth) request.headers.set("Authorization", auth) - return Server.App().fetch(request) + return Server.Default().fetch(request) }) as typeof globalThis.fetch const sdk = createOpencodeClient({ - baseUrl: "http://altimate-code.internal", - directory, + baseUrl: "http://opencode.internal", + directory: input.directory, + experimental_workspaceID: input.workspaceID, fetch: fetchFn, signal, }) @@ -99,7 +96,7 @@ const startEventStream = (directory: string) => { }) } -startEventStream(process.cwd()) +startEventStream({ directory: process.cwd() }) export const rpc = { async fetch(input: { url: string; method: string; headers: Record<string, string>; body?: string }) { @@ -113,7 +110,7 @@ export const rpc = { headers, body: input.body, }) - const response = await Server.App().fetch(request) + const response = await Server.Default().fetch(request) const body = await response.text() return { status: response.status, @@ -135,25 +132,18 @@ export const rpc = { }, }) }, - async tuiConfig() { - const response = await Server.App().fetch(new Request("http://altimate-code.internal/config/tui")) - return response.json() - }, async reload() { Config.global.reset() await Instance.disposeAll() }, + async setWorkspace(input: { workspaceID?: string }) { + startEventStream({ directory: process.cwd(), workspaceID: input.workspaceID }) + }, async shutdown() { Log.Default.info("worker shutting down") if 
(eventStream.abort) eventStream.abort.abort() - await Promise.race([ - Instance.disposeAll(), - new Promise((resolve) => { - setTimeout(resolve, 5000) - }), - ]) + await Instance.disposeAll() if (server) server.stop(true) - await Telemetry.shutdown() }, } @@ -162,6 +152,6 @@ Rpc.listen(rpc) function getAuthorizationHeader(): string | undefined { const password = Flag.OPENCODE_SERVER_PASSWORD if (!password) return undefined - const username = Flag.OPENCODE_SERVER_USERNAME ?? "altimate" + const username = Flag.OPENCODE_SERVER_USERNAME ?? "opencode" return `Basic ${btoa(`${username}:${password}`)}` } diff --git a/packages/opencode/src/cli/cmd/uninstall.ts b/packages/opencode/src/cli/cmd/uninstall.ts index d0767c93b9..e3eb43d927 100644 --- a/packages/opencode/src/cli/cmd/uninstall.ts +++ b/packages/opencode/src/cli/cmd/uninstall.ts @@ -3,11 +3,11 @@ import { UI } from "../ui" import * as prompts from "@clack/prompts" import { Installation } from "../../installation" import { Global } from "../../global" -import { $ } from "bun" import fs from "fs/promises" import path from "path" import os from "os" import { Filesystem } from "../../util/filesystem" +import { Process } from "../../util/process" interface UninstallArgs { keepConfig: boolean @@ -24,7 +24,7 @@ interface RemovalTargets { export const UninstallCommand = { command: "uninstall", - describe: "uninstall altimate and remove all related files", + describe: "uninstall opencode and remove all related files", builder: (yargs: Argv) => yargs .option("keep-config", { @@ -55,7 +55,7 @@ export const UninstallCommand = { UI.empty() UI.println(UI.logo(" ")) UI.empty() - prompts.intro("Uninstall Altimate CLI") + prompts.intro("Uninstall Altimate Code") const method = await Installation.method() prompts.log.info(`Installation method: ${method}`) @@ -129,13 +129,13 @@ async function showRemovalSummary(targets: RemovalTargets, method: Installation. 
if (method !== "curl" && method !== "unknown") { const cmds: Record<string, string> = { - npm: "npm uninstall -g @opencode-ai/opencode", - pnpm: "pnpm uninstall -g @opencode-ai/opencode", - bun: "bun remove -g @opencode-ai/opencode", - yarn: "yarn global remove @opencode-ai/opencode", - brew: "brew uninstall altimate", - choco: "choco uninstall altimate", - scoop: "scoop uninstall altimate", + npm: "npm uninstall -g opencode-ai", + pnpm: "pnpm uninstall -g opencode-ai", + bun: "bun remove -g opencode-ai", + yarn: "yarn global remove opencode-ai", + brew: "brew uninstall opencode", + choco: "choco uninstall opencode", + scoop: "scoop uninstall opencode", } prompts.log.info(` ✓ Package: ${cmds[method] || method}`) } @@ -180,28 +180,25 @@ async function executeUninstall(method: Installation.Method, targets: RemovalTar if (method !== "curl" && method !== "unknown") { const cmds: Record<string, string[]> = { - npm: ["npm", "uninstall", "-g", "@opencode-ai/opencode"], - pnpm: ["pnpm", "uninstall", "-g", "@opencode-ai/opencode"], - bun: ["bun", "remove", "-g", "@opencode-ai/opencode"], - yarn: ["yarn", "global", "remove", "@opencode-ai/opencode"], - brew: ["brew", "uninstall", "altimate"], - choco: ["choco", "uninstall", "altimate"], - scoop: ["scoop", "uninstall", "altimate"], + npm: ["npm", "uninstall", "-g", "opencode-ai"], + pnpm: ["pnpm", "uninstall", "-g", "opencode-ai"], + bun: ["bun", "remove", "-g", "opencode-ai"], + yarn: ["yarn", "global", "remove", "opencode-ai"], + brew: ["brew", "uninstall", "opencode"], + choco: ["choco", "uninstall", "opencode"], + scoop: ["scoop", "uninstall", "opencode"], } const cmd = cmds[method] if (cmd) { spinner.start(`Running ${cmd.join(" ")}...`) - const result = - method === "choco" - ? 
await $`echo Y | choco uninstall altimate -y -r`.quiet().nothrow() - : await $`${cmd}`.quiet().nothrow() - if (result.exitCode !== 0) { - spinner.stop(`Package manager uninstall failed: exit code ${result.exitCode}`, 1) - if ( - method === "choco" && - result.stdout.toString("utf8").includes("not running from an elevated command shell") - ) { + const result = await Process.run(method === "choco" ? ["choco", "uninstall", "opencode", "-y", "-r"] : cmd, { + nothrow: true, + }) + if (result.code !== 0) { + spinner.stop(`Package manager uninstall failed: exit code ${result.code}`, 1) + const text = `${result.stdout.toString("utf8")}\n${result.stderr.toString("utf8")}` + if (method === "choco" && text.includes("not running from an elevated command shell")) { prompts.log.warn(`You may need to run '${cmd.join(" ")}' from an elevated command shell`) } else { prompts.log.warn(`You may need to run manually: ${cmd.join(" ")}`) @@ -232,7 +229,7 @@ async function executeUninstall(method: Installation.Method, targets: RemovalTar } UI.empty() - prompts.log.success("Thank you for using Altimate CLI!") + prompts.log.success("Thank you for using Altimate Code!") } async function getShellConfigFile(): Promise<string | null> { @@ -269,7 +266,7 @@ async function getShellConfigFile(): Promise<string | null> { if (!exists) continue const content = await Filesystem.readText(file).catch(() => "") - if (content.includes("# altimate-code") || content.includes(".opencode/bin")) { + if (content.includes("# opencode") || content.includes(".opencode/bin")) { return file } } @@ -287,7 +284,7 @@ async function cleanShellConfig(file: string) { for (const line of lines) { const trimmed = line.trim() - if (trimmed === "# altimate-code") { + if (trimmed === "# opencode") { skip = true continue } diff --git a/packages/opencode/src/cli/effect/prompt.ts b/packages/opencode/src/cli/effect/prompt.ts new file mode 100644 index 0000000000..7f9cd8cfe6 --- /dev/null +++ 
b/packages/opencode/src/cli/effect/prompt.ts @@ -0,0 +1,25 @@ +import * as prompts from "@clack/prompts" +import { Effect, Option } from "effect" + +export const intro = (msg: string) => Effect.sync(() => prompts.intro(msg)) +export const outro = (msg: string) => Effect.sync(() => prompts.outro(msg)) + +export const log = { + info: (msg: string) => Effect.sync(() => prompts.log.info(msg)), +} + +export const select = <Value>(opts: Parameters<typeof prompts.select<Value>>[0]) => + Effect.tryPromise(() => prompts.select(opts)).pipe( + Effect.map((result) => { + if (prompts.isCancel(result)) return Option.none<Value>() + return Option.some(result) + }), + ) + +export const spinner = () => { + const s = prompts.spinner() + return { + start: (msg: string) => Effect.sync(() => s.start(msg)), + stop: (msg: string, code?: number) => Effect.sync(() => s.stop(msg, code)), + } +} diff --git a/packages/opencode/src/command/index.ts b/packages/opencode/src/command/index.ts index 7b56220556..4fb9348b7c 100644 --- a/packages/opencode/src/command/index.ts +++ b/packages/opencode/src/command/index.ts @@ -1,4 +1,5 @@ import { BusEvent } from "@/bus/bus-event" +import { SessionID, MessageID } from "@/session/schema" import z from "zod" import { Config } from "../config/config" import { Instance } from "../project/instance" @@ -17,9 +18,9 @@ export namespace Command { "command.executed", z.object({ name: z.string(), - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, arguments: z.string(), - messageID: Identifier.schema("message"), + messageID: MessageID.zod, }), ), } diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 35c5a59468..1ab39ad18a 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -12,6 +12,7 @@ import { lazy } from "../util/lazy" import { NamedError } from "@opencode-ai/util/error" import { Flag } from "../flag/flag" import { Auth } from "../auth" +import { 
Env } from "../env" import { type ParseError as JsoncParseError, applyEdits, @@ -32,9 +33,11 @@ import { Glob } from "../util/glob" import { PackageRegistry } from "@/bun/registry" import { proxied } from "@/util/proxied" import { iife } from "@/util/iife" -import { Control } from "@/control" +import { Account } from "@/account" import { ConfigPaths } from "./paths" import { Filesystem } from "@/util/filesystem" +import { Process } from "@/util/process" +import { Lock } from "@/util/lock" export namespace Config { const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) @@ -108,10 +111,6 @@ export namespace Config { } } - const token = await Control.token() - if (token) { - } - // Global user config overrides remote config. result = mergeConfigConcatArrays(result, await global()) @@ -182,6 +181,32 @@ export namespace Config { log.debug("loaded custom config from OPENCODE_CONFIG_CONTENT") } + const active = Account.active() + if (active?.active_org_id) { + try { + const [config, token] = await Promise.all([ + Account.config(active.id, active.active_org_id), + Account.token(active.id), + ]) + if (token) { + process.env["OPENCODE_CONSOLE_TOKEN"] = token + Env.set("OPENCODE_CONSOLE_TOKEN", token) + } + + if (config) { + result = mergeConfigConcatArrays( + result, + await load(JSON.stringify(config), { + dir: path.dirname(`${active.url}/api/config`), + source: `${active.url}/api/config`, + }), + ) + } + } catch (err: any) { + log.debug("failed to fetch remote account config", { error: err?.message ?? 
err }) + } + } + // Load managed config files last (highest priority) - enterprise admin-controlled // Kept separate from directories array to avoid write operations when installing plugins // which would fail on system directories requiring elevated permissions @@ -271,6 +296,7 @@ export namespace Config { // Install any additional dependencies defined in the package.json // This allows local plugins and custom tools to use external packages + using _ = await Lock.write("bun-install") await BunProc.run( [ "install", @@ -279,6 +305,26 @@ export namespace Config { ], { cwd: dir }, ).catch((err) => { + if (err instanceof Process.RunFailedError) { + const detail = { + dir, + cmd: err.cmd, + code: err.code, + stdout: err.stdout.toString(), + stderr: err.stderr.toString(), + } + if (Flag.OPENCODE_STRICT_CONFIG_DEPS) { + log.error("failed to install dependencies", detail) + throw err + } + log.warn("failed to install dependencies", detail) + return + } + + if (Flag.OPENCODE_STRICT_CONFIG_DEPS) { + log.error("failed to install dependencies", { dir, error: err }) + throw err + } log.warn("failed to install dependencies", { dir, error: err }) }) } @@ -978,6 +1024,14 @@ export namespace Config { .describe( "Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout.", ), + chunkTimeout: z + .number() + .int() + .positive() + .optional() + .describe( + "Timeout in milliseconds between streamed SSE chunks for this provider. 
If no chunk arrives within this window, the request is aborted.", + ), }) .catchall(z.any()) .optional(), diff --git a/packages/opencode/src/control-plane/schema.ts b/packages/opencode/src/control-plane/schema.ts new file mode 100644 index 0000000000..7618f46ad4 --- /dev/null +++ b/packages/opencode/src/control-plane/schema.ts @@ -0,0 +1,17 @@ +import { Schema } from "effect" +import z from "zod" + +import { withStatics } from "@/util/schema" +import { Identifier } from "@/id/id" + +const workspaceIdSchema = Schema.String.pipe(Schema.brand("WorkspaceID")) + +export type WorkspaceID = typeof workspaceIdSchema.Type + +export const WorkspaceID = workspaceIdSchema.pipe( + withStatics((schema: typeof workspaceIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + ascending: (id?: string) => schema.makeUnsafe(Identifier.ascending("workspace", id)), + zod: Identifier.schema("workspace").pipe(z.custom<WorkspaceID>()), + })), +) diff --git a/packages/opencode/src/control-plane/types.ts b/packages/opencode/src/control-plane/types.ts index 3d27757fd1..ab628a6938 100644 --- a/packages/opencode/src/control-plane/types.ts +++ b/packages/opencode/src/control-plane/types.ts @@ -1,14 +1,15 @@ import z from "zod" -import { Identifier } from "@/id/id" +import { ProjectID } from "@/project/schema" +import { WorkspaceID } from "./schema" export const WorkspaceInfo = z.object({ - id: Identifier.schema("workspace"), + id: WorkspaceID.zod, type: z.string(), branch: z.string().nullable(), name: z.string().nullable(), directory: z.string().nullable(), extra: z.unknown().nullable(), - projectID: z.string(), + projectID: ProjectID.zod, }) export type WorkspaceInfo = z.infer<typeof WorkspaceInfo> diff --git a/packages/opencode/src/control-plane/workspace-context.ts b/packages/opencode/src/control-plane/workspace-context.ts index f7297b3f4b..cdd975dc4f 100644 --- a/packages/opencode/src/control-plane/workspace-context.ts +++ b/packages/opencode/src/control-plane/workspace-context.ts 
@@ -1,13 +1,14 @@ import { Context } from "../util/context" +import type { WorkspaceID } from "./schema" interface Context { - workspaceID?: string + workspaceID?: WorkspaceID } const context = Context.create<Context>("workspace") export const WorkspaceContext = { - async provide<R>(input: { workspaceID?: string; fn: () => R }): Promise<R> { + async provide<R>(input: { workspaceID?: WorkspaceID; fn: () => R }): Promise<R> { return context.provide({ workspaceID: input.workspaceID }, async () => { return input.fn() }) diff --git a/packages/opencode/src/control-plane/workspace-router-middleware.ts b/packages/opencode/src/control-plane/workspace-router-middleware.ts index b48f2fd2b7..463a95ef2b 100644 --- a/packages/opencode/src/control-plane/workspace-router-middleware.ts +++ b/packages/opencode/src/control-plane/workspace-router-middleware.ts @@ -1,6 +1,5 @@ -import { Instance } from "@/project/instance" import type { MiddlewareHandler } from "hono" -import { Installation } from "../installation" +import { Flag } from "../flag/flag" import { getAdaptor } from "./adaptors" import { Workspace } from "./workspace" import { WorkspaceContext } from "./workspace-context" @@ -38,7 +37,7 @@ async function routeRequest(req: Request) { export const WorkspaceRouterMiddleware: MiddlewareHandler = async (c, next) => { // Only available in development for now - if (!Installation.isLocal()) { + if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) { return next() } diff --git a/packages/opencode/src/control-plane/workspace-server/server.ts b/packages/opencode/src/control-plane/workspace-server/server.ts index fd7fd93086..b0744fe025 100644 --- a/packages/opencode/src/control-plane/workspace-server/server.ts +++ b/packages/opencode/src/control-plane/workspace-server/server.ts @@ -4,6 +4,7 @@ import { InstanceBootstrap } from "../../project/bootstrap" import { SessionRoutes } from "../../server/routes/session" import { WorkspaceServerRoutes } from "./routes" import { WorkspaceContext } from 
"../workspace-context" +import { WorkspaceID } from "../schema" export namespace WorkspaceServer { export function App() { @@ -20,9 +21,9 @@ export namespace WorkspaceServer { return new Hono() .use(async (c, next) => { - const workspaceID = c.req.query("workspace") || c.req.header("x-opencode-workspace") + const rawWorkspaceID = c.req.query("workspace") || c.req.header("x-opencode-workspace") const raw = c.req.query("directory") || c.req.header("x-opencode-directory") - if (workspaceID == null) { + if (rawWorkspaceID == null) { throw new Error("workspaceID parameter is required") } if (raw == null) { @@ -38,7 +39,7 @@ export namespace WorkspaceServer { })() return WorkspaceContext.provide({ - workspaceID, + workspaceID: WorkspaceID.make(rawWorkspaceID), async fn() { return Instance.provide({ directory, diff --git a/packages/opencode/src/control-plane/workspace.sql.ts b/packages/opencode/src/control-plane/workspace.sql.ts index 1ba1605f8e..272907da15 100644 --- a/packages/opencode/src/control-plane/workspace.sql.ts +++ b/packages/opencode/src/control-plane/workspace.sql.ts @@ -1,14 +1,17 @@ import { sqliteTable, text } from "drizzle-orm/sqlite-core" -import { ProjectTable } from "@/project/project.sql" +import { ProjectTable } from "../project/project.sql" +import type { ProjectID } from "../project/schema" +import type { WorkspaceID } from "./schema" export const WorkspaceTable = sqliteTable("workspace", { - id: text().primaryKey(), + id: text().$type<WorkspaceID>().primaryKey(), type: text().notNull(), branch: text(), name: text(), directory: text(), extra: text({ mode: "json" }), project_id: text() + .$type<ProjectID>() .notNull() .references(() => ProjectTable.id, { onDelete: "cascade" }), }) diff --git a/packages/opencode/src/control-plane/workspace.ts b/packages/opencode/src/control-plane/workspace.ts index 8c76fbdab9..c3c28ed605 100644 --- a/packages/opencode/src/control-plane/workspace.ts +++ b/packages/opencode/src/control-plane/workspace.ts @@ -1,14 +1,15 
@@ import z from "zod" -import { Identifier } from "@/id/id" import { fn } from "@/util/fn" import { Database, eq } from "@/storage/db" import { Project } from "@/project/project" import { BusEvent } from "@/bus/bus-event" import { GlobalBus } from "@/bus/global" import { Log } from "@/util/log" +import { ProjectID } from "@/project/schema" import { WorkspaceTable } from "./workspace.sql" import { getAdaptor } from "./adaptors" import { WorkspaceInfo } from "./types" +import { WorkspaceID } from "./schema" import { parseSSE } from "./sse" export namespace Workspace { @@ -45,15 +46,15 @@ export namespace Workspace { } const CreateInput = z.object({ - id: Identifier.schema("workspace").optional(), + id: WorkspaceID.zod.optional(), type: Info.shape.type, branch: Info.shape.branch, - projectID: Info.shape.projectID, + projectID: ProjectID.zod, extra: Info.shape.extra, }) export const create = fn(CreateInput, async (input) => { - const id = Identifier.ascending("workspace", input.id) + const id = WorkspaceID.ascending(input.id) const adaptor = await getAdaptor(input.type) const config = await adaptor.configure({ ...input, id, name: null, directory: null }) @@ -93,13 +94,13 @@ export namespace Workspace { return rows.map(fromRow).sort((a, b) => a.id.localeCompare(b.id)) } - export const get = fn(Identifier.schema("workspace"), async (id) => { + export const get = fn(WorkspaceID.zod, async (id) => { const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) if (!row) return return fromRow(row) }) - export const remove = fn(Identifier.schema("workspace"), async (id) => { + export const remove = fn(WorkspaceID.zod, async (id) => { const row = Database.use((db) => db.select().from(WorkspaceTable).where(eq(WorkspaceTable.id, id)).get()) if (row) { const info = fromRow(row) diff --git a/packages/opencode/src/control/control.sql.ts b/packages/opencode/src/control/control.sql.ts deleted file mode 100644 index 7b805c1627..0000000000 
--- a/packages/opencode/src/control/control.sql.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { sqliteTable, text, integer, primaryKey, uniqueIndex } from "drizzle-orm/sqlite-core" -import { eq } from "drizzle-orm" -import { Timestamps } from "@/storage/schema.sql" - -export const ControlAccountTable = sqliteTable( - "control_account", - { - email: text().notNull(), - url: text().notNull(), - access_token: text().notNull(), - refresh_token: text().notNull(), - token_expiry: integer(), - active: integer({ mode: "boolean" }) - .notNull() - .$default(() => false), - ...Timestamps, - }, - (table) => [ - primaryKey({ columns: [table.email, table.url] }), - // uniqueIndex("control_account_active_idx").on(table.email).where(eq(table.active, true)), - ], -) diff --git a/packages/opencode/src/control/index.ts b/packages/opencode/src/control/index.ts deleted file mode 100644 index f712e88281..0000000000 --- a/packages/opencode/src/control/index.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { eq, and } from "drizzle-orm" -import { Database } from "@/storage/db" -import { ControlAccountTable } from "./control.sql" -import z from "zod" - -export * from "./control.sql" - -export namespace Control { - export const Account = z.object({ - email: z.string(), - url: z.string(), - }) - export type Account = z.infer<typeof Account> - - function fromRow(row: (typeof ControlAccountTable)["$inferSelect"]): Account { - return { - email: row.email, - url: row.url, - } - } - - export function account(): Account | undefined { - const row = Database.use((db) => - db.select().from(ControlAccountTable).where(eq(ControlAccountTable.active, true)).get(), - ) - return row ? 
fromRow(row) : undefined - } - - export async function token(): Promise<string | undefined> { - const row = Database.use((db) => - db.select().from(ControlAccountTable).where(eq(ControlAccountTable.active, true)).get(), - ) - if (!row) return undefined - if (row.token_expiry && row.token_expiry > Date.now()) return row.access_token - - const res = await fetch(`${row.url}/oauth/token`, { - method: "POST", - headers: { "Content-Type": "application/x-www-form-urlencoded" }, - body: new URLSearchParams({ - grant_type: "refresh_token", - refresh_token: row.refresh_token, - }).toString(), - }) - - if (!res.ok) return - - const json = (await res.json()) as { - access_token: string - refresh_token?: string - expires_in?: number - } - - Database.use((db) => - db - .update(ControlAccountTable) - .set({ - access_token: json.access_token, - refresh_token: json.refresh_token ?? row.refresh_token, - token_expiry: json.expires_in ? Date.now() + json.expires_in * 1000 : undefined, - }) - .where(and(eq(ControlAccountTable.email, row.email), eq(ControlAccountTable.url, row.url))) - .run(), - ) - - return json.access_token - } -} diff --git a/packages/opencode/src/effect/runtime.ts b/packages/opencode/src/effect/runtime.ts new file mode 100644 index 0000000000..23acff7337 --- /dev/null +++ b/packages/opencode/src/effect/runtime.ts @@ -0,0 +1,5 @@ +import { Layer, ManagedRuntime } from "effect" +import { AccountService } from "@/account/service" +import { AuthService } from "@/auth/service" + +export const runtime = ManagedRuntime.make(Layer.mergeAll(AccountService.defaultLayer, AuthService.defaultLayer)) diff --git a/packages/opencode/src/file/index.ts b/packages/opencode/src/file/index.ts index 01f07c9afa..e03fc8a9f3 100644 --- a/packages/opencode/src/file/index.ts +++ b/packages/opencode/src/file/index.ts @@ -1,6 +1,5 @@ import { BusEvent } from "@/bus/bus-event" import z from "zod" -import { $ } from "bun" import { formatPatch, structuredPatch } from "diff" import path from "path" 
import fs from "fs" @@ -11,6 +10,8 @@ import { Instance } from "../project/instance" import { Ripgrep } from "./ripgrep" import fuzzysort from "fuzzysort" import { Global } from "../global" +import { git } from "@/util/git" +import { Protected } from "./protected" export namespace File { const log = Log.create({ service: "file" }) @@ -345,10 +346,7 @@ export namespace File { if (isGlobalHome) { const dirs = new Set<string>() - const ignore = new Set<string>() - - if (process.platform === "darwin") ignore.add("Library") - if (process.platform === "win32") ignore.add("AppData") + const ignore = Protected.names() const ignoreNested = new Set(["node_modules", "dist", "build", "target", "vendor"]) const shouldIgnore = (name: string) => name.startsWith(".") || ignore.has(name) @@ -418,11 +416,11 @@ export namespace File { const project = Instance.project if (project.vcs !== "git") return [] - const diffOutput = await $`git -c core.fsmonitor=false -c core.quotepath=false diff --numstat HEAD` - .cwd(Instance.directory) - .quiet() - .nothrow() - .text() + const diffOutput = ( + await git(["-c", "core.fsmonitor=false", "-c", "core.quotepath=false", "diff", "--numstat", "HEAD"], { + cwd: Instance.directory, + }) + ).text() const changedFiles: Info[] = [] @@ -439,12 +437,14 @@ export namespace File { } } - const untrackedOutput = - await $`git -c core.fsmonitor=false -c core.quotepath=false ls-files --others --exclude-standard` - .cwd(Instance.directory) - .quiet() - .nothrow() - .text() + const untrackedOutput = ( + await git( + ["-c", "core.fsmonitor=false", "-c", "core.quotepath=false", "ls-files", "--others", "--exclude-standard"], + { + cwd: Instance.directory, + }, + ) + ).text() if (untrackedOutput.trim()) { const untrackedFiles = untrackedOutput.trim().split("\n") @@ -465,12 +465,14 @@ export namespace File { } // Get deleted files - const deletedOutput = - await $`git -c core.fsmonitor=false -c core.quotepath=false diff --name-only --diff-filter=D HEAD` - 
.cwd(Instance.directory) - .quiet() - .nothrow() - .text() + const deletedOutput = ( + await git( + ["-c", "core.fsmonitor=false", "-c", "core.quotepath=false", "diff", "--name-only", "--diff-filter=D", "HEAD"], + { + cwd: Instance.directory, + }, + ) + ).text() if (deletedOutput.trim()) { const deletedFiles = deletedOutput.trim().split("\n") @@ -541,16 +543,14 @@ export namespace File { const content = (await Filesystem.readText(full).catch(() => "")).trim() if (project.vcs === "git") { - let diff = await $`git -c core.fsmonitor=false diff ${file}`.cwd(Instance.directory).quiet().nothrow().text() + let diff = (await git(["-c", "core.fsmonitor=false", "diff", "--", file], { cwd: Instance.directory })).text() if (!diff.trim()) { - diff = await $`git -c core.fsmonitor=false diff --staged ${file}` - .cwd(Instance.directory) - .quiet() - .nothrow() - .text() + diff = ( + await git(["-c", "core.fsmonitor=false", "diff", "--staged", "--", file], { cwd: Instance.directory }) + ).text() } if (diff.trim()) { - const original = await $`git show HEAD:${file}`.cwd(Instance.directory).quiet().nothrow().text() + const original = (await git(["show", `HEAD:${file}`], { cwd: Instance.directory })).text() const patch = structuredPatch(file, file, original, content, "old", "new", { context: Infinity, ignoreWhitespace: true, diff --git a/packages/opencode/src/file/protected.ts b/packages/opencode/src/file/protected.ts new file mode 100644 index 0000000000..d519746193 --- /dev/null +++ b/packages/opencode/src/file/protected.ts @@ -0,0 +1,59 @@ +import path from "path" +import os from "os" + +const home = os.homedir() + +// macOS directories that trigger TCC (Transparency, Consent, and Control) +// permission prompts when accessed by a non-sandboxed process. 
+const DARWIN_HOME = [ + // Media + "Music", + "Pictures", + "Movies", + // User-managed folders synced via iCloud / subject to TCC + "Downloads", + "Desktop", + "Documents", + // Other system-managed + "Public", + "Applications", + "Library", +] + +const DARWIN_LIBRARY = [ + "Application Support/AddressBook", + "Calendars", + "Mail", + "Messages", + "Safari", + "Cookies", + "Application Support/com.apple.TCC", + "PersonalizationPortrait", + "Metadata/CoreSpotlight", + "Suggestions", +] + +const DARWIN_ROOT = ["/.DocumentRevisions-V100", "/.Spotlight-V100", "/.Trashes", "/.fseventsd"] + +const WIN32_HOME = ["AppData", "Downloads", "Desktop", "Documents", "Pictures", "Music", "Videos", "OneDrive"] + +export namespace Protected { + /** Directory basenames to skip when scanning the home directory. */ + export function names(): ReadonlySet<string> { + if (process.platform === "darwin") return new Set(DARWIN_HOME) + if (process.platform === "win32") return new Set(WIN32_HOME) + return new Set() + } + + /** Absolute paths that should never be watched, stated, or scanned. 
*/ + export function paths(): string[] { + if (process.platform === "darwin") + return [ + ...DARWIN_HOME.map((n) => path.join(home, n)), + ...DARWIN_LIBRARY.map((n) => path.join(home, "Library", n)), + ...DARWIN_ROOT, + ] + if (process.platform === "win32") return WIN32_HOME.map((n) => path.join(home, n)) + return [] + } +} diff --git a/packages/opencode/src/file/ripgrep.ts b/packages/opencode/src/file/ripgrep.ts index 09fef453c9..601c82e94f 100644 --- a/packages/opencode/src/file/ripgrep.ts +++ b/packages/opencode/src/file/ripgrep.ts @@ -5,7 +5,7 @@ import fs from "fs/promises" import z from "zod" import { NamedError } from "@opencode-ai/util/error" import { lazy } from "../util/lazy" -import { $ } from "bun" + import { Filesystem } from "../util/filesystem" import { Process } from "../util/process" import { which } from "../util/which" @@ -100,6 +100,7 @@ export namespace Ripgrep { }, "x64-darwin": { platform: "x86_64-apple-darwin", extension: "tar.gz" }, "x64-linux": { platform: "x86_64-unknown-linux-musl", extension: "tar.gz" }, + "arm64-win32": { platform: "aarch64-pc-windows-msvc", extension: "zip" }, "x64-win32": { platform: "x86_64-pc-windows-msvc", extension: "zip" }, } as const @@ -338,7 +339,7 @@ export namespace Ripgrep { limit?: number follow?: boolean }) { - const args = [`${await filepath()}`, "--json", "--hidden", "--glob='!.git/*'"] + const args = [`${await filepath()}`, "--json", "--hidden", "--glob=!.git/*"] if (input.follow) args.push("--follow") if (input.glob) { @@ -354,14 +355,16 @@ export namespace Ripgrep { args.push("--") args.push(input.pattern) - const command = args.join(" ") - const result = await $`${{ raw: command }}`.cwd(input.cwd).quiet().nothrow() - if (result.exitCode !== 0) { + const result = await Process.text(args, { + cwd: input.cwd, + nothrow: true, + }) + if (result.code !== 0) { return [] } // Handle both Unix (\n) and Windows (\r\n) line endings - const lines = result.text().trim().split(/\r?\n/).filter(Boolean) + const 
lines = result.text.trim().split(/\r?\n/).filter(Boolean) // Parse JSON lines from ripgrep output return lines diff --git a/packages/opencode/src/file/watcher.ts b/packages/opencode/src/file/watcher.ts index 626a746c83..3797c16270 100644 --- a/packages/opencode/src/file/watcher.ts +++ b/packages/opencode/src/file/watcher.ts @@ -11,9 +11,10 @@ import { createWrapper } from "@parcel/watcher/wrapper" import { lazy } from "@/util/lazy" import { withTimeout } from "@/util/timeout" import type ParcelWatcher from "@parcel/watcher" -import { $ } from "bun" import { Flag } from "@/flag/flag" import { readdir } from "fs/promises" +import { git } from "@/util/git" +import { Protected } from "./protected" const SUBSCRIBE_TIMEOUT_MS = 10_000 @@ -76,7 +77,7 @@ export namespace FileWatcher { if (Flag.OPENCODE_EXPERIMENTAL_FILEWATCHER) { const pending = w.subscribe(Instance.directory, subscribe, { - ignore: [...FileIgnore.PATTERNS, ...cfgIgnores], + ignore: [...FileIgnore.PATTERNS, ...cfgIgnores, ...Protected.paths()], backend, }) const sub = await withTimeout(pending, SUBSCRIBE_TIMEOUT_MS).catch((err) => { @@ -88,13 +89,10 @@ export namespace FileWatcher { } if (Instance.project.vcs === "git") { - const vcsDir = await $`git rev-parse --git-dir` - .quiet() - .nothrow() - .cwd(Instance.worktree) - .text() - .then((x) => path.resolve(Instance.worktree, x.trim())) - .catch(() => undefined) + const result = await git(["rev-parse", "--git-dir"], { + cwd: Instance.worktree, + }) + const vcsDir = result.exitCode === 0 ? 
path.resolve(Instance.worktree, result.text().trim()) : undefined if (vcsDir && !cfgIgnores.includes(".git") && !cfgIgnores.includes(vcsDir)) { const gitDirContents = await readdir(vcsDir).catch(() => []) const ignoreList = gitDirContents.filter((entry) => entry !== "HEAD") diff --git a/packages/opencode/src/flag/flag.ts b/packages/opencode/src/flag/flag.ts index c913c206c9..8658e17ee8 100644 --- a/packages/opencode/src/flag/flag.ts +++ b/packages/opencode/src/flag/flag.ts @@ -76,9 +76,13 @@ export namespace Flag { export const OPENCODE_EXPERIMENTAL_LSP_TOOL = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_LSP_TOOL") export const OPENCODE_DISABLE_FILETIME_CHECK = truthy("OPENCODE_DISABLE_FILETIME_CHECK") export const OPENCODE_EXPERIMENTAL_PLAN_MODE = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_PLAN_MODE") + export const OPENCODE_EXPERIMENTAL_WORKSPACES = OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES") export const OPENCODE_EXPERIMENTAL_MARKDOWN = !falsy("OPENCODE_EXPERIMENTAL_MARKDOWN") export const OPENCODE_MODELS_URL = process.env["OPENCODE_MODELS_URL"] export const OPENCODE_MODELS_PATH = process.env["OPENCODE_MODELS_PATH"] + export const OPENCODE_DISABLE_CHANNEL_DB = truthy("OPENCODE_DISABLE_CHANNEL_DB") + export const OPENCODE_SKIP_MIGRATIONS = truthy("OPENCODE_SKIP_MIGRATIONS") + export const OPENCODE_STRICT_CONFIG_DEPS = truthy("OPENCODE_STRICT_CONFIG_DEPS") function number(key: string) { const value = process.env[key] diff --git a/packages/opencode/src/index.ts b/packages/opencode/src/index.ts index 0338e475a7..85be39f3ba 100644 --- a/packages/opencode/src/index.ts +++ b/packages/opencode/src/index.ts @@ -3,7 +3,8 @@ import { hideBin } from "yargs/helpers" import { RunCommand } from "./cli/cmd/run" import { GenerateCommand } from "./cli/cmd/generate" import { Log } from "./util/log" -import { AuthCommand } from "./cli/cmd/auth" +import { ConsoleCommand } from "./cli/cmd/account" +import { ProvidersCommand } from 
"./cli/cmd/providers" import { AgentCommand } from "./cli/cmd/agent" import { UpgradeCommand } from "./cli/cmd/upgrade" import { UninstallCommand } from "./cli/cmd/uninstall" @@ -159,7 +160,8 @@ let cli = yargs(hideBin(process.argv)) .command(RunCommand) .command(GenerateCommand) .command(DebugCommand) - .command(AuthCommand) + .command(ConsoleCommand) + .command(ProvidersCommand) .command(AgentCommand) .command(UpgradeCommand) .command(UninstallCommand) diff --git a/packages/opencode/src/installation/index.ts b/packages/opencode/src/installation/index.ts index f281593c7c..2e82624097 100644 --- a/packages/opencode/src/installation/index.ts +++ b/packages/opencode/src/installation/index.ts @@ -1,13 +1,20 @@ import { BusEvent } from "@/bus/bus-event" import path from "path" -import { $ } from "bun" import z from "zod" import { NamedError } from "@opencode-ai/util/error" import { Log } from "../util/log" import { iife } from "@/util/iife" import { Flag } from "../flag/flag" -// altimate_change start - telemetry import -import { Telemetry } from "../telemetry" +import { Process } from "@/util/process" +import { buffer } from "node:stream/consumers" +// altimate_change start — telemetry (lazy import to avoid circular dep with Telemetry → Installation) +let _telemetryCache: (typeof import("../telemetry"))["Telemetry"] | undefined +async function getTelemetry() { + if (_telemetryCache) return _telemetryCache + const { Telemetry } = await import("../telemetry") + _telemetryCache = Telemetry + return Telemetry +} // altimate_change end declare global { @@ -18,6 +25,38 @@ declare global { export namespace Installation { const log = Log.create({ service: "installation" }) + async function text(cmd: string[], opts: { cwd?: string; env?: NodeJS.ProcessEnv } = {}) { + return Process.text(cmd, { + cwd: opts.cwd, + env: opts.env, + nothrow: true, + }).then((x) => x.text) + } + + async function upgradeCurl(target: string) { + const body = await 
fetch("https://altimate.ai/install").then((res) => { + if (!res.ok) throw new Error(res.statusText) + return res.text() + }) + const proc = Process.spawn(["bash"], { + stdin: "pipe", + stdout: "pipe", + stderr: "pipe", + env: { + ...process.env, + VERSION: target, + }, + }) + if (!proc.stdin || !proc.stdout || !proc.stderr) throw new Error("Process output not available") + proc.stdin.end(body) + const [code, stdout, stderr] = await Promise.all([proc.exited, buffer(proc.stdout), buffer(proc.stderr)]) + return { + code, + stdout, + stderr, + } + } + export type Method = Awaited<ReturnType<typeof method>> export const Event = { @@ -68,31 +107,31 @@ export namespace Installation { const checks = [ { name: "npm" as const, - command: () => $`npm list -g --depth=0`.throws(false).quiet().text(), + command: () => text(["npm", "list", "-g", "--depth=0"]), }, { name: "yarn" as const, - command: () => $`yarn global list`.throws(false).quiet().text(), + command: () => text(["yarn", "global", "list"]), }, { name: "pnpm" as const, - command: () => $`pnpm list -g --depth=0`.throws(false).quiet().text(), + command: () => text(["pnpm", "list", "-g", "--depth=0"]), }, { name: "bun" as const, - command: () => $`bun pm ls -g`.throws(false).quiet().text(), + command: () => text(["bun", "pm", "ls", "-g"]), }, { name: "brew" as const, - command: () => $`brew list --formula altimate`.throws(false).quiet().text(), + command: () => text(["brew", "list", "--formula", "opencode"]), }, { name: "scoop" as const, - command: () => $`scoop list altimate`.throws(false).quiet().text(), + command: () => text(["scoop", "list", "opencode"]), }, { name: "choco" as const, - command: () => $`choco list --limit-output altimate`.throws(false).quiet().text(), + command: () => text(["choco", "list", "--limit-output", "opencode"]), }, ] @@ -107,7 +146,7 @@ export namespace Installation { for (const check of checks) { const output = await check.command() const installedName = - check.name === "brew" || 
check.name === "choco" || check.name === "scoop" ? "altimate" : "@opencode-ai/opencode" + check.name === "brew" || check.name === "choco" || check.name === "scoop" ? "opencode" : "opencode-ai" if (output.includes(installedName)) { return check.name } @@ -124,66 +163,77 @@ export namespace Installation { ) async function getBrewFormula() { - const tapFormula = await $`brew list --formula AltimateAI/tap/altimate`.throws(false).quiet().text() - if (tapFormula.includes("altimate")) return "AltimateAI/tap/altimate" - const coreFormula = await $`brew list --formula altimate`.throws(false).quiet().text() - if (coreFormula.includes("altimate")) return "altimate" - return "altimate" + const tapFormula = await text(["brew", "list", "--formula", "AltimateAI/tap/altimate-code"]) + if (tapFormula.includes("opencode")) return "AltimateAI/tap/altimate-code" + const coreFormula = await text(["brew", "list", "--formula", "opencode"]) + if (coreFormula.includes("opencode")) return "opencode" + return "opencode" } export async function upgrade(method: Method, target: string) { - let cmd + let result: Awaited<ReturnType<typeof upgradeCurl>> | undefined switch (method) { case "curl": - cmd = $`curl -fsSL https://altimate-code.dev/install | bash`.env({ - ...process.env, - VERSION: target, - }) + result = await upgradeCurl(target) break case "npm": - cmd = $`npm install -g @opencode-ai/opencode@${target}` + result = await Process.run(["npm", "install", "-g", `@altimateai/altimate-code@${target}`], { nothrow: true }) break case "pnpm": - cmd = $`pnpm install -g @opencode-ai/opencode@${target}` + result = await Process.run(["pnpm", "install", "-g", `@altimateai/altimate-code@${target}`], { nothrow: true }) break case "bun": - cmd = $`bun install -g @opencode-ai/opencode@${target}` + result = await Process.run(["bun", "install", "-g", `@altimateai/altimate-code@${target}`], { nothrow: true }) break case "brew": { const formula = await getBrewFormula() - if (formula.includes("/")) { - cmd = 
- $`brew tap AltimateAI/tap && cd "$(brew --repo AltimateAI/tap)" && git pull --ff-only && brew upgrade ${formula}`.env( - { - HOMEBREW_NO_AUTO_UPDATE: "1", - ...process.env, - }, - ) - break - } - cmd = $`brew upgrade ${formula}`.env({ + const env = { HOMEBREW_NO_AUTO_UPDATE: "1", ...process.env, - }) + } + if (formula.includes("/")) { + const tap = await Process.run(["brew", "tap", "AltimateAI/tap"], { env, nothrow: true }) + if (tap.code !== 0) { + result = tap + break + } + const repo = await Process.text(["brew", "--repo", "AltimateAI/tap"], { env, nothrow: true }) + if (repo.code !== 0) { + result = repo + break + } + const dir = repo.text.trim() + if (dir) { + const pull = await Process.run(["git", "pull", "--ff-only"], { cwd: dir, env, nothrow: true }) + if (pull.code !== 0) { + result = pull + break + } + } + } + result = await Process.run(["brew", "upgrade", formula], { env, nothrow: true }) break } + case "choco": - cmd = $`echo Y | choco upgrade altimate --version=${target}` + result = await Process.run(["choco", "upgrade", "opencode", `--version=${target}`, "-y"], { nothrow: true }) break case "scoop": - cmd = $`scoop install altimate@${target}` + result = await Process.run(["scoop", "install", `opencode@${target}`], { nothrow: true }) break default: throw new Error(`Unknown method: ${method}`) } - const result = await cmd.quiet().throws(false) - if (result.exitCode !== 0) { - const stderr = method === "choco" ? "not running from an elevated command shell" : result.stderr.toString("utf8") - const telemetryMethod = (["npm", "bun", "brew"].includes(method) ? method : "other") as "npm" | "bun" | "brew" | "other" - Telemetry.track({ + // altimate_change start — telemetry for upgrade result + const telemetryMethod = (["npm", "bun", "brew"].includes(method) ? method : "other") as "npm" | "bun" | "brew" | "other" + if (!result || result.code !== 0) { + const stderr = + method === "choco" ? 
"not running from an elevated command shell" : result?.stderr.toString("utf8") || "" + const T = await getTelemetry() + T.track({ type: "upgrade_attempted", timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", + session_id: T.getContext().sessionId || "cli", from_version: VERSION, to_version: target, method: telemetryMethod, @@ -200,24 +250,23 @@ export namespace Installation { stdout: result.stdout.toString(), stderr: result.stderr.toString(), }) - const telemetryMethod = (["npm", "bun", "brew"].includes(method) ? method : "other") as "npm" | "bun" | "brew" | "other" - Telemetry.track({ + const T2 = await getTelemetry() + T2.track({ type: "upgrade_attempted", timestamp: Date.now(), - session_id: Telemetry.getContext().sessionId || "cli", + session_id: T2.getContext().sessionId || "cli", from_version: VERSION, to_version: target, method: telemetryMethod, status: "success", }) - await $`${process.execPath} --version`.nothrow().quiet().text() + // altimate_change end + await Process.text([process.execPath, "--version"], { nothrow: true }) } export const VERSION = typeof OPENCODE_VERSION === "string" ? OPENCODE_VERSION : "local" export const CHANNEL = typeof OPENCODE_CHANNEL === "string" ? 
OPENCODE_CHANNEL : "local" - // altimate_change start - user agent string export const USER_AGENT = `altimate-code/${CHANNEL}/${VERSION}/${Flag.OPENCODE_CLIENT}` - // altimate_change end export async function latest(installMethod?: Method) { const detectedMethod = installMethod || (await method()) @@ -225,13 +274,13 @@ export namespace Installation { if (detectedMethod === "brew") { const formula = await getBrewFormula() if (formula.includes("/")) { - const infoJson = await $`brew info --json=v2 ${formula}`.quiet().text() + const infoJson = await text(["brew", "info", "--json=v2", formula]) const info = JSON.parse(infoJson) const version = info.formulae?.[0]?.versions?.stable if (!version) throw new Error(`Could not detect version for tap formula: ${formula}`) return version } - return fetch("https://formulae.brew.sh/api/formula/altimate.json") + return fetch("https://formulae.brew.sh/api/formula/opencode.json") .then((res) => { if (!res.ok) throw new Error(res.statusText) return res.json() @@ -241,12 +290,12 @@ export namespace Installation { if (detectedMethod === "npm" || detectedMethod === "bun" || detectedMethod === "pnpm") { const registry = await iife(async () => { - const r = (await $`npm config get registry`.quiet().nothrow().text()).trim() + const r = (await text(["npm", "config", "get", "registry"])).trim() const reg = r || "https://registry.npmjs.org" return reg.endsWith("/") ? 
reg.slice(0, -1) : reg }) const channel = CHANNEL - return fetch(`${registry}/@opencode-ai/opencode/${channel}`) + return fetch(`${registry}/opencode-ai/${channel}`) .then((res) => { if (!res.ok) throw new Error(res.statusText) return res.json() @@ -256,7 +305,7 @@ export namespace Installation { if (detectedMethod === "choco") { return fetch( - "https://community.chocolatey.org/api/v2/Packages?$filter=Id%20eq%20%27altimate%27%20and%20IsLatestVersion&$select=Version", + "https://community.chocolatey.org/api/v2/Packages?$filter=Id%20eq%20%27opencode%27%20and%20IsLatestVersion&$select=Version", { headers: { Accept: "application/json;odata=verbose" } }, ) .then((res) => { @@ -267,7 +316,7 @@ export namespace Installation { } if (detectedMethod === "scoop") { - return fetch("https://raw.githubusercontent.com/ScoopInstaller/Main/master/bucket/altimate.json", { + return fetch("https://raw.githubusercontent.com/ScoopInstaller/Main/master/bucket/opencode.json", { headers: { Accept: "application/json" }, }) .then((res) => { diff --git a/packages/opencode/src/lsp/index.ts b/packages/opencode/src/lsp/index.ts index 9d7d30632a..6ea7554c09 100644 --- a/packages/opencode/src/lsp/index.ts +++ b/packages/opencode/src/lsp/index.ts @@ -114,6 +114,7 @@ export namespace LSP { return { process: spawn(item.command[0], item.command.slice(1), { cwd: root, + windowsHide: true, env: { ...process.env, ...item.env, diff --git a/packages/opencode/src/lsp/server.ts b/packages/opencode/src/lsp/server.ts index e09fbc97fe..8f93213ea1 100644 --- a/packages/opencode/src/lsp/server.ts +++ b/packages/opencode/src/lsp/server.ts @@ -1,10 +1,9 @@ -import { spawn, type ChildProcessWithoutNullStreams } from "child_process" +import { spawn as launch, type ChildProcessWithoutNullStreams } from "child_process" import path from "path" import os from "os" import { Global } from "../global" import { Log } from "../util/log" import { BunProc } from "../bun" -import { $ } from "bun" import { text } from 
"node:stream/consumers" import fs from "fs/promises" import { Filesystem } from "../util/filesystem" @@ -13,6 +12,12 @@ import { Flag } from "../flag/flag" import { Archive } from "../util/archive" import { Process } from "../util/process" import { which } from "../util/which" +import { Module } from "@opencode-ai/util/module" + +const spawn = ((cmd, args, opts) => { + if (Array.isArray(args)) return launch(cmd, [...args], { ...(opts ?? {}), windowsHide: true }) + return launch(cmd, { ...(args ?? {}), windowsHide: true }) +}) as typeof launch export namespace LSPServer { const log = Log.create({ service: "lsp.server" }) @@ -21,6 +26,8 @@ export namespace LSPServer { .stat(p) .then(() => true) .catch(() => false) + const run = (cmd: string[], opts: Process.RunOptions = {}) => Process.run(cmd, { ...opts, nothrow: true }) + const output = (cmd: string[], opts: Process.RunOptions = {}) => Process.text(cmd, { ...opts, nothrow: true }) export interface Handle { process: ChildProcessWithoutNullStreams @@ -97,7 +104,7 @@ export namespace LSPServer { ), extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"], async spawn(root) { - const tsserver = await Bun.resolve("typescript/lib/tsserver.js", Instance.directory).catch(() => {}) + const tsserver = Module.resolve("typescript/lib/tsserver.js", Instance.directory) log.info("typescript server", { tsserver }) if (!tsserver) return const proc = spawn(BunProc.which(), ["x", "typescript-language-server", "--stdio"], { @@ -172,7 +179,7 @@ export namespace LSPServer { root: NearestRoot(["package-lock.json", "bun.lockb", "bun.lock", "pnpm-lock.yaml", "yarn.lock"]), extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts", ".vue"], async spawn(root) { - const eslint = await Bun.resolve("eslint", Instance.directory).catch(() => {}) + const eslint = Module.resolve("eslint", Instance.directory) if (!eslint) return log.info("spawning eslint server") const serverPath = path.join(Global.Path.bin, 
"vscode-eslint", "server", "out", "eslintServer.js") @@ -205,8 +212,8 @@ export namespace LSPServer { await fs.rename(extractedPath, finalPath) const npmCmd = process.platform === "win32" ? "npm.cmd" : "npm" - await $`${npmCmd} install`.cwd(finalPath).quiet() - await $`${npmCmd} run compile`.cwd(finalPath).quiet() + await Process.run([npmCmd, "install"], { cwd: finalPath }) + await Process.run([npmCmd, "run", "compile"], { cwd: finalPath }) log.info("installed VS Code ESLint server", { serverPath }) } @@ -340,7 +347,7 @@ export namespace LSPServer { let args = ["lsp-proxy", "--stdio"] if (!bin) { - const resolved = await Bun.resolve("biome", root).catch(() => undefined) + const resolved = Module.resolve("biome", root) if (!resolved) return bin = BunProc.which() args = ["x", "biome", "lsp-proxy", "--stdio"] @@ -602,10 +609,11 @@ export namespace LSPServer { recursive: true, }) - await $`mix deps.get && mix compile && mix elixir_ls.release2 -o release` - .quiet() - .cwd(path.join(Global.Path.bin, "elixir-ls-master")) - .env({ MIX_ENV: "prod", ...process.env }) + const cwd = path.join(Global.Path.bin, "elixir-ls-master") + const env = { MIX_ENV: "prod", ...process.env } + await Process.run(["mix", "deps.get"], { cwd, env }) + await Process.run(["mix", "compile"], { cwd, env }) + await Process.run(["mix", "elixir_ls.release2", "-o", "release"], { cwd, env }) log.info(`installed elixir-ls`, { path: elixirLsPath, @@ -706,7 +714,7 @@ export namespace LSPServer { }) if (!ok) return } else { - await $`tar -xf ${tempPath}`.cwd(Global.Path.bin).quiet().nothrow() + await run(["tar", "-xf", tempPath], { cwd: Global.Path.bin }) } await fs.rm(tempPath, { force: true }) @@ -719,7 +727,7 @@ export namespace LSPServer { } if (platform !== "win32") { - await $`chmod +x ${bin}`.quiet().nothrow() + await fs.chmod(bin, 0o755).catch(() => {}) } log.info(`installed zls`, { bin }) @@ -831,11 +839,11 @@ export namespace LSPServer { // This is specific to macOS where sourcekit-lsp is 
typically installed with Xcode if (!which("xcrun")) return - const lspLoc = await $`xcrun --find sourcekit-lsp`.quiet().nothrow() + const lspLoc = await output(["xcrun", "--find", "sourcekit-lsp"]) - if (lspLoc.exitCode !== 0) return + if (lspLoc.code !== 0) return - const bin = lspLoc.text().trim() + const bin = lspLoc.text.trim() return { process: spawn(bin, { @@ -1010,7 +1018,7 @@ export namespace LSPServer { if (!ok) return } if (tar) { - await $`tar -xf ${archive}`.cwd(Global.Path.bin).quiet().nothrow() + await run(["tar", "-xf", archive], { cwd: Global.Path.bin }) } await fs.rm(archive, { force: true }) @@ -1021,7 +1029,7 @@ export namespace LSPServer { } if (platform !== "win32") { - await $`chmod +x ${bin}`.quiet().nothrow() + await fs.chmod(bin, 0o755).catch(() => {}) } await fs.unlink(path.join(Global.Path.bin, "clangd")).catch(() => {}) @@ -1082,7 +1090,7 @@ export namespace LSPServer { extensions: [".astro"], root: NearestRoot(["package-lock.json", "bun.lockb", "bun.lock", "pnpm-lock.yaml", "yarn.lock"]), async spawn(root) { - const tsserver = await Bun.resolve("typescript/lib/tsserver.js", Instance.directory).catch(() => {}) + const tsserver = Module.resolve("typescript/lib/tsserver.js", Instance.directory) if (!tsserver) { log.info("typescript not found, required for Astro language server") return @@ -1130,7 +1138,30 @@ export namespace LSPServer { export const JDTLS: Info = { id: "jdtls", - root: NearestRoot(["pom.xml", "build.gradle", "build.gradle.kts", ".project", ".classpath"]), + root: async (file) => { + // Without exclusions, NearestRoot defaults to instance directory so we can't + // distinguish between a) no project found and b) project found at instance dir. + // So we can't choose the root from (potential) monorepo markers first. + // Look for potential subproject markers first while excluding potential monorepo markers. 
+ const settingsMarkers = ["settings.gradle", "settings.gradle.kts"] + const gradleMarkers = ["gradlew", "gradlew.bat"] + const exclusionsForMonorepos = gradleMarkers.concat(settingsMarkers) + + const [projectRoot, wrapperRoot, settingsRoot] = await Promise.all([ + NearestRoot( + ["pom.xml", "build.gradle", "build.gradle.kts", ".project", ".classpath"], + exclusionsForMonorepos, + )(file), + NearestRoot(gradleMarkers, settingsMarkers)(file), + NearestRoot(settingsMarkers)(file), + ]) + + // If projectRoot is undefined we know we are in a monorepo or no project at all. + // So can safely fall through to the other roots + if (projectRoot) return projectRoot + if (wrapperRoot) return wrapperRoot + if (settingsRoot) return settingsRoot + }, extensions: [".java"], async spawn(root) { const java = which("java") @@ -1138,13 +1169,10 @@ export namespace LSPServer { log.error("Java 21 or newer is required to run the JDTLS. Please install it first.") return } - const javaMajorVersion = await $`java -version` - .quiet() - .nothrow() - .then(({ stderr }) => { - const m = /"(\d+)\.\d+\.\d+"/.exec(stderr.toString()) - return !m ? undefined : parseInt(m[1]) - }) + const javaMajorVersion = await run(["java", "-version"]).then((result) => { + const m = /"(\d+)\.\d+\.\d+"/.exec(result.stderr.toString()) + return !m ? 
undefined : parseInt(m[1]) + }) if (javaMajorVersion == null || javaMajorVersion < 21) { log.error("JDTLS requires at least Java 21.") return @@ -1161,27 +1189,27 @@ export namespace LSPServer { const archiveName = "release.tar.gz" log.info("Downloading JDTLS archive", { url: releaseURL, dest: distPath }) - const curlResult = await $`curl -L -o ${archiveName} '${releaseURL}'`.cwd(distPath).quiet().nothrow() - if (curlResult.exitCode !== 0) { - log.error("Failed to download JDTLS", { exitCode: curlResult.exitCode, stderr: curlResult.stderr.toString() }) + const download = await fetch(releaseURL) + if (!download.ok || !download.body) { + log.error("Failed to download JDTLS", { status: download.status, statusText: download.statusText }) return } + await Filesystem.writeStream(path.join(distPath, archiveName), download.body) log.info("Extracting JDTLS archive") - const tarResult = await $`tar -xzf ${archiveName}`.cwd(distPath).quiet().nothrow() - if (tarResult.exitCode !== 0) { - log.error("Failed to extract JDTLS", { exitCode: tarResult.exitCode, stderr: tarResult.stderr.toString() }) + const tarResult = await run(["tar", "-xzf", archiveName], { cwd: distPath }) + if (tarResult.code !== 0) { + log.error("Failed to extract JDTLS", { exitCode: tarResult.code, stderr: tarResult.stderr.toString() }) return } await fs.rm(path.join(distPath, archiveName), { force: true }) log.info("JDTLS download and extraction completed") } - const jarFileName = await $`ls org.eclipse.equinox.launcher_*.jar` - .cwd(launcherDir) - .quiet() - .nothrow() - .then(({ stdout }) => stdout.toString().trim()) + const jarFileName = + (await fs.readdir(launcherDir).catch(() => [])) + .find((item) => /^org\.eclipse\.equinox\.launcher_.*\.jar$/.test(item)) + ?.trim() ?? 
"" const launcherJar = path.join(launcherDir, jarFileName) if (!(await pathExists(launcherJar))) { log.error(`Failed to locate the JDTLS launcher module in the installed directory: ${distPath}.`) @@ -1294,7 +1322,15 @@ export namespace LSPServer { await fs.mkdir(distPath, { recursive: true }) const archivePath = path.join(distPath, "kotlin-ls.zip") - await $`curl -L -o '${archivePath}' '${releaseURL}'`.quiet().nothrow() + const download = await fetch(releaseURL) + if (!download.ok || !download.body) { + log.error("Failed to download Kotlin Language Server", { + status: download.status, + statusText: download.statusText, + }) + return + } + await Filesystem.writeStream(archivePath, download.body) const ok = await Archive.extractZip(archivePath, distPath) .then(() => true) .catch((error) => { @@ -1304,7 +1340,7 @@ export namespace LSPServer { if (!ok) return await fs.rm(archivePath, { force: true }) if (process.platform !== "win32") { - await $`chmod +x ${launcherScript}`.quiet().nothrow() + await fs.chmod(launcherScript, 0o755).catch(() => {}) } log.info("Installed Kotlin Language Server", { path: launcherScript }) } @@ -1468,10 +1504,9 @@ export namespace LSPServer { }) if (!ok) return } else { - const ok = await $`tar -xzf ${tempPath} -C ${installDir}` - .quiet() - .then(() => true) - .catch((error) => { + const ok = await run(["tar", "-xzf", tempPath, "-C", installDir]) + .then((result) => result.code === 0) + .catch((error: unknown) => { log.error("Failed to extract lua-language-server archive", { error }) return false }) @@ -1489,11 +1524,15 @@ export namespace LSPServer { } if (platform !== "win32") { - const ok = await $`chmod +x ${bin}`.quiet().catch((error) => { - log.error("Failed to set executable permission for lua-language-server binary", { - error, + const ok = await fs + .chmod(bin, 0o755) + .then(() => true) + .catch((error: unknown) => { + log.error("Failed to set executable permission for lua-language-server binary", { + error, + }) + return false 
}) - }) if (!ok) return } @@ -1707,7 +1746,7 @@ export namespace LSPServer { } if (platform !== "win32") { - await $`chmod +x ${bin}`.quiet().nothrow() + await fs.chmod(bin, 0o755).catch(() => {}) } log.info(`installed terraform-ls`, { bin }) @@ -1790,7 +1829,7 @@ export namespace LSPServer { if (!ok) return } if (ext === "tar.gz") { - await $`tar -xzf ${tempPath}`.cwd(Global.Path.bin).quiet().nothrow() + await run(["tar", "-xzf", tempPath], { cwd: Global.Path.bin }) } await fs.rm(tempPath, { force: true }) @@ -1803,7 +1842,7 @@ export namespace LSPServer { } if (platform !== "win32") { - await $`chmod +x ${bin}`.quiet().nothrow() + await fs.chmod(bin, 0o755).catch(() => {}) } log.info("installed texlab", { bin }) @@ -1995,7 +2034,7 @@ export namespace LSPServer { }) if (!ok) return } else { - await $`tar -xzf ${tempPath} --strip-components=1`.cwd(Global.Path.bin).quiet().nothrow() + await run(["tar", "-xzf", tempPath, "--strip-components=1"], { cwd: Global.Path.bin }) } await fs.rm(tempPath, { force: true }) @@ -2008,7 +2047,7 @@ export namespace LSPServer { } if (platform !== "win32") { - await $`chmod +x ${bin}`.quiet().nothrow() + await fs.chmod(bin, 0o755).catch(() => {}) } log.info("installed tinymist", { bin }) diff --git a/packages/opencode/src/mcp/index.ts b/packages/opencode/src/mcp/index.ts index 2a24aa6c73..25b02d2e33 100644 --- a/packages/opencode/src/mcp/index.ts +++ b/packages/opencode/src/mcp/index.ts @@ -405,8 +405,14 @@ export namespace MCP { } catch (error) { lastError = error instanceof Error ? error : new Error(String(error)) - // Handle OAuth-specific errors - if (error instanceof UnauthorizedError) { + // Handle OAuth-specific errors. + // The SDK throws UnauthorizedError when auth() returns 'REDIRECT', + // but may also throw plain Errors when auth() fails internally + // (e.g. during discovery, registration, or state generation). + // When an authProvider is attached, treat both cases as auth-related. 
+ const isAuthError = + error instanceof UnauthorizedError || (authProvider && lastError.message.includes("OAuth")) + if (isAuthError) { log.info("mcp server requires authentication", { key, transport: name }) // Check if this is a "needs registration" error diff --git a/packages/opencode/src/mcp/oauth-provider.ts b/packages/opencode/src/mcp/oauth-provider.ts index 9359c887c3..ddccddcf4e 100644 --- a/packages/opencode/src/mcp/oauth-provider.ts +++ b/packages/opencode/src/mcp/oauth-provider.ts @@ -144,10 +144,19 @@ export class McpOAuthProvider implements OAuthClientProvider { async state(): Promise<string> { const entry = await McpAuth.get(this.mcpName) - if (!entry?.oauthState) { - throw new Error(`No OAuth state saved for MCP server: ${this.mcpName}`) + if (entry?.oauthState) { + return entry.oauthState } - return entry.oauthState + + // Generate a new state if none exists — the SDK calls state() as a + // generator, not just a reader, so we need to produce a value even when + // startAuth() hasn't pre-saved one (e.g. during automatic auth on first + // connect). 
+ const newState = Array.from(crypto.getRandomValues(new Uint8Array(32))) + .map((b) => b.toString(16).padStart(2, "0")) + .join("") + await McpAuth.updateOAuthState(this.mcpName, newState) + return newState } async invalidateCredentials(type: "all" | "client" | "tokens"): Promise<void> { diff --git a/packages/opencode/src/permission/index.ts b/packages/opencode/src/permission/index.ts index f1cd43fdbe..565ccf20d1 100644 --- a/packages/opencode/src/permission/index.ts +++ b/packages/opencode/src/permission/index.ts @@ -1,11 +1,12 @@ import { BusEvent } from "@/bus/bus-event" import { Bus } from "@/bus" +import { SessionID, MessageID } from "@/session/schema" import z from "zod" import { Log } from "../util/log" -import { Identifier } from "../id/id" import { Plugin } from "../plugin" import { Instance } from "../project/instance" import { Wildcard } from "../util/wildcard" +import { PermissionID } from "./schema" export namespace Permission { const log = Log.create({ service: "permission" }) @@ -14,18 +15,22 @@ export namespace Permission { return pattern === undefined ? [type] : Array.isArray(pattern) ? 
pattern : [pattern] } - function covered(keys: string[], approved: Record<string, boolean>): boolean { - const pats = Object.keys(approved) - return keys.every((k) => pats.some((p) => Wildcard.match(k, p))) + function covered(keys: string[], approved: Map<string, boolean>): boolean { + return keys.every((k) => { + for (const p of approved.keys()) { + if (Wildcard.match(k, p)) return true + } + return false + }) } export const Info = z .object({ - id: z.string(), + id: PermissionID.zod, type: z.string(), pattern: z.union([z.string(), z.array(z.string())]).optional(), - sessionID: z.string(), - messageID: z.string(), + sessionID: SessionID.zod, + messageID: MessageID.zod, callID: z.string().optional(), message: z.string(), metadata: z.record(z.string(), z.any()), @@ -38,44 +43,32 @@ export namespace Permission { }) export type Info = z.infer<typeof Info> + interface PendingEntry { + info: Info + resolve: () => void + reject: (e: any) => void + } + export const Event = { Updated: BusEvent.define("permission.updated", Info), Replied: BusEvent.define( "permission.replied", z.object({ - sessionID: z.string(), - permissionID: z.string(), + sessionID: SessionID.zod, + permissionID: PermissionID.zod, response: z.string(), }), ), } const state = Instance.state( - () => { - const pending: { - [sessionID: string]: { - [permissionID: string]: { - info: Info - resolve: () => void - reject: (e: any) => void - } - } - } = {} - - const approved: { - [sessionID: string]: { - [permissionID: string]: boolean - } - } = {} - - return { - pending, - approved, - } - }, + () => ({ + pending: new Map<SessionID, Map<PermissionID, PendingEntry>>(), + approved: new Map<SessionID, Map<string, boolean>>(), + }), async (state) => { - for (const pending of Object.values(state.pending)) { - for (const item of Object.values(pending)) { + for (const session of state.pending.values()) { + for (const item of session.values()) { item.reject(new RejectedError(item.info.sessionID, item.info.id, 
item.info.callID, item.info.metadata)) } } @@ -89,8 +82,8 @@ export namespace Permission { export function list() { const { pending } = state() const result: Info[] = [] - for (const items of Object.values(pending)) { - for (const item of Object.values(items)) { + for (const session of pending.values()) { + for (const item of session.values()) { result.push(item.info) } } @@ -113,11 +106,11 @@ export namespace Permission { toolCallID: input.callID, pattern: input.pattern, }) - const approvedForSession = approved[input.sessionID] || {} + const approvedForSession = approved.get(input.sessionID) const keys = toKeys(input.pattern, input.type) - if (covered(keys, approvedForSession)) return + if (approvedForSession && covered(keys, approvedForSession)) return const info: Info = { - id: Identifier.ascending("permission"), + id: PermissionID.ascending(), type: input.type, pattern: input.pattern, sessionID: input.sessionID, @@ -141,13 +134,13 @@ export namespace Permission { return } - pending[input.sessionID] = pending[input.sessionID] || {} + if (!pending.has(input.sessionID)) pending.set(input.sessionID, new Map()) return new Promise<void>((resolve, reject) => { - pending[input.sessionID][info.id] = { + pending.get(input.sessionID)!.set(info.id, { info, resolve, reject, - } + }) Bus.publish(Event.Updated, info) }) } @@ -158,9 +151,11 @@ export namespace Permission { export function respond(input: { sessionID: Info["sessionID"]; permissionID: Info["id"]; response: Response }) { log.info("response", input) const { pending, approved } = state() - const match = pending[input.sessionID]?.[input.permissionID] - if (!match) return - delete pending[input.sessionID][input.permissionID] + const session = pending.get(input.sessionID) + const match = session?.get(input.permissionID) + if (!session || !match) return + session.delete(input.permissionID) + if (session.size === 0) pending.delete(input.sessionID) Bus.publish(Event.Replied, { sessionID: input.sessionID, permissionID: 
input.permissionID, @@ -172,30 +167,35 @@ export namespace Permission { } match.resolve() if (input.response === "always") { - approved[input.sessionID] = approved[input.sessionID] || {} + if (!approved.has(input.sessionID)) approved.set(input.sessionID, new Map()) + const approvedSession = approved.get(input.sessionID)! const approveKeys = toKeys(match.info.pattern, match.info.type) for (const k of approveKeys) { - approved[input.sessionID][k] = true + approvedSession.set(k, true) } - const items = pending[input.sessionID] + const items = pending.get(input.sessionID) if (!items) return - for (const item of Object.values(items)) { + const toRespond: Info[] = [] + for (const item of items.values()) { const itemKeys = toKeys(item.info.pattern, item.info.type) - if (covered(itemKeys, approved[input.sessionID])) { - respond({ - sessionID: item.info.sessionID, - permissionID: item.info.id, - response: input.response, - }) + if (covered(itemKeys, approvedSession)) { + toRespond.push(item.info) } } + for (const item of toRespond) { + respond({ + sessionID: item.sessionID, + permissionID: item.id, + response: input.response, + }) + } } } export class RejectedError extends Error { constructor( - public readonly sessionID: string, - public readonly permissionID: string, + public readonly sessionID: SessionID, + public readonly permissionID: PermissionID, public readonly toolCallID?: string, public readonly metadata?: Record<string, any>, public readonly reason?: string, diff --git a/packages/opencode/src/permission/next.ts b/packages/opencode/src/permission/next.ts index df6173fb4d..92ac637646 100644 --- a/packages/opencode/src/permission/next.ts +++ b/packages/opencode/src/permission/next.ts @@ -1,13 +1,15 @@ import { Bus } from "@/bus" import { BusEvent } from "@/bus/bus-event" import { Config } from "@/config/config" -import { Identifier } from "@/id/id" +import { SessionID, MessageID } from "@/session/schema" +import { PermissionID } from "./schema" import { Instance } 
from "@/project/instance" import { Database, eq } from "@/storage/db" import { PermissionTable } from "@/session/session.sql" import { Telemetry } from "@/telemetry" import { fn } from "@/util/fn" import { Log } from "@/util/log" +import { ProjectID } from "@/project/schema" import { Wildcard } from "@/util/wildcard" import os from "os" import z from "zod" @@ -68,15 +70,15 @@ export namespace PermissionNext { export const Request = z .object({ - id: Identifier.schema("permission"), - sessionID: Identifier.schema("session"), + id: PermissionID.zod, + sessionID: SessionID.zod, permission: z.string(), patterns: z.string().array(), metadata: z.record(z.string(), z.any()), always: z.string().array(), tool: z .object({ - messageID: z.string(), + messageID: MessageID.zod, callID: z.string(), }) .optional(), @@ -91,7 +93,7 @@ export namespace PermissionNext { export type Reply = z.infer<typeof Reply> export const Approval = z.object({ - projectID: z.string(), + projectID: ProjectID.zod, patterns: z.string().array(), }) @@ -100,13 +102,19 @@ export namespace PermissionNext { Replied: BusEvent.define( "permission.replied", z.object({ - sessionID: z.string(), - requestID: z.string(), + sessionID: SessionID.zod, + requestID: PermissionID.zod, reply: Reply, }), ), } + interface PendingEntry { + info: Request + resolve: () => void + reject: (e: any) => void + } + const state = Instance.state(() => { const projectID = Instance.project.id const row = Database.use((db) => @@ -114,17 +122,8 @@ export namespace PermissionNext { ) const stored = row?.data ?? ([] as Ruleset) - const pending: Record< - string, - { - info: Request - resolve: () => void - reject: (e: any) => void - } - > = {} - return { - pending, + pending: new Map<PermissionID, PendingEntry>(), approved: stored, } }) @@ -151,17 +150,17 @@ export namespace PermissionNext { throw new DeniedError(ruleset.filter((r) => Wildcard.match(request.permission, r.permission))) } if (rule.action === "ask") { - const id = input.id ?? 
Identifier.ascending("permission") + const id = input.id ?? PermissionID.ascending() return new Promise<void>((resolve, reject) => { const info: Request = { id, ...request, } - s.pending[id] = { + s.pending.set(id, { info, resolve, reject, - } + }) Bus.publish(Event.Asked, info) }) } @@ -172,15 +171,15 @@ export namespace PermissionNext { export const reply = fn( z.object({ - requestID: Identifier.schema("permission"), + requestID: PermissionID.zod, reply: Reply, message: z.string().optional(), }), async (input) => { const s = await state() - const existing = s.pending[input.requestID] + const existing = s.pending.get(input.requestID) if (!existing) return - delete s.pending[input.requestID] + s.pending.delete(input.requestID) Bus.publish(Event.Replied, { sessionID: existing.info.sessionID, requestID: existing.info.id, @@ -198,9 +197,9 @@ export namespace PermissionNext { existing.reject(input.message ? new CorrectedError(input.message) : new RejectedError()) // Reject all other pending permissions for this session const sessionID = existing.info.sessionID - for (const [id, pending] of Object.entries(s.pending)) { + for (const [id, pending] of s.pending) { if (pending.info.sessionID === sessionID) { - delete s.pending[id] + s.pending.delete(id) Bus.publish(Event.Replied, { sessionID: pending.info.sessionID, requestID: pending.info.id, @@ -227,13 +226,13 @@ export namespace PermissionNext { existing.resolve() const sessionID = existing.info.sessionID - for (const [id, pending] of Object.entries(s.pending)) { + for (const [id, pending] of s.pending) { if (pending.info.sessionID !== sessionID) continue const ok = pending.info.patterns.every( (pattern) => evaluate(pending.info.permission, pattern, s.approved).action === "allow", ) if (!ok) continue - delete s.pending[id] + s.pending.delete(id) Bus.publish(Event.Replied, { sessionID: pending.info.sessionID, requestID: pending.info.id, @@ -299,6 +298,6 @@ export namespace PermissionNext { export async function list() { 
const s = await state() - return Object.values(s.pending).map((x) => x.info) + return Array.from(s.pending.values(), (x) => x.info) } } diff --git a/packages/opencode/src/permission/schema.ts b/packages/opencode/src/permission/schema.ts new file mode 100644 index 0000000000..c3242b714a --- /dev/null +++ b/packages/opencode/src/permission/schema.ts @@ -0,0 +1,17 @@ +import { Schema } from "effect" +import z from "zod" + +import { Identifier } from "@/id/id" +import { withStatics } from "@/util/schema" + +const permissionIdSchema = Schema.String.pipe(Schema.brand("PermissionID")) + +export type PermissionID = typeof permissionIdSchema.Type + +export const PermissionID = permissionIdSchema.pipe( + withStatics((schema: typeof permissionIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + ascending: (id?: string) => schema.makeUnsafe(Identifier.ascending("permission", id)), + zod: Identifier.schema("permission").pipe(z.custom<PermissionID>()), + })), +) diff --git a/packages/opencode/src/plugin/codex.ts b/packages/opencode/src/plugin/codex.ts index de9911f377..ac49bf22a4 100644 --- a/packages/opencode/src/plugin/codex.ts +++ b/packages/opencode/src/plugin/codex.ts @@ -4,6 +4,7 @@ import { Installation } from "../installation" import { Auth, OAUTH_DUMMY_KEY } from "../auth" import os from "os" import { ProviderTransform } from "@/provider/transform" +import { ModelID, ProviderID } from "@/provider/schema" import { setTimeout as sleep } from "node:timers/promises" const log = Log.create({ service: "plugin.codex" }) @@ -392,8 +393,8 @@ export async function CodexAuthPlugin(input: PluginInput): Promise<Hooks> { if (!provider.models["gpt-5.3-codex"]) { const model = { - id: "gpt-5.3-codex", - providerID: "openai", + id: ModelID.make("gpt-5.3-codex"), + providerID: ProviderID.openai, api: { id: "gpt-5.3-codex", url: "https://chatgpt.com/backend-api/codex", diff --git a/packages/opencode/src/plugin/index.ts b/packages/opencode/src/plugin/index.ts index 
0fe6e049b6..72f1fee34d 100644 --- a/packages/opencode/src/plugin/index.ts +++ b/packages/opencode/src/plugin/index.ts @@ -28,8 +28,12 @@ export namespace Plugin { const client = createOpencodeClient({ baseUrl: "http://localhost:4096", directory: Instance.directory, - // @ts-ignore - fetch type incompatibility - fetch: async (...args) => Server.App().fetch(...args), + headers: Flag.OPENCODE_SERVER_PASSWORD + ? { + Authorization: `Basic ${Buffer.from(`${Flag.OPENCODE_SERVER_USERNAME ?? "opencode"}:${Flag.OPENCODE_SERVER_PASSWORD}`).toString("base64")}`, + } + : undefined, + fetch: async (...args) => Server.Default().fetch(...args), }) const config = await Config.get() const hooks: Hooks[] = [] @@ -38,7 +42,9 @@ export namespace Plugin { project: Instance.project, worktree: Instance.worktree, directory: Instance.directory, - serverUrl: Server.url(), + get serverUrl(): URL { + return Server.url ?? new URL("http://localhost:4096") + }, $: Bun.$, } diff --git a/packages/opencode/src/project/instance.ts b/packages/opencode/src/project/instance.ts index 98031f18d3..dac5e71ba1 100644 --- a/packages/opencode/src/project/instance.ts +++ b/packages/opencode/src/project/instance.ts @@ -1,3 +1,4 @@ +import { Effect } from "effect" import { Log } from "@/util/log" import { Context } from "../util/context" import { Project } from "./project" @@ -5,6 +6,7 @@ import { State } from "./state" import { iife } from "@/util/iife" import { GlobalBus } from "@/bus/global" import { Filesystem } from "@/util/filesystem" +import { InstanceState } from "@/util/instance-state" interface Context { directory: string @@ -18,24 +20,61 @@ const disposal = { all: undefined as Promise<void> | undefined, } +function emit(directory: string) { + GlobalBus.emit("event", { + directory, + payload: { + type: "server.instance.disposed", + properties: { + directory, + }, + }, + }) +} + +function boot(input: { directory: string; init?: () => Promise<any>; project?: Project.Info; worktree?: string }) { + return 
iife(async () => { + const ctx = + input.project && input.worktree + ? { + directory: input.directory, + worktree: input.worktree, + project: input.project, + } + : await Project.fromDirectory(input.directory).then(({ project, sandbox }) => ({ + directory: input.directory, + worktree: sandbox, + project, + })) + await context.provide(ctx, async () => { + await input.init?.() + }) + return ctx + }) +} + +function track(directory: string, next: Promise<Context>) { + const task = next.catch((error) => { + if (cache.get(directory) === task) cache.delete(directory) + throw error + }) + cache.set(directory, task) + return task +} + export const Instance = { async provide<R>(input: { directory: string; init?: () => Promise<any>; fn: () => R }): Promise<R> { - let existing = cache.get(input.directory) + const directory = Filesystem.resolve(input.directory) + let existing = cache.get(directory) if (!existing) { - Log.Default.info("creating instance", { directory: input.directory }) - existing = iife(async () => { - const { project, sandbox } = await Project.fromDirectory(input.directory) - const ctx = { - directory: input.directory, - worktree: sandbox, - project, - } - await context.provide(ctx, async () => { - await input.init?.() - }) - return ctx - }) - cache.set(input.directory, existing) + Log.Default.info("creating instance", { directory }) + existing = track( + directory, + boot({ + directory, + init: input.init, + }), + ) } const ctx = await existing return context.provide(ctx, async () => { @@ -66,19 +105,20 @@ export const Instance = { state<S>(init: () => S, dispose?: (state: Awaited<S>) => Promise<void>): () => S { return State.create(() => Instance.directory, init, dispose) }, + async reload(input: { directory: string; init?: () => Promise<any>; project?: Project.Info; worktree?: string }) { + const directory = Filesystem.resolve(input.directory) + Log.Default.info("reloading instance", { directory }) + await Promise.all([State.dispose(directory), 
Effect.runPromise(InstanceState.dispose(directory))]) + cache.delete(directory) + const next = track(directory, boot({ ...input, directory })) + emit(directory) + return await next + }, async dispose() { Log.Default.info("disposing instance", { directory: Instance.directory }) - await State.dispose(Instance.directory) + await Promise.all([State.dispose(Instance.directory), Effect.runPromise(InstanceState.dispose(Instance.directory))]) cache.delete(Instance.directory) - GlobalBus.emit("event", { - directory: Instance.directory, - payload: { - type: "server.instance.disposed", - properties: { - directory: Instance.directory, - }, - }, - }) + emit(Instance.directory) }, async disposeAll() { if (disposal.all) return disposal.all diff --git a/packages/opencode/src/project/project.sql.ts b/packages/opencode/src/project/project.sql.ts index 12373244f5..efbc400b5e 100644 --- a/packages/opencode/src/project/project.sql.ts +++ b/packages/opencode/src/project/project.sql.ts @@ -1,8 +1,9 @@ import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core" -import { Timestamps } from "@/storage/schema.sql" +import { Timestamps } from "../storage/schema.sql" +import type { ProjectID } from "./schema" export const ProjectTable = sqliteTable("project", { - id: text().primaryKey(), + id: text().$type<ProjectID>().primaryKey(), worktree: text().notNull(), vcs: text(), name: text(), diff --git a/packages/opencode/src/project/project.ts b/packages/opencode/src/project/project.ts index aee8a26481..b0b1a848cc 100644 --- a/packages/opencode/src/project/project.ts +++ b/packages/opencode/src/project/project.ts @@ -1,12 +1,11 @@ import z from "zod" import { Filesystem } from "../util/filesystem" import path from "path" -import { Database, eq } from "../storage/db" +import { and, Database, eq } from "../storage/db" import { ProjectTable } from "./project.sql" import { SessionTable } from "../session/session.sql" import { Log } from "../util/log" import { Flag } from "@/flag/flag" -import 
{ work } from "../util/queue" import { fn } from "@opencode-ai/util/fn" import { BusEvent } from "@/bus/bus-event" import { iife } from "@/util/iife" @@ -15,6 +14,7 @@ import { existsSync } from "fs" import { git } from "../util/git" import { Glob } from "../util/glob" import { which } from "../util/which" +import { ProjectID } from "./schema" export namespace Project { const log = Log.create({ service: "project" }) @@ -33,7 +33,7 @@ export namespace Project { export const Info = z .object({ - id: z.string(), + id: ProjectID.zod, worktree: z.string(), vcs: z.literal("git").optional(), name: z.string().optional(), @@ -73,7 +73,7 @@ export namespace Project { ? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined } : undefined return { - id: row.id, + id: ProjectID.make(row.id), worktree: row.worktree, vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined, name: row.name ?? undefined, @@ -88,6 +88,20 @@ export namespace Project { } } + // altimate_change start — support legacy .git/altimate-code project ID cache + function readCachedId(dir: string) { + return Filesystem.readText(path.join(dir, "opencode")) + .then((x) => x.trim()) + .then(ProjectID.make) + .catch(() => + Filesystem.readText(path.join(dir, "altimate-code")) + .then((x) => x.trim()) + .then(ProjectID.make) + .catch(() => undefined), + ) + } + // altimate_change end + export async function fromDirectory(directory: string) { log.info("fromDirectory", { directory }) @@ -101,23 +115,43 @@ export namespace Project { const gitBinary = which("git") // cached id calculation - let id = await Filesystem.readText(path.join(dotgit, "altimate")) - .then((x) => x.trim()) - .catch(() => - Filesystem.readText(path.join(dotgit, "altimate-code")) - .then((x) => x.trim()) - .catch(() => undefined), - ) + let id = await readCachedId(dotgit) if (!gitBinary) { return { - id: id ?? "global", + id: id ?? 
ProjectID.global, worktree: sandbox, - sandbox: sandbox, + sandbox, vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS), } } + const worktree = await git(["rev-parse", "--git-common-dir"], { + cwd: sandbox, + }) + .then(async (result) => { + const common = gitpath(sandbox, await result.text()) + // Avoid going to parent of sandbox when git-common-dir is empty. + return common === sandbox ? sandbox : path.dirname(common) + }) + .catch(() => undefined) + + if (!worktree) { + return { + id: id ?? ProjectID.global, + worktree: sandbox, + sandbox, + vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS), + } + } + + // In the case of a git worktree, it can't cache the id + // because `.git` is not a folder, but it always needs the + // same project id as the common dir, so we resolve it now + if (id == null) { + id = await readCachedId(path.join(worktree, ".git")) + } + // generate id from root commit if (!id) { const roots = await git(["rev-list", "--max-parents=0", "--all"], { @@ -134,24 +168,24 @@ export namespace Project { if (!roots) { return { - id: "global", + id: ProjectID.global, worktree: sandbox, - sandbox: sandbox, + sandbox, vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS), } } - id = roots[0] + id = roots[0] ? ProjectID.make(roots[0]) : undefined if (id) { - void Filesystem.write(path.join(dotgit, "altimate"), id).catch(() => undefined) + await Filesystem.write(path.join(dotgit, "opencode"), id).catch(() => undefined) } } if (!id) { return { - id: "global", + id: ProjectID.global, worktree: sandbox, - sandbox: sandbox, + sandbox, vcs: "git", } } @@ -165,33 +199,14 @@ export namespace Project { if (!top) { return { id, - sandbox, worktree: sandbox, + sandbox, vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS), } } sandbox = top - const worktree = await git(["rev-parse", "--git-common-dir"], { - cwd: sandbox, - }) - .then(async (result) => { - const common = gitpath(sandbox, await result.text()) - // Avoid going to parent of sandbox when git-common-dir is empty. 
- return common === sandbox ? sandbox : path.dirname(common) - }) - .catch(() => undefined) - - if (!worktree) { - return { - id, - sandbox, - worktree: sandbox, - vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS), - } - } - return { id, sandbox, @@ -201,7 +216,7 @@ export namespace Project { } return { - id: "global", + id: ProjectID.global, worktree: "/", sandbox: "/", vcs: Info.shape.vcs.parse(Flag.OPENCODE_FAKE_VCS), @@ -209,23 +224,18 @@ export namespace Project { }) const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, data.id)).get()) - const existing = await iife(async () => { - if (row) return fromRow(row) - const fresh: Info = { - id: data.id, - worktree: data.worktree, - vcs: data.vcs as Info["vcs"], - sandboxes: [], - time: { - created: Date.now(), - updated: Date.now(), - }, - } - if (data.id !== "global") { - await migrateFromGlobal(data.id, data.worktree) - } - return fresh - }) + const existing = row + ? fromRow(row) + : { + id: data.id, + worktree: data.worktree, + vcs: data.vcs as Info["vcs"], + sandboxes: [] as string[], + time: { + created: Date.now(), + updated: Date.now(), + }, + } if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing) @@ -268,6 +278,18 @@ export namespace Project { Database.use((db) => db.insert(ProjectTable).values(insert).onConflictDoUpdate({ target: ProjectTable.id, set: updateSet }).run(), ) + // Runs after upsert so the target project row exists (FK constraint). + // Runs on every startup because sessions created before git init + // accumulate under "global" and need migrating whenever they appear. 
+ if (data.id !== ProjectID.global) { + Database.use((db) => + db + .update(SessionTable) + .set({ project_id: data.id }) + .where(and(eq(SessionTable.project_id, ProjectID.global), eq(SessionTable.directory, data.worktree))) + .run(), + ) + } GlobalBus.emit("event", { payload: { type: Event.Updated.type, @@ -301,29 +323,7 @@ export namespace Project { return } - async function migrateFromGlobal(id: string, worktree: string) { - const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get()) - if (!row) return - - const sessions = Database.use((db) => - db.select().from(SessionTable).where(eq(SessionTable.project_id, "global")).all(), - ) - if (sessions.length === 0) return - - log.info("migrating sessions from global", { newProjectID: id, worktree, count: sessions.length }) - - await work(10, sessions, async (row) => { - // Skip sessions that belong to a different directory - if (row.directory && row.directory !== worktree) return - - log.info("migrating session", { sessionID: row.id, from: "global", to: id }) - Database.use((db) => db.update(SessionTable).set({ project_id: id }).where(eq(SessionTable.id, row.id)).run()) - }).catch((error) => { - log.error("failed to migrate sessions from global to project", { error, projectId: id }) - }) - } - - export function setInitialized(id: string) { + export function setInitialized(id: ProjectID) { Database.use((db) => db .update(ProjectTable) @@ -345,20 +345,36 @@ export namespace Project { ) } - export function get(id: string): Info | undefined { + export function get(id: ProjectID): Info | undefined { const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get()) if (!row) return undefined return fromRow(row) } + export async function initGit(input: { directory: string; project: Info }) { + if (input.project.vcs === "git") return input.project + if (!which("git")) throw new Error("Git is not installed") + + const result = await 
git(["init", "--quiet"], { + cwd: input.directory, + }) + if (result.exitCode !== 0) { + const text = result.stderr.toString().trim() || result.text().trim() + throw new Error(text || "Failed to initialize git repository") + } + + return (await fromDirectory(input.directory)).project + } + export const update = fn( z.object({ - projectID: z.string(), + projectID: ProjectID.zod, name: z.string().optional(), icon: Info.shape.icon.optional(), commands: Info.shape.commands.optional(), }), async (input) => { + const id = ProjectID.make(input.projectID) const result = Database.use((db) => db .update(ProjectTable) @@ -369,7 +385,7 @@ export namespace Project { commands: input.commands, time_updated: Date.now(), }) - .where(eq(ProjectTable.id, input.projectID)) + .where(eq(ProjectTable.id, id)) .returning() .get(), ) @@ -385,7 +401,7 @@ export namespace Project { }, ) - export async function sandboxes(id: string) { + export async function sandboxes(id: ProjectID) { const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get()) if (!row) return [] const data = fromRow(row) @@ -397,7 +413,7 @@ export namespace Project { return valid } - export async function addSandbox(id: string, directory: string) { + export async function addSandbox(id: ProjectID, directory: string) { const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get()) if (!row) throw new Error(`Project not found: ${id}`) const sandboxes = [...row.sandboxes] @@ -421,7 +437,7 @@ export namespace Project { return data } - export async function removeSandbox(id: string, directory: string) { + export async function removeSandbox(id: ProjectID, directory: string) { const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get()) if (!row) throw new Error(`Project not found: ${id}`) const sandboxes = row.sandboxes.filter((s) => s !== directory) diff --git a/packages/opencode/src/project/schema.ts 
b/packages/opencode/src/project/schema.ts new file mode 100644 index 0000000000..e904ff5a84 --- /dev/null +++ b/packages/opencode/src/project/schema.ts @@ -0,0 +1,16 @@ +import { Schema } from "effect" +import z from "zod" + +import { withStatics } from "@/util/schema" + +const projectIdSchema = Schema.String.pipe(Schema.brand("ProjectID")) + +export type ProjectID = typeof projectIdSchema.Type + +export const ProjectID = projectIdSchema.pipe( + withStatics((schema: typeof projectIdSchema) => ({ + global: schema.makeUnsafe("global"), + make: (id: string) => schema.makeUnsafe(id), + zod: z.string().pipe(z.custom<ProjectID>()), + })), +) diff --git a/packages/opencode/src/project/vcs.ts b/packages/opencode/src/project/vcs.ts index e434b5f8c3..34d5905431 100644 --- a/packages/opencode/src/project/vcs.ts +++ b/packages/opencode/src/project/vcs.ts @@ -1,11 +1,11 @@ import { BusEvent } from "@/bus/bus-event" import { Bus } from "@/bus" -import { $ } from "bun" import path from "path" import z from "zod" import { Log } from "@/util/log" import { Instance } from "./instance" import { FileWatcher } from "@/file/watcher" +import { git } from "@/util/git" const log = Log.create({ service: "vcs" }) @@ -29,13 +29,13 @@ export namespace Vcs { export type Info = z.infer<typeof Info> async function currentBranch() { - return $`git rev-parse --abbrev-ref HEAD` - .quiet() - .nothrow() - .cwd(Instance.worktree) - .text() - .then((x) => x.trim()) - .catch(() => undefined) + const result = await git(["rev-parse", "--abbrev-ref", "HEAD"], { + cwd: Instance.worktree, + }) + if (result.exitCode !== 0) return + const text = result.text().trim() + if (!text) return + return text } const state = Instance.state( diff --git a/packages/opencode/src/provider/auth-service.ts b/packages/opencode/src/provider/auth-service.ts new file mode 100644 index 0000000000..4b5ac1777a --- /dev/null +++ b/packages/opencode/src/provider/auth-service.ts @@ -0,0 +1,169 @@ +import { Effect, Layer, Record, 
ServiceMap, Struct } from "effect" +import { Instance } from "@/project/instance" +import { Plugin } from "../plugin" +import { filter, fromEntries, map, pipe } from "remeda" +import type { AuthOuathResult } from "@opencode-ai/plugin" +import { NamedError } from "@opencode-ai/util/error" +import * as Auth from "@/auth/service" +import { InstanceState } from "@/util/instance-state" +import { ProviderID } from "./schema" +import z from "zod" + +export const Method = z + .object({ + type: z.union([z.literal("oauth"), z.literal("api")]), + label: z.string(), + }) + .meta({ + ref: "ProviderAuthMethod", + }) +export type Method = z.infer<typeof Method> + +export const Authorization = z + .object({ + url: z.string(), + method: z.union([z.literal("auto"), z.literal("code")]), + instructions: z.string(), + }) + .meta({ + ref: "ProviderAuthAuthorization", + }) +export type Authorization = z.infer<typeof Authorization> + +export const OauthMissing = NamedError.create( + "ProviderAuthOauthMissing", + z.object({ + providerID: ProviderID.zod, + }), +) + +export const OauthCodeMissing = NamedError.create( + "ProviderAuthOauthCodeMissing", + z.object({ + providerID: ProviderID.zod, + }), +) + +export const OauthCallbackFailed = NamedError.create("ProviderAuthOauthCallbackFailed", z.object({})) + +export type ProviderAuthError = + | Auth.AuthServiceError + | InstanceType<typeof OauthMissing> + | InstanceType<typeof OauthCodeMissing> + | InstanceType<typeof OauthCallbackFailed> + +export namespace ProviderAuthService { + export interface Service { + /** Get available auth methods for each provider (e.g. OAuth, API key). */ + readonly methods: () => Effect.Effect<Record<string, Method[]>> + + /** Start an OAuth authorization flow for a provider. Returns the URL to redirect to. */ + readonly authorize: (input: { providerID: ProviderID; method: number }) => Effect.Effect<Authorization | undefined> + + /** Complete an OAuth flow after the user has authorized. 
Exchanges the code/callback for credentials. */ + readonly callback: (input: { + providerID: ProviderID + method: number + code?: string + }) => Effect.Effect<void, ProviderAuthError> + + /** Set an API key directly for a provider (no OAuth flow). */ + readonly api: (input: { providerID: ProviderID; key: string }) => Effect.Effect<void, Auth.AuthServiceError> + } +} + +export class ProviderAuthService extends ServiceMap.Service<ProviderAuthService, ProviderAuthService.Service>()( + "@opencode/ProviderAuth", +) { + static readonly layer = Layer.effect( + ProviderAuthService, + Effect.gen(function* () { + const auth = yield* Auth.AuthService + const state = yield* InstanceState.make({ + lookup: () => + Effect.promise(async () => { + const methods = pipe( + await Plugin.list(), + filter((x) => x.auth?.provider !== undefined), + map((x) => [x.auth!.provider, x.auth!] as const), + fromEntries(), + ) + return { methods, pending: new Map<ProviderID, AuthOuathResult>() } + }), + }) + + const methods = Effect.fn("ProviderAuthService.methods")(function* () { + const x = yield* InstanceState.get(state) + return Record.map(x.methods, (y) => y.methods.map((z): Method => Struct.pick(z, ["type", "label"]))) + }) + + const authorize = Effect.fn("ProviderAuthService.authorize")(function* (input: { + providerID: ProviderID + method: number + }) { + const s = yield* InstanceState.get(state) + const method = s.methods[input.providerID].methods[input.method] + if (method.type !== "oauth") return + const result = yield* Effect.promise(() => method.authorize()) + s.pending.set(input.providerID, result) + return { + url: result.url, + method: result.method, + instructions: result.instructions, + } + }) + + const callback = Effect.fn("ProviderAuthService.callback")(function* (input: { + providerID: ProviderID + method: number + code?: string + }) { + const s = yield* InstanceState.get(state) + const match = s.pending.get(input.providerID) + if (!match) return yield* Effect.fail(new 
OauthMissing({ providerID: input.providerID })) + + if (match.method === "code" && !input.code) + return yield* Effect.fail(new OauthCodeMissing({ providerID: input.providerID })) + + const result = yield* Effect.promise(() => + match.method === "code" ? match.callback(input.code!) : match.callback(), + ) + + if (!result || result.type !== "success") return yield* Effect.fail(new OauthCallbackFailed({})) + + if ("key" in result) { + yield* auth.set(input.providerID, { + type: "api", + key: result.key, + }) + } + + if ("refresh" in result) { + yield* auth.set(input.providerID, { + type: "oauth", + access: result.access, + refresh: result.refresh, + expires: result.expires, + ...(result.accountId ? { accountId: result.accountId } : {}), + }) + } + }) + + const api = Effect.fn("ProviderAuthService.api")(function* (input: { providerID: ProviderID; key: string }) { + yield* auth.set(input.providerID, { + type: "api", + key: input.key, + }) + }) + + return ProviderAuthService.of({ + methods, + authorize, + callback, + api, + }) + }), + ) + + static readonly defaultLayer = ProviderAuthService.layer.pipe(Layer.provide(Auth.AuthService.defaultLayer)) +} diff --git a/packages/opencode/src/provider/auth.ts b/packages/opencode/src/provider/auth.ts index e6681ff089..095c6e57ed 100644 --- a/packages/opencode/src/provider/auth.ts +++ b/packages/opencode/src/provider/auth.ts @@ -1,147 +1,56 @@ -import { Instance } from "@/project/instance" -import { Plugin } from "../plugin" -import { map, filter, pipe, fromEntries, mapValues } from "remeda" +import { Effect, ManagedRuntime } from "effect" import z from "zod" + import { fn } from "@/util/fn" -import type { AuthOuathResult, Hooks } from "@opencode-ai/plugin" -import { NamedError } from "@opencode-ai/util/error" -import { Auth } from "@/auth" +import * as S from "./auth-service" +import { ProviderID } from "./schema" -export namespace ProviderAuth { - const state = Instance.state(async () => { - const methods = pipe( - await 
Plugin.list(), - filter((x) => x.auth?.provider !== undefined), - map((x) => [x.auth!.provider, x.auth!] as const), - fromEntries(), - ) - return { methods, pending: {} as Record<string, AuthOuathResult> } - }) +// Separate runtime: ProviderAuthService can't join the shared runtime because +// runtime.ts → auth-service.ts → provider/auth.ts creates a circular import. +// AuthService is stateless file I/O so the duplicate instance is harmless. +const rt = ManagedRuntime.make(S.ProviderAuthService.defaultLayer) + +function runPromise<A>(f: (service: S.ProviderAuthService.Service) => Effect.Effect<A, S.ProviderAuthError>) { + return rt.runPromise(S.ProviderAuthService.use(f)) +} - export const Method = z - .object({ - type: z.union([z.literal("oauth"), z.literal("api")]), - label: z.string(), - }) - .meta({ - ref: "ProviderAuthMethod", - }) - export type Method = z.infer<typeof Method> +export namespace ProviderAuth { + export const Method = S.Method + export type Method = S.Method export async function methods() { - const s = await state().then((x) => x.methods) - return mapValues(s, (x) => - x.methods.map( - (y): Method => ({ - type: y.type, - label: y.label, - }), - ), - ) + return runPromise((service) => service.methods()) } - export const Authorization = z - .object({ - url: z.string(), - method: z.union([z.literal("auto"), z.literal("code")]), - instructions: z.string(), - }) - .meta({ - ref: "ProviderAuthAuthorization", - }) - export type Authorization = z.infer<typeof Authorization> + export const Authorization = S.Authorization + export type Authorization = S.Authorization export const authorize = fn( z.object({ - providerID: z.string(), + providerID: ProviderID.zod, method: z.number(), }), - async (input): Promise<Authorization | undefined> => { - const auth = await state().then((s) => s.methods[input.providerID]) - const method = auth.methods[input.method] - if (method.type === "oauth") { - const result = await method.authorize() - await state().then((s) => 
(s.pending[input.providerID] = result)) - return { - url: result.url, - method: result.method, - instructions: result.instructions, - } - } - }, + async (input): Promise<Authorization | undefined> => runPromise((service) => service.authorize(input)), ) export const callback = fn( z.object({ - providerID: z.string(), + providerID: ProviderID.zod, method: z.number(), code: z.string().optional(), }), - async (input) => { - const match = await state().then((s) => s.pending[input.providerID]) - if (!match) throw new OauthMissing({ providerID: input.providerID }) - let result - - if (match.method === "code") { - if (!input.code) throw new OauthCodeMissing({ providerID: input.providerID }) - result = await match.callback(input.code) - } - - if (match.method === "auto") { - result = await match.callback() - } - - if (result?.type === "success") { - if ("key" in result) { - await Auth.set(input.providerID, { - type: "api", - key: result.key, - }) - } - if ("refresh" in result) { - const info: Auth.Info = { - type: "oauth", - access: result.access, - refresh: result.refresh, - expires: result.expires, - } - if (result.accountId) { - info.accountId = result.accountId - } - await Auth.set(input.providerID, info) - } - return - } - - throw new OauthCallbackFailed({}) - }, + async (input) => runPromise((service) => service.callback(input)), ) export const api = fn( z.object({ - providerID: z.string(), + providerID: ProviderID.zod, key: z.string(), }), - async (input) => { - await Auth.set(input.providerID, { - type: "api", - key: input.key, - }) - }, - ) - - export const OauthMissing = NamedError.create( - "ProviderAuthOauthMissing", - z.object({ - providerID: z.string(), - }), - ) - export const OauthCodeMissing = NamedError.create( - "ProviderAuthOauthCodeMissing", - z.object({ - providerID: z.string(), - }), + async (input) => runPromise((service) => service.api(input)), ) - export const OauthCallbackFailed = NamedError.create("ProviderAuthOauthCallbackFailed", z.object({})) 
+ export import OauthMissing = S.OauthMissing + export import OauthCodeMissing = S.OauthCodeMissing + export import OauthCallbackFailed = S.OauthCallbackFailed } diff --git a/packages/opencode/src/provider/error.ts b/packages/opencode/src/provider/error.ts index 82fcb18368..6d8599a401 100644 --- a/packages/opencode/src/provider/error.ts +++ b/packages/opencode/src/provider/error.ts @@ -1,6 +1,7 @@ import { APICallError } from "ai" import { STATUS_CODES } from "http" import { iife } from "@/util/iife" +import type { ProviderID } from "./schema" export namespace ProviderError { // Adapted from overflow detection patterns in: @@ -42,15 +43,7 @@ export namespace ProviderError { return /^4(00|13)\s*(status code)?\s*\(no body\)/i.test(message) } - function error(providerID: string, error: APICallError) { - if (providerID.includes("github-copilot") && error.statusCode === 403) { - return "Please reauthenticate with the copilot provider to ensure your credentials work properly with OpenCode." - } - - return error.message - } - - function message(providerID: string, e: APICallError) { + function message(providerID: ProviderID, e: APICallError) { return iife(() => { const msg = e.message if (msg === "") { @@ -62,10 +55,6 @@ export namespace ProviderError { return "Unknown error" } - const transformed = error(providerID, e) - if (transformed !== msg) { - return transformed - } if (!e.responseBody || (e.statusCode && msg !== STATUS_CODES[e.statusCode])) { return msg } @@ -178,7 +167,7 @@ export namespace ProviderError { metadata?: Record<string, string> } - export function parseAPICallError(input: { providerID: string; error: APICallError }): ParsedAPICallError { + export function parseAPICallError(input: { providerID: ProviderID; error: APICallError }): ParsedAPICallError { const m = message(input.providerID, input.error) if (isOverflow(m) || input.error.statusCode === 413) { return { diff --git a/packages/opencode/src/provider/provider.ts 
b/packages/opencode/src/provider/provider.ts index a839e4ad6c..eaca075bfc 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -45,43 +45,64 @@ import { fromNodeProviderChain } from "@aws-sdk/credential-providers" import { GoogleAuth } from "google-auth-library" import { ProviderTransform } from "./transform" import { Installation } from "../installation" +import { ModelID, ProviderID } from "./schema" + +const DEFAULT_CHUNK_TIMEOUT = 120_000 export namespace Provider { const log = Log.create({ service: "provider" }) - function isGpt5OrLater(modelID: string): boolean { + function shouldUseCopilotResponsesApi(modelID: string): boolean { const match = /^gpt-(\d+)/.exec(modelID) - if (!match) { - return false - } - return Number(match[1]) >= 5 + if (!match) return false + return Number(match[1]) >= 5 && !modelID.startsWith("gpt-5-mini") } - function shouldUseCopilotResponsesApi(modelID: string): boolean { - return isGpt5OrLater(modelID) && !modelID.startsWith("gpt-5-mini") - } + function wrapSSE(res: Response, ms: number, ctl: AbortController) { + if (typeof ms !== "number" || ms <= 0) return res + if (!res.body) return res + if (!res.headers.get("content-type")?.includes("text/event-stream")) return res + + const reader = res.body.getReader() + const body = new ReadableStream<Uint8Array>({ + async pull(ctrl) { + const part = await new Promise<Awaited<ReturnType<typeof reader.read>>>((resolve, reject) => { + const id = setTimeout(() => { + const err = new Error("SSE read timed out") + ctl.abort(err) + void reader.cancel(err) + reject(err) + }, ms) + + reader.read().then( + (part) => { + clearTimeout(id) + resolve(part) + }, + (err) => { + clearTimeout(id) + reject(err) + }, + ) + }) - function googleVertexVars(options: Record<string, any>) { - const project = - options["project"] ?? Env.get("GOOGLE_CLOUD_PROJECT") ?? Env.get("GCP_PROJECT") ?? Env.get("GCLOUD_PROJECT") - const location = - options["location"] ?? 
Env.get("GOOGLE_CLOUD_LOCATION") ?? Env.get("VERTEX_LOCATION") ?? "us-central1" - const endpoint = location === "global" ? "aiplatform.googleapis.com" : `${location}-aiplatform.googleapis.com` + if (part.done) { + ctrl.close() + return + } - return { - GOOGLE_VERTEX_PROJECT: project, - GOOGLE_VERTEX_LOCATION: location, - GOOGLE_VERTEX_ENDPOINT: endpoint, - } - } + ctrl.enqueue(part.value) + }, + async cancel(reason) { + ctl.abort(reason) + await reader.cancel(reason) + }, + }) - function loadBaseURL(model: Model, options: Record<string, any>) { - const raw = options["baseURL"] ?? model.api.url - if (typeof raw !== "string") return raw - const vars = model.providerID === "google-vertex" ? googleVertexVars(options) : undefined - return raw.replace(/\$\{([^}]+)\}/g, (match, key) => { - const val = Env.get(String(key)) ?? vars?.[String(key) as keyof typeof vars] - return val ?? match + return new Response(body, { + headers: new Headers(res.headers), + status: res.status, + statusText: res.statusText, }) } @@ -111,12 +132,18 @@ export namespace Provider { } type CustomModelLoader = (sdk: any, modelID: string, options?: Record<string, any>) => Promise<any> + type CustomVarsLoader = (options: Record<string, any>) => Record<string, string> type CustomLoader = (provider: Info) => Promise<{ autoload: boolean getModel?: CustomModelLoader + vars?: CustomVarsLoader options?: Record<string, any> }> + function useLanguageModel(sdk: any) { + return sdk.responses === undefined && sdk.chat === undefined + } + const CUSTOM_LOADERS: Record<string, CustomLoader> = { async anthropic() { return { @@ -129,13 +156,13 @@ export namespace Provider { }, } }, - async "altimate-code"(input) { + async opencode(input) { const hasKey = await (async () => { const env = Env.all() if (input.env.some((item) => env[item])) return true if (await Auth.get(input.id)) return true const config = await Config.get() - if (config.provider?.["altimate-code"]?.options?.apiKey) return true + if 
(config.provider?.["opencode"]?.options?.apiKey) return true return false })() @@ -164,7 +191,7 @@ export namespace Provider { return { autoload: false, async getModel(sdk: any, modelID: string, _options?: Record<string, any>) { - if (sdk.responses === undefined && sdk.chat === undefined) return sdk.languageModel(modelID) + if (useLanguageModel(sdk)) return sdk.languageModel(modelID) return shouldUseCopilotResponsesApi(modelID) ? sdk.responses(modelID) : sdk.chat(modelID) }, options: {}, @@ -174,16 +201,23 @@ export namespace Provider { return { autoload: false, async getModel(sdk: any, modelID: string, _options?: Record<string, any>) { - if (sdk.responses === undefined && sdk.chat === undefined) return sdk.languageModel(modelID) + if (useLanguageModel(sdk)) return sdk.languageModel(modelID) return shouldUseCopilotResponsesApi(modelID) ? sdk.responses(modelID) : sdk.chat(modelID) }, options: {}, } }, - azure: async () => { + azure: async (provider) => { + const resource = iife(() => { + const name = provider.options?.resourceName + if (typeof name === "string" && name.trim() !== "") return name + return Env.get("AZURE_RESOURCE_NAME") + }) + return { autoload: false, async getModel(sdk: any, modelID: string, options?: Record<string, any>) { + if (useLanguageModel(sdk)) return sdk.languageModel(modelID) if (options?.["useCompletionUrls"]) { return sdk.chat(modelID) } else { @@ -191,6 +225,11 @@ export namespace Provider { } }, options: {}, + vars(_options) { + return { + ...(resource && { AZURE_RESOURCE_NAME: resource }), + } + }, } }, "azure-cognitive-services": async () => { @@ -198,6 +237,7 @@ export namespace Provider { return { autoload: false, async getModel(sdk: any, modelID: string, options?: Record<string, any>) { + if (useLanguageModel(sdk)) return sdk.languageModel(modelID) if (options?.["useCompletionUrls"]) { return sdk.chat(modelID) } else { @@ -279,7 +319,7 @@ export namespace Provider { } // Region resolution precedence (highest to lowest): - // 1. 
options.region from altimate-code.json provider config + // 1. options.region from opencode.json provider config // 2. defaultRegion from AWS_REGION environment variable // 3. Default "us-east-1" (baked into defaultRegion) const region = options?.region ?? defaultRegion @@ -362,8 +402,8 @@ export namespace Provider { autoload: false, options: { headers: { - "HTTP-Referer": "https://altimate-code.dev/", - "X-Title": "altimate-code", + "HTTP-Referer": "https://altimate.ai/", + "X-Title": "opencode", }, }, } @@ -373,8 +413,8 @@ export namespace Provider { autoload: false, options: { headers: { - "http-referer": "https://altimate-code.dev/", - "x-title": "altimate-code", + "http-referer": "https://altimate.ai/", + "x-title": "opencode", }, }, } @@ -386,13 +426,26 @@ export namespace Provider { Env.get("GCP_PROJECT") ?? Env.get("GCLOUD_PROJECT") - const location = - provider.options?.location ?? Env.get("GOOGLE_CLOUD_LOCATION") ?? Env.get("VERTEX_LOCATION") ?? "us-central1" + const location = String( + provider.options?.location ?? + Env.get("GOOGLE_VERTEX_LOCATION") ?? + Env.get("GOOGLE_CLOUD_LOCATION") ?? + Env.get("VERTEX_LOCATION") ?? + "us-central1", + ) const autoload = Boolean(project) if (!autoload) return { autoload: false } return { autoload: true, + vars(_options: Record<string, any>) { + const endpoint = location === "global" ? 
"aiplatform.googleapis.com" : `${location}-aiplatform.googleapis.com` + return { + ...(project && { GOOGLE_VERTEX_PROJECT: project }), + GOOGLE_VERTEX_LOCATION: location, + GOOGLE_VERTEX_ENDPOINT: endpoint, + } + }, options: { project, location, @@ -459,8 +512,8 @@ export namespace Provider { autoload: false, options: { headers: { - "HTTP-Referer": "https://altimate-code.dev/", - "X-Title": "altimate-code", + "HTTP-Referer": "https://altimate.ai/", + "X-Title": "opencode", }, }, } @@ -479,7 +532,8 @@ export namespace Provider { const providerConfig = config.provider?.["gitlab"] const aiGatewayHeaders = { - "User-Agent": `altimate-code/${Installation.VERSION} gitlab-ai-provider/${GITLAB_PROVIDER_VERSION} (${os.platform()} ${os.release()}; ${os.arch()})`, + "User-Agent": `opencode/${Installation.VERSION} gitlab-ai-provider/${GITLAB_PROVIDER_VERSION} (${os.platform()} ${os.release()}; ${os.arch()})`, + "anthropic-beta": "context-1m-2025-08-07", ...(providerConfig?.options?.aiGatewayHeaders || {}), } @@ -523,11 +577,15 @@ export namespace Provider { autoload: !!apiKey, options: { apiKey, - baseURL: `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/v1`, }, async getModel(sdk: any, modelID: string) { return sdk.languageModel(modelID) }, + vars(_options) { + return { + CLOUDFLARE_ACCOUNT_ID: accountId, + } + }, } }, "cloudflare-ai-gateway": async (input) => { @@ -548,7 +606,7 @@ export namespace Provider { if (!apiToken) { throw new Error( "CLOUDFLARE_API_TOKEN (or CF_AIG_TOKEN) is required for Cloudflare AI Gateway. 
" + - "Set it via environment variable or run `altimate auth cloudflare-ai-gateway`.", + "Set it via environment variable or run `opencode auth cloudflare-ai-gateway`.", ) } @@ -556,7 +614,28 @@ export namespace Provider { const { createAiGateway } = await import("ai-gateway-provider") const { createUnified } = await import("ai-gateway-provider/providers/unified") - const aigateway = createAiGateway({ accountId, gateway, apiKey: apiToken }) + const metadata = iife(() => { + if (input.options?.metadata) return input.options.metadata + try { + return JSON.parse(input.options?.headers?.["cf-aig-metadata"]) + } catch { + return undefined + } + }) + const opts = { + metadata, + cacheTtl: input.options?.cacheTtl, + cacheKey: input.options?.cacheKey, + skipCache: input.options?.skipCache, + collectLog: input.options?.collectLog, + } + + const aigateway = createAiGateway({ + accountId, + gateway, + apiKey: apiToken, + ...(Object.values(opts).some((v) => v !== undefined) ? { options: opts } : {}), + }) const unified = createUnified() return { @@ -573,7 +652,7 @@ export namespace Provider { autoload: false, options: { headers: { - "X-Cerebras-3rd-Party-Integration": "altimate-code", + "X-Cerebras-3rd-Party-Integration": "opencode", }, }, } @@ -583,8 +662,8 @@ export namespace Provider { autoload: false, options: { headers: { - "HTTP-Referer": "https://altimate-code.dev/", - "X-Title": "altimate-code", + "HTTP-Referer": "https://altimate.ai/", + "X-Title": "opencode", }, }, } @@ -593,8 +672,8 @@ export namespace Provider { export const Model = z .object({ - id: z.string(), - providerID: z.string(), + id: ModelID.zod, + providerID: ProviderID.zod, api: z.object({ id: z.string(), url: z.string(), @@ -664,7 +743,7 @@ export namespace Provider { export const Info = z .object({ - id: z.string(), + id: ProviderID.zod, name: z.string(), source: z.enum(["env", "config", "custom", "api"]), env: z.string().array(), @@ -679,8 +758,8 @@ export namespace Provider { function 
fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model): Model { const m: Model = { - id: model.id, - providerID: provider.id, + id: ModelID.make(model.id), + providerID: ProviderID.make(provider.id), name: model.name, family: model.family, api: { @@ -746,7 +825,7 @@ export namespace Provider { export function fromModelsDevProvider(provider: ModelsDev.Provider): Info { return { - id: provider.id, + id: ProviderID.make(provider.id), source: "custom", name: provider.name, env: provider.env ?? [], @@ -764,7 +843,7 @@ export namespace Provider { const disabled = new Set(config.disabled_providers ?? []) const enabled = config.enabled_providers ? new Set(config.enabled_providers) : null - function isProviderAllowed(providerID: string): boolean { + function isProviderAllowed(providerID: ProviderID): boolean { if (enabled && !enabled.has(providerID)) return false if (disabled.has(providerID)) return false return true @@ -775,6 +854,9 @@ export namespace Provider { const modelLoaders: { [providerID: string]: CustomModelLoader } = {} + const varsLoaders: { + [providerID: string]: CustomVarsLoader + } = {} const sdk = new Map<string, SDK>() log.info("init") @@ -786,16 +868,16 @@ export namespace Provider { const githubCopilot = database["github-copilot"] database["github-copilot-enterprise"] = { ...githubCopilot, - id: "github-copilot-enterprise", + id: ProviderID.githubCopilotEnterprise, name: "GitHub Copilot Enterprise", models: mapValues(githubCopilot.models, (model) => ({ ...model, - providerID: "github-copilot-enterprise", + providerID: ProviderID.githubCopilotEnterprise, })), } } - function mergeProvider(providerID: string, provider: Partial<Info>) { + function mergeProvider(providerID: ProviderID, provider: Partial<Info>) { const existing = providers[providerID] if (existing) { // @ts-expect-error @@ -812,7 +894,7 @@ export namespace Provider { for (const [providerID, provider] of configProviders) { const existing = database[providerID] const parsed: 
Info = { - id: providerID, + id: ProviderID.make(providerID), name: provider.name ?? existing?.name ?? providerID, env: provider.env ?? existing?.env ?? [], options: mergeDeep(existing?.options ?? {}, provider.options ?? {}), @@ -828,7 +910,7 @@ export namespace Provider { return existingModel?.name ?? modelID }) const parsedModel: Model = { - id: modelID, + id: ModelID.make(modelID), api: { id: model.id ?? existingModel?.api.id ?? modelID, npm: @@ -841,7 +923,7 @@ export namespace Provider { }, status: model.status ?? existingModel?.status ?? "active", name, - providerID, + providerID: ProviderID.make(providerID), capabilities: { temperature: model.temperature ?? existingModel?.capabilities.temperature ?? false, reasoning: model.reasoning ?? existingModel?.capabilities.reasoning ?? false, @@ -893,7 +975,8 @@ export namespace Provider { // load env const env = Env.all() - for (const [providerID, provider] of Object.entries(database)) { + for (const [id, provider] of Object.entries(database)) { + const providerID = ProviderID.make(id) if (disabled.has(providerID)) continue const apiKey = provider.env.map((item) => env[item]).find(Boolean) if (!apiKey) continue @@ -904,7 +987,8 @@ export namespace Provider { } // load apikeys - for (const [providerID, provider] of Object.entries(await Auth.all())) { + for (const [id, provider] of Object.entries(await Auth.all())) { + const providerID = ProviderID.make(id) if (disabled.has(providerID)) continue if (provider.type === "api") { mergeProvider(providerID, { @@ -916,7 +1000,7 @@ export namespace Provider { for (const plugin of await Plugin.list()) { if (!plugin.auth) continue - const providerID = plugin.auth.provider + const providerID = ProviderID.make(plugin.auth.provider) if (disabled.has(providerID)) continue // For github-copilot plugin, check if auth exists for either github-copilot or github-copilot-enterprise @@ -925,7 +1009,7 @@ export namespace Provider { if (auth) hasAuth = true // Special handling for 
github-copilot: also check for enterprise auth - if (providerID === "github-copilot" && !hasAuth) { + if (providerID === ProviderID.githubCopilot && !hasAuth) { const enterpriseAuth = await Auth.get("github-copilot-enterprise") if (enterpriseAuth) hasAuth = true } @@ -942,8 +1026,8 @@ export namespace Provider { } // If this is github-copilot plugin, also register for github-copilot-enterprise if auth exists - if (providerID === "github-copilot") { - const enterpriseProviderID = "github-copilot-enterprise" + if (providerID === ProviderID.githubCopilot) { + const enterpriseProviderID = ProviderID.githubCopilotEnterprise if (!disabled.has(enterpriseProviderID)) { const enterpriseAuth = await Auth.get(enterpriseProviderID) if (enterpriseAuth) { @@ -961,7 +1045,8 @@ export namespace Provider { } } - for (const [providerID, fn] of Object.entries(CUSTOM_LOADERS)) { + for (const [id, fn] of Object.entries(CUSTOM_LOADERS)) { + const providerID = ProviderID.make(id) if (disabled.has(providerID)) continue const data = database[providerID] if (!data) { @@ -971,6 +1056,7 @@ export namespace Provider { const result = await fn(data) if (result && (result.autoload || providers[providerID])) { if (result.getModel) modelLoaders[providerID] = result.getModel + if (result.vars) varsLoaders[providerID] = result.vars const opts = result.options ?? {} const patch: Partial<Info> = providers[providerID] ? 
{ options: opts } : { source: "custom", options: opts } mergeProvider(providerID, patch) @@ -978,7 +1064,8 @@ export namespace Provider { } // load config - for (const [providerID, provider] of configProviders) { + for (const [id, provider] of configProviders) { + const providerID = ProviderID.make(id) const partial: Partial<Info> = { source: "config" } if (provider.env) partial.env = provider.env if (provider.name) partial.name = provider.name @@ -986,7 +1073,8 @@ export namespace Provider { mergeProvider(providerID, partial) } - for (const [providerID, provider] of Object.entries(providers)) { + for (const [id, provider] of Object.entries(providers)) { + const providerID = ProviderID.make(id) if (!isProviderAllowed(providerID)) { delete providers[providerID] continue @@ -996,7 +1084,10 @@ export namespace Provider { for (const [modelID, model] of Object.entries(provider.models)) { model.api.id = model.api.id ?? model.id ?? modelID - if (modelID === "gpt-5-chat-latest" || (providerID === "openrouter" && modelID === "openai/gpt-5-chat")) + if ( + modelID === "gpt-5-chat-latest" || + (providerID === ProviderID.openrouter && modelID === "openai/gpt-5-chat") + ) delete provider.models[modelID] if (model.status === "alpha" && !Flag.OPENCODE_ENABLE_EXPERIMENTAL_MODELS) delete provider.models[modelID] if (model.status === "deprecated") delete provider.models[modelID] @@ -1032,6 +1123,7 @@ export namespace Provider { providers, sdk, modelLoaders, + varsLoaders, } }) @@ -1056,7 +1148,30 @@ export namespace Provider { options["includeUsage"] = true } - const baseURL = loadBaseURL(model, options) + const baseURL = iife(() => { + let url = + typeof options["baseURL"] === "string" && options["baseURL"] !== "" ? 
options["baseURL"] : model.api.url + if (!url) return + + // some models/providers have variable urls, ex: "https://${AZURE_RESOURCE_NAME}.services.ai.azure.com/anthropic/v1" + // We track this in models.dev, and then when we are resolving the baseURL + // we need to string replace that literal: "${AZURE_RESOURCE_NAME}" + const loader = s.varsLoaders[model.providerID] + if (loader) { + const vars = loader(options) + for (const [key, value] of Object.entries(vars)) { + const field = "${" + key + "}" + url = url.replaceAll(field, value) + } + } + + url = url.replace(/\$\{([^}]+)\}/g, (item, key) => { + const val = Env.get(String(key)) + return val ?? item + }) + return url + }) + if (baseURL !== undefined) options["baseURL"] = baseURL if (options["apiKey"] === undefined && provider.key) options["apiKey"] = provider.key if (model.headers) @@ -1070,21 +1185,23 @@ export namespace Provider { if (existing) return existing const customFetch = options["fetch"] + const chunkTimeout = options["chunkTimeout"] || DEFAULT_CHUNK_TIMEOUT + delete options["chunkTimeout"] options["fetch"] = async (input: any, init?: BunFetchRequestInit) => { // Preserve custom fetch if it exists, wrap it with timeout logic const fetchFn = customFetch ?? fetch const opts = init ?? {} + const chunkAbortCtl = typeof chunkTimeout === "number" && chunkTimeout > 0 ? new AbortController() : undefined + const signals: AbortSignal[] = [] - if (options["timeout"] !== undefined && options["timeout"] !== null) { - const signals: AbortSignal[] = [] - if (opts.signal) signals.push(opts.signal) - if (options["timeout"] !== false) signals.push(AbortSignal.timeout(options["timeout"])) - - const combined = signals.length > 1 ? 
AbortSignal.any(signals) : signals[0] + if (opts.signal) signals.push(opts.signal) + if (chunkAbortCtl) signals.push(chunkAbortCtl.signal) + if (options["timeout"] !== undefined && options["timeout"] !== null && options["timeout"] !== false) + signals.push(AbortSignal.timeout(options["timeout"])) - opts.signal = combined - } + const combined = signals.length === 0 ? null : signals.length === 1 ? signals[0] : AbortSignal.any(signals) + if (combined) opts.signal = combined // Strip openai itemId metadata following what codex does // Codex uses #[serde(skip_serializing)] on id fields for all item types: @@ -1104,11 +1221,14 @@ export namespace Provider { } } - return fetchFn(input, { + const res = await fetchFn(input, { ...opts, // @ts-ignore see here: https://github.com/oven-sh/bun/issues/16682 timeout: false, }) + + if (!chunkAbortCtl) return res + return wrapSSE(res, chunkTimeout, chunkAbortCtl) } const bundledFn = BUNDLED_PROVIDERS[model.api.npm] @@ -1144,11 +1264,11 @@ export namespace Provider { } } - export async function getProvider(providerID: string) { + export async function getProvider(providerID: ProviderID) { return state().then((s) => s.providers[providerID]) } - export async function getModel(providerID: string, modelID: string) { + export async function getModel(providerID: ProviderID, modelID: ModelID) { const s = await state() const provider = s.providers[providerID] if (!provider) { @@ -1195,7 +1315,7 @@ export namespace Provider { } } - export async function closest(providerID: string, query: string[]) { + export async function closest(providerID: ProviderID, query: string[]) { const s = await state() const provider = s.providers[providerID] if (!provider) return undefined @@ -1210,7 +1330,7 @@ export namespace Provider { } } - export async function getSmallModel(providerID: string) { + export async function getSmallModel(providerID: ProviderID) { const cfg = await Config.get() if (cfg.small_model) { @@ -1229,7 +1349,7 @@ export namespace Provider 
{ "gemini-2.5-flash", "gpt-5-nano", ] - if (providerID.startsWith("altimate-code")) { + if (providerID.startsWith("opencode")) { priority = ["gpt-5-nano"] } if (providerID.startsWith("github-copilot")) { @@ -1237,7 +1357,7 @@ export namespace Provider { priority = ["gpt-5-mini", "claude-haiku-4.5", ...priority] } for (const item of priority) { - if (providerID === "amazon-bedrock") { + if (providerID === ProviderID.amazonBedrock) { const crossRegionPrefixes = ["global.", "us.", "eu."] const candidates = Object.keys(provider.models).filter((m) => m.includes(item)) @@ -1246,38 +1366,32 @@ export namespace Provider { // 2. User's region prefix (us., eu.) // 3. Unprefixed model const globalMatch = candidates.find((m) => m.startsWith("global.")) - if (globalMatch) return getModel(providerID, globalMatch) + if (globalMatch) return getModel(providerID, ModelID.make(globalMatch)) const region = provider.options?.region if (region) { const regionPrefix = region.split("-")[0] if (regionPrefix === "us" || regionPrefix === "eu") { const regionalMatch = candidates.find((m) => m.startsWith(`${regionPrefix}.`)) - if (regionalMatch) return getModel(providerID, regionalMatch) + if (regionalMatch) return getModel(providerID, ModelID.make(regionalMatch)) } } const unprefixed = candidates.find((m) => !crossRegionPrefixes.some((p) => m.startsWith(p))) - if (unprefixed) return getModel(providerID, unprefixed) + if (unprefixed) return getModel(providerID, ModelID.make(unprefixed)) } else { for (const model of Object.keys(provider.models)) { - if (model.includes(item)) return getModel(providerID, model) + if (model.includes(item)) return getModel(providerID, ModelID.make(model)) } } } } - // Check if altimate-code provider is available before using it - const altimateCodeProvider = await state().then((state) => state.providers["altimate-code"]) - if (altimateCodeProvider && altimateCodeProvider.models["gpt-5-nano"]) { - return getModel("altimate-code", "gpt-5-nano") - } - return undefined 
} const priority = ["gpt-5", "claude-sonnet-4", "big-pickle", "gemini-3-pro"] - export function sort(models: Model[]) { + export function sort<T extends { id: string }>(models: T[]) { return sortBy( models, [(model) => priority.findIndex((filter) => model.id.includes(filter)), "desc"], @@ -1291,11 +1405,11 @@ export namespace Provider { if (cfg.model) return parseModel(cfg.model) const providers = await list() - const recent = (await Filesystem.readJson<{ recent?: { providerID: string; modelID: string }[] }>( + const recent = (await Filesystem.readJson<{ recent?: { providerID: ProviderID; modelID: ModelID }[] }>( path.join(Global.Path.state, "model.json"), ) .then((x) => (Array.isArray(x.recent) ? x.recent : [])) - .catch(() => [])) as { providerID: string; modelID: string }[] + .catch(() => [])) as { providerID: ProviderID; modelID: ModelID }[] for (const entry of recent) { const provider = providers[entry.providerID] if (!provider) continue @@ -1316,16 +1430,16 @@ export namespace Provider { export function parseModel(model: string) { const [providerID, ...rest] = model.split("/") return { - providerID: providerID, - modelID: rest.join("/"), + providerID: ProviderID.make(providerID), + modelID: ModelID.make(rest.join("/")), } } export const ModelNotFoundError = NamedError.create( "ProviderModelNotFoundError", z.object({ - providerID: z.string(), - modelID: z.string(), + providerID: ProviderID.zod, + modelID: ModelID.zod, suggestions: z.array(z.string()).optional(), }), ) @@ -1333,7 +1447,7 @@ export namespace Provider { export const InitError = NamedError.create( "ProviderInitError", z.object({ - providerID: z.string(), + providerID: ProviderID.zod, }), ) } diff --git a/packages/opencode/src/provider/schema.ts b/packages/opencode/src/provider/schema.ts new file mode 100644 index 0000000000..9eac235ceb --- /dev/null +++ b/packages/opencode/src/provider/schema.ts @@ -0,0 +1,38 @@ +import { Schema } from "effect" +import z from "zod" + +import { withStatics } from 
"@/util/schema" + +const providerIdSchema = Schema.String.pipe(Schema.brand("ProviderID")) + +export type ProviderID = typeof providerIdSchema.Type + +export const ProviderID = providerIdSchema.pipe( + withStatics((schema: typeof providerIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + zod: z.string().pipe(z.custom<ProviderID>()), + // Well-known providers + opencode: schema.makeUnsafe("opencode"), + anthropic: schema.makeUnsafe("anthropic"), + openai: schema.makeUnsafe("openai"), + google: schema.makeUnsafe("google"), + googleVertex: schema.makeUnsafe("google-vertex"), + githubCopilot: schema.makeUnsafe("github-copilot"), + githubCopilotEnterprise: schema.makeUnsafe("github-copilot-enterprise"), + amazonBedrock: schema.makeUnsafe("amazon-bedrock"), + azure: schema.makeUnsafe("azure"), + openrouter: schema.makeUnsafe("openrouter"), + mistral: schema.makeUnsafe("mistral"), + })), +) + +const modelIdSchema = Schema.String.pipe(Schema.brand("ModelID")) + +export type ModelID = typeof modelIdSchema.Type + +export const ModelID = modelIdSchema.pipe( + withStatics((schema: typeof modelIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + zod: z.string().pipe(z.custom<ModelID>()), + })), +) diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index 6980be0518..05b9f031fe 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -51,7 +51,7 @@ export namespace ProviderTransform { ): ModelMessage[] { // Anthropic rejects messages with empty content - filter out empty string messages // and remove empty text/reasoning parts from array content - if (model.api.npm === "@ai-sdk/anthropic") { + if (model.api.npm === "@ai-sdk/anthropic" || model.api.npm === "@ai-sdk/amazon-bedrock") { msgs = msgs .map((msg) => { if (typeof msg.content === "string") { @@ -440,7 +440,9 @@ export namespace ProviderTransform { const copilotEfforts = iife(() => { if 
(id.includes("5.1-codex-max") || id.includes("5.2") || id.includes("5.3")) return [...WIDELY_SUPPORTED_EFFORTS, "xhigh"] - return WIDELY_SUPPORTED_EFFORTS + const arr = [...WIDELY_SUPPORTED_EFFORTS] + if (id.includes("gpt-5") && model.release_date >= "2025-12-04") arr.push("xhigh") + return arr }) return Object.fromEntries( copilotEfforts.map((effort) => [ @@ -655,9 +657,21 @@ export namespace ProviderTransform { // https://v5.ai-sdk.dev/providers/ai-sdk-providers/perplexity return {} - case "@mymediset/sap-ai-provider": case "@jerome-benoit/sap-ai-provider-v2": if (model.api.id.includes("anthropic")) { + if (isAnthropicAdaptive) { + return Object.fromEntries( + adaptiveEfforts.map((effort) => [ + effort, + { + thinking: { + type: "adaptive", + }, + effort, + }, + ]), + ) + } return { high: { thinking: { @@ -673,7 +687,26 @@ export namespace ProviderTransform { }, } } - return Object.fromEntries(WIDELY_SUPPORTED_EFFORTS.map((effort) => [effort, { reasoningEffort: effort }])) + if (model.api.id.includes("gemini") && id.includes("2.5")) { + return { + high: { + thinkingConfig: { + includeThoughts: true, + thinkingBudget: 16000, + }, + }, + max: { + thinkingConfig: { + includeThoughts: true, + thinkingBudget: 24576, + }, + }, + } + } + if (model.api.id.includes("gpt") || /\bo[1-9]/.test(model.api.id)) { + return Object.fromEntries(WIDELY_SUPPORTED_EFFORTS.map((effort) => [effort, { reasoningEffort: effort }])) + } + return {} } return {} } diff --git a/packages/opencode/src/pty/index.ts b/packages/opencode/src/pty/index.ts index dee3fbc542..d6bc4973a0 100644 --- a/packages/opencode/src/pty/index.ts +++ b/packages/opencode/src/pty/index.ts @@ -2,12 +2,12 @@ import { BusEvent } from "@/bus/bus-event" import { Bus } from "@/bus" import { type IPty } from "bun-pty" import z from "zod" -import { Identifier } from "../id/id" import { Log } from "../util/log" import { Instance } from "../project/instance" import { lazy } from "@opencode-ai/util/lazy" import { Shell } from 
"@/shell/shell" import { Plugin } from "@/plugin" +import { PtyID } from "./schema" export namespace Pty { const log = Log.create({ service: "pty" }) @@ -40,7 +40,7 @@ export namespace Pty { export const Info = z .object({ - id: Identifier.schema("pty"), + id: PtyID.zod, title: z.string(), command: z.string(), args: z.array(z.string()), @@ -77,8 +77,8 @@ export namespace Pty { export const Event = { Created: BusEvent.define("pty.created", z.object({ info: Info })), Updated: BusEvent.define("pty.updated", z.object({ info: Info })), - Exited: BusEvent.define("pty.exited", z.object({ id: Identifier.schema("pty"), exitCode: z.number() })), - Deleted: BusEvent.define("pty.deleted", z.object({ id: Identifier.schema("pty") })), + Exited: BusEvent.define("pty.exited", z.object({ id: PtyID.zod, exitCode: z.number() })), + Deleted: BusEvent.define("pty.deleted", z.object({ id: PtyID.zod })), } interface ActiveSession { @@ -91,7 +91,7 @@ export namespace Pty { } const state = Instance.state( - () => new Map<string, ActiveSession>(), + () => new Map<PtyID, ActiveSession>(), async (sessions) => { for (const session of sessions.values()) { try { @@ -113,12 +113,12 @@ export namespace Pty { return Array.from(state().values()).map((s) => s.info) } - export function get(id: string) { + export function get(id: PtyID) { return state().get(id)?.info } export async function create(input: CreateInput) { - const id = Identifier.create("pty", false) + const id = PtyID.ascending() const command = input.command || Shell.preferred() const args = input.args || [] if (command.endsWith("sh")) { @@ -195,24 +195,17 @@ export namespace Pty { session.bufferCursor += excess }) ptyProcess.onExit(({ exitCode }) => { + if (session.info.status === "exited") return log.info("session exited", { id, exitCode }) session.info.status = "exited" - for (const [key, ws] of session.subscribers.entries()) { - try { - if (ws.data === key) ws.close() - } catch { - // ignore - } - } - session.subscribers.clear() 
Bus.publish(Event.Exited, { id, exitCode }) - state().delete(id) + remove(id) }) Bus.publish(Event.Created, { info }) return info } - export async function update(id: string, input: UpdateInput) { + export async function update(id: PtyID, input: UpdateInput) { const session = state().get(id) if (!session) return if (input.title) { @@ -225,9 +218,10 @@ export namespace Pty { return session.info } - export async function remove(id: string) { + export async function remove(id: PtyID) { const session = state().get(id) if (!session) return + state().delete(id) log.info("removing session", { id }) try { session.process.kill() @@ -240,25 +234,24 @@ export namespace Pty { } } session.subscribers.clear() - state().delete(id) - Bus.publish(Event.Deleted, { id }) + Bus.publish(Event.Deleted, { id: session.info.id }) } - export function resize(id: string, cols: number, rows: number) { + export function resize(id: PtyID, cols: number, rows: number) { const session = state().get(id) if (session && session.info.status === "running") { session.process.resize(cols, rows) } } - export function write(id: string, data: string) { + export function write(id: PtyID, data: string) { const session = state().get(id) if (session && session.info.status === "running") { session.process.write(data) } } - export function connect(id: string, ws: Socket, cursor?: number) { + export function connect(id: PtyID, ws: Socket, cursor?: number) { const session = state().get(id) if (!session) { ws.close() diff --git a/packages/opencode/src/pty/schema.ts b/packages/opencode/src/pty/schema.ts new file mode 100644 index 0000000000..47b3196f0b --- /dev/null +++ b/packages/opencode/src/pty/schema.ts @@ -0,0 +1,17 @@ +import { Schema } from "effect" +import z from "zod" + +import { Identifier } from "@/id/id" +import { withStatics } from "@/util/schema" + +const ptyIdSchema = Schema.String.pipe(Schema.brand("PtyID")) + +export type PtyID = typeof ptyIdSchema.Type + +export const PtyID = ptyIdSchema.pipe( + 
withStatics((schema: typeof ptyIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + ascending: (id?: string) => schema.makeUnsafe(Identifier.ascending("pty", id)), + zod: Identifier.schema("pty").pipe(z.custom<PtyID>()), + })), +) diff --git a/packages/opencode/src/question/index.ts b/packages/opencode/src/question/index.ts index c93b74b9a4..cf52979fc8 100644 --- a/packages/opencode/src/question/index.ts +++ b/packages/opencode/src/question/index.ts @@ -1,9 +1,10 @@ import { Bus } from "@/bus" import { BusEvent } from "@/bus/bus-event" -import { Identifier } from "@/id/id" +import { SessionID, MessageID } from "@/session/schema" import { Instance } from "@/project/instance" import { Log } from "@/util/log" import z from "zod" +import { QuestionID } from "./schema" export namespace Question { const log = Log.create({ service: "question" }) @@ -33,12 +34,12 @@ export namespace Question { export const Request = z .object({ - id: Identifier.schema("question"), - sessionID: Identifier.schema("session"), + id: QuestionID.zod, + sessionID: SessionID.zod, questions: z.array(Info).describe("Questions to ask"), tool: z .object({ - messageID: z.string(), + messageID: MessageID.zod, callID: z.string(), }) .optional(), @@ -65,42 +66,37 @@ export namespace Question { Replied: BusEvent.define( "question.replied", z.object({ - sessionID: z.string(), - requestID: z.string(), + sessionID: SessionID.zod, + requestID: QuestionID.zod, answers: z.array(Answer), }), ), Rejected: BusEvent.define( "question.rejected", z.object({ - sessionID: z.string(), - requestID: z.string(), + sessionID: SessionID.zod, + requestID: QuestionID.zod, }), ), } - const state = Instance.state(async () => { - const pending: Record< - string, - { - info: Request - resolve: (answers: Answer[]) => void - reject: (e: any) => void - } - > = {} + interface PendingEntry { + info: Request + resolve: (answers: Answer[]) => void + reject: (e: any) => void + } - return { - pending, - } - }) + const state = 
Instance.state(async () => ({ + pending: new Map<QuestionID, PendingEntry>(), + })) export async function ask(input: { - sessionID: string + sessionID: SessionID questions: Info[] - tool?: { messageID: string; callID: string } + tool?: { messageID: MessageID; callID: string } }): Promise<Answer[]> { const s = await state() - const id = Identifier.ascending("question") + const id = QuestionID.ascending() log.info("asking", { id, questions: input.questions.length }) @@ -111,23 +107,23 @@ export namespace Question { questions: input.questions, tool: input.tool, } - s.pending[id] = { + s.pending.set(id, { info, resolve, reject, - } + }) Bus.publish(Event.Asked, info) }) } - export async function reply(input: { requestID: string; answers: Answer[] }): Promise<void> { + export async function reply(input: { requestID: QuestionID; answers: Answer[] }): Promise<void> { const s = await state() - const existing = s.pending[input.requestID] + const existing = s.pending.get(input.requestID) if (!existing) { log.warn("reply for unknown request", { requestID: input.requestID }) return } - delete s.pending[input.requestID] + s.pending.delete(input.requestID) log.info("replied", { requestID: input.requestID, answers: input.answers }) @@ -140,14 +136,14 @@ export namespace Question { existing.resolve(input.answers) } - export async function reject(requestID: string): Promise<void> { + export async function reject(requestID: QuestionID): Promise<void> { const s = await state() - const existing = s.pending[requestID] + const existing = s.pending.get(requestID) if (!existing) { log.warn("reject for unknown request", { requestID }) return } - delete s.pending[requestID] + s.pending.delete(requestID) log.info("rejected", { requestID }) @@ -166,6 +162,6 @@ export namespace Question { } export async function list() { - return state().then((x) => Object.values(x.pending).map((x) => x.info)) + return state().then((x) => Array.from(x.pending.values(), (x) => x.info)) } } diff --git 
a/packages/opencode/src/question/schema.ts b/packages/opencode/src/question/schema.ts new file mode 100644 index 0000000000..65e9ad07cb --- /dev/null +++ b/packages/opencode/src/question/schema.ts @@ -0,0 +1,17 @@ +import { Schema } from "effect" +import z from "zod" + +import { Identifier } from "@/id/id" +import { withStatics } from "@/util/schema" + +const questionIdSchema = Schema.String.pipe(Schema.brand("QuestionID")) + +export type QuestionID = typeof questionIdSchema.Type + +export const QuestionID = questionIdSchema.pipe( + withStatics((schema: typeof questionIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + ascending: (id?: string) => schema.makeUnsafe(Identifier.ascending("question", id)), + zod: Identifier.schema("question").pipe(z.custom<QuestionID>()), + })), +) diff --git a/packages/opencode/src/server/routes/config.ts b/packages/opencode/src/server/routes/config.ts index 6ee589824d..1293e77e0c 100644 --- a/packages/opencode/src/server/routes/config.ts +++ b/packages/opencode/src/server/routes/config.ts @@ -17,7 +17,7 @@ export const ConfigRoutes = lazy(() => "/", describeRoute({ summary: "Get configuration", - description: "Retrieve the current OpenCode configuration settings and preferences.", + description: "Retrieve the current Altimate Code configuration settings and preferences.", operationId: "config.get", responses: { 200: { @@ -38,7 +38,7 @@ export const ConfigRoutes = lazy(() => "/", describeRoute({ summary: "Update configuration", - description: "Update OpenCode configuration settings and preferences.", + description: "Update Altimate Code configuration settings and preferences.", operationId: "config.update", responses: { 200: { diff --git a/packages/opencode/src/server/routes/experimental.ts b/packages/opencode/src/server/routes/experimental.ts index 98c7ece105..b56765e4c0 100644 --- a/packages/opencode/src/server/routes/experimental.ts +++ b/packages/opencode/src/server/routes/experimental.ts @@ -1,6 +1,7 @@ import { Hono 
} from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" +import { ProviderID, ModelID } from "../../provider/schema" import { ToolRegistry } from "../../tool/registry" import { Worktree } from "../../worktree" import { Instance } from "../../project/instance" @@ -77,7 +78,7 @@ export const ExperimentalRoutes = lazy(() => ), async (c) => { const { provider, model } = c.req.valid("query") - const tools = await ToolRegistry.tools({ providerID: provider, modelID: model }) + const tools = await ToolRegistry.tools({ providerID: ProviderID.make(provider), modelID: ModelID.make(model) }) return c.json( tools.map((t) => ({ id: t.id, @@ -192,7 +193,7 @@ export const ExperimentalRoutes = lazy(() => describeRoute({ summary: "List sessions", description: - "Get a list of all OpenCode sessions across projects, sorted by most recently updated. Archived sessions are excluded by default.", + "Get a list of all Altimate Code sessions across projects, sorted by most recently updated. 
Archived sessions are excluded by default.", operationId: "experimental.session.list", responses: { 200: { diff --git a/packages/opencode/src/server/routes/global.ts b/packages/opencode/src/server/routes/global.ts index 4d019f6a7e..5017a89c7e 100644 --- a/packages/opencode/src/server/routes/global.ts +++ b/packages/opencode/src/server/routes/global.ts @@ -21,7 +21,7 @@ export const GlobalRoutes = lazy(() => "/health", describeRoute({ summary: "Get health", - description: "Get health information about the OpenCode server.", + description: "Get health information about the Altimate Code server.", operationId: "global.health", responses: { 200: { @@ -42,7 +42,7 @@ export const GlobalRoutes = lazy(() => "/event", describeRoute({ summary: "Get global events", - description: "Subscribe to global events from the OpenCode system using server-sent events.", + description: "Subscribe to global events from the Altimate Code system using server-sent events.", operationId: "global.event", responses: { 200: { @@ -111,7 +111,7 @@ export const GlobalRoutes = lazy(() => "/config", describeRoute({ summary: "Get global configuration", - description: "Retrieve the current global OpenCode configuration settings and preferences.", + description: "Retrieve the current global Altimate Code configuration settings and preferences.", operationId: "global.config.get", responses: { 200: { @@ -132,7 +132,7 @@ export const GlobalRoutes = lazy(() => "/config", describeRoute({ summary: "Update global configuration", - description: "Update global OpenCode configuration settings and preferences.", + description: "Update global Altimate Code configuration settings and preferences.", operationId: "global.config.update", responses: { 200: { @@ -157,7 +157,7 @@ export const GlobalRoutes = lazy(() => "/dispose", describeRoute({ summary: "Dispose instance", - description: "Clean up and dispose all OpenCode instances, releasing all resources.", + description: "Clean up and dispose all Altimate Code 
instances, releasing all resources.", operationId: "global.dispose", responses: { 200: { diff --git a/packages/opencode/src/server/routes/permission.ts b/packages/opencode/src/server/routes/permission.ts index 8fc2d594d7..6d86703c66 100644 --- a/packages/opencode/src/server/routes/permission.ts +++ b/packages/opencode/src/server/routes/permission.ts @@ -2,6 +2,7 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" import { PermissionNext } from "@/permission/next" +import { PermissionID } from "@/permission/schema" import { errors } from "../error" import { lazy } from "../../util/lazy" @@ -28,7 +29,7 @@ export const PermissionRoutes = lazy(() => validator( "param", z.object({ - requestID: z.string(), + requestID: PermissionID.zod, }), ), validator("json", z.object({ reply: PermissionNext.Reply, message: z.string().optional() })), diff --git a/packages/opencode/src/server/routes/project.ts b/packages/opencode/src/server/routes/project.ts index 81092284de..8ddfdd2d52 100644 --- a/packages/opencode/src/server/routes/project.ts +++ b/packages/opencode/src/server/routes/project.ts @@ -4,8 +4,10 @@ import { resolver } from "hono-openapi" import { Instance } from "../../project/instance" import { Project } from "../../project/project" import z from "zod" +import { ProjectID } from "../../project/schema" import { errors } from "../error" import { lazy } from "../../util/lazy" +import { InstanceBootstrap } from "../../project/bootstrap" export const ProjectRoutes = lazy(() => new Hono() @@ -13,7 +15,7 @@ export const ProjectRoutes = lazy(() => "/", describeRoute({ summary: "List all projects", - description: "Get a list of projects that have been opened with OpenCode.", + description: "Get a list of projects that have been opened with Altimate Code.", operationId: "project.list", responses: { 200: { @@ -35,7 +37,7 @@ export const ProjectRoutes = lazy(() => "/current", describeRoute({ summary: "Get current 
project", - description: "Retrieve the currently active project that OpenCode is working with.", + description: "Retrieve the currently active project that Altimate Code is working with.", operationId: "project.current", responses: { 200: { @@ -52,6 +54,40 @@ export const ProjectRoutes = lazy(() => return c.json(Instance.project) }, ) + .post( + "/git/init", + describeRoute({ + summary: "Initialize git repository", + description: "Create a git repository for the current project and return the refreshed project info.", + operationId: "project.initGit", + responses: { + 200: { + description: "Project information after git initialization", + content: { + "application/json": { + schema: resolver(Project.Info), + }, + }, + }, + }, + }), + async (c) => { + const dir = Instance.directory + const prev = Instance.project + const next = await Project.initGit({ + directory: dir, + project: prev, + }) + if (next.id === prev.id && next.vcs === prev.vcs && next.worktree === prev.worktree) return c.json(next) + await Instance.reload({ + directory: dir, + worktree: dir, + project: next, + init: InstanceBootstrap, + }) + return c.json(next) + }, + ) .patch( "/:projectID", describeRoute({ @@ -70,7 +106,7 @@ export const ProjectRoutes = lazy(() => ...errors(400, 404), }, }), - validator("param", z.object({ projectID: z.string() })), + validator("param", z.object({ projectID: ProjectID.zod })), validator("json", Project.update.schema.omit({ projectID: true })), async (c) => { const projectID = c.req.valid("param").projectID diff --git a/packages/opencode/src/server/routes/provider.ts b/packages/opencode/src/server/routes/provider.ts index 872b48be79..fc716d25cb 100644 --- a/packages/opencode/src/server/routes/provider.ts +++ b/packages/opencode/src/server/routes/provider.ts @@ -5,6 +5,7 @@ import { Config } from "../../config/config" import { Provider } from "../../provider/provider" import { ModelsDev } from "../../provider/models" import { ProviderAuth } from "../../provider/auth" 
+import { ProviderID } from "../../provider/schema" import { mapValues } from "remeda" import { errors } from "../error" import { lazy } from "../../util/lazy" @@ -101,7 +102,7 @@ export const ProviderRoutes = lazy(() => validator( "param", z.object({ - providerID: z.string().meta({ description: "Provider ID" }), + providerID: ProviderID.zod.meta({ description: "Provider ID" }), }), ), validator( @@ -141,7 +142,7 @@ export const ProviderRoutes = lazy(() => validator( "param", z.object({ - providerID: z.string().meta({ description: "Provider ID" }), + providerID: ProviderID.zod.meta({ description: "Provider ID" }), }), ), validator( diff --git a/packages/opencode/src/server/routes/pty.ts b/packages/opencode/src/server/routes/pty.ts index 368c9612bf..8418319ea0 100644 --- a/packages/opencode/src/server/routes/pty.ts +++ b/packages/opencode/src/server/routes/pty.ts @@ -3,6 +3,7 @@ import { describeRoute, validator, resolver } from "hono-openapi" import { upgradeWebSocket } from "hono/bun" import z from "zod" import { Pty } from "@/pty" +import { PtyID } from "@/pty/schema" import { NotFoundError } from "../../storage/db" import { errors } from "../error" import { lazy } from "../../util/lazy" @@ -13,7 +14,7 @@ export const PtyRoutes = lazy(() => "/", describeRoute({ summary: "List PTY sessions", - description: "Get a list of all active pseudo-terminal (PTY) sessions managed by OpenCode.", + description: "Get a list of all active pseudo-terminal (PTY) sessions managed by Altimate Code.", operationId: "pty.list", responses: { 200: { @@ -72,7 +73,7 @@ export const PtyRoutes = lazy(() => ...errors(404), }, }), - validator("param", z.object({ ptyID: z.string() })), + validator("param", z.object({ ptyID: PtyID.zod })), async (c) => { const info = Pty.get(c.req.valid("param").ptyID) if (!info) { @@ -99,7 +100,7 @@ export const PtyRoutes = lazy(() => ...errors(400), }, }), - validator("param", z.object({ ptyID: z.string() })), + validator("param", z.object({ ptyID: PtyID.zod 
})), validator("json", Pty.UpdateInput), async (c) => { const info = await Pty.update(c.req.valid("param").ptyID, c.req.valid("json")) @@ -124,7 +125,7 @@ export const PtyRoutes = lazy(() => ...errors(404), }, }), - validator("param", z.object({ ptyID: z.string() })), + validator("param", z.object({ ptyID: PtyID.zod })), async (c) => { await Pty.remove(c.req.valid("param").ptyID) return c.json(true) @@ -148,9 +149,9 @@ export const PtyRoutes = lazy(() => ...errors(404), }, }), - validator("param", z.object({ ptyID: z.string() })), + validator("param", z.object({ ptyID: PtyID.zod })), upgradeWebSocket((c) => { - const id = c.req.param("ptyID") + const id = PtyID.zod.parse(c.req.param("ptyID")) const cursor = (() => { const value = c.req.query("cursor") if (!value) return diff --git a/packages/opencode/src/server/routes/question.ts b/packages/opencode/src/server/routes/question.ts index eab675e816..3fff895fa6 100644 --- a/packages/opencode/src/server/routes/question.ts +++ b/packages/opencode/src/server/routes/question.ts @@ -1,6 +1,7 @@ import { Hono } from "hono" import { describeRoute, validator } from "hono-openapi" import { resolver } from "hono-openapi" +import { QuestionID } from "@/question/schema" import { Question } from "../../question" import z from "zod" import { errors } from "../error" @@ -51,7 +52,7 @@ export const QuestionRoutes = lazy(() => validator( "param", z.object({ - requestID: z.string(), + requestID: QuestionID.zod, }), ), validator("json", Question.Reply), @@ -86,7 +87,7 @@ export const QuestionRoutes = lazy(() => validator( "param", z.object({ - requestID: z.string(), + requestID: QuestionID.zod, }), ), async (c) => { diff --git a/packages/opencode/src/server/routes/session.ts b/packages/opencode/src/server/routes/session.ts index 12938aeaba..93c84dabfd 100644 --- a/packages/opencode/src/server/routes/session.ts +++ b/packages/opencode/src/server/routes/session.ts @@ -1,6 +1,7 @@ import { Hono } from "hono" import { stream } from 
"hono/streaming" import { describeRoute, validator, resolver } from "hono-openapi" +import { SessionID, MessageID, PartID } from "@/session/schema" import z from "zod" import { Session } from "../../session" import { MessageV2 } from "../../session/message-v2" @@ -14,6 +15,8 @@ import { Agent } from "../../agent/agent" import { Snapshot } from "@/snapshot" import { Log } from "../../util/log" import { PermissionNext } from "@/permission/next" +import { PermissionID } from "@/permission/schema" +import { ModelID, ProviderID } from "@/provider/schema" import { errors } from "../error" import { lazy } from "../../util/lazy" @@ -25,7 +28,7 @@ export const SessionRoutes = lazy(() => "/", describeRoute({ summary: "List sessions", - description: "Get a list of all OpenCode sessions, sorted by most recently updated.", + description: "Get a list of all Altimate Code sessions, sorted by most recently updated.", operationId: "session.list", responses: { 200: { @@ -93,7 +96,7 @@ export const SessionRoutes = lazy(() => "/:sessionID", describeRoute({ summary: "Get session", - description: "Retrieve detailed information about a specific OpenCode session.", + description: "Retrieve detailed information about a specific Altimate Code session.", tags: ["Session"], operationId: "session.get", responses: { @@ -173,7 +176,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), + sessionID: SessionID.zod, }), ), async (c) => { @@ -186,7 +189,7 @@ export const SessionRoutes = lazy(() => "/", describeRoute({ summary: "Create session", - description: "Create a new OpenCode session for interacting with AI assistants and managing conversations.", + description: "Create a new Altimate Code session for interacting with AI assistants and managing conversations.", operationId: "session.create", responses: { ...errors(400), @@ -258,7 +261,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - 
sessionID: z.string(), + sessionID: SessionID.zod, }), ), validator( @@ -309,7 +312,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), + sessionID: SessionID.zod, }), ), validator("json", Session.initialize.schema.omit({ sessionID: true })), @@ -372,7 +375,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string(), + sessionID: SessionID.zod, }), ), async (c) => { @@ -401,7 +404,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string(), + sessionID: SessionID.zod, }), ), async (c) => { @@ -502,14 +505,14 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), + sessionID: SessionID.zod, }), ), validator( "json", z.object({ - providerID: z.string(), - modelID: z.string(), + providerID: ProviderID.zod, + modelID: ModelID.zod, auto: z.boolean().optional().default(false), }), ), @@ -561,22 +564,70 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), + sessionID: SessionID.zod, }), ), validator( "query", - z.object({ - limit: z.coerce.number().optional(), - }), + z + .object({ + limit: z.coerce + .number() + .int() + .min(0) + .optional() + .meta({ description: "Maximum number of messages to return" }), + before: z + .string() + .optional() + .meta({ description: "Opaque cursor for loading older messages" }) + .refine( + (value) => { + if (!value) return true + try { + MessageV2.cursor.decode(value) + return true + } catch { + return false + } + }, + { message: "Invalid cursor" }, + ), + }) + .refine((value) => !value.before || value.limit !== undefined, { + message: "before requires limit", + path: ["before"], + }), ), async (c) => { const query = c.req.valid("query") - const messages = await Session.messages({ - sessionID: 
c.req.valid("param").sessionID, + const sessionID = c.req.valid("param").sessionID + if (query.limit === undefined) { + await Session.get(sessionID) + const messages = await Session.messages({ sessionID }) + return c.json(messages) + } + + if (query.limit === 0) { + await Session.get(sessionID) + const messages = await Session.messages({ sessionID }) + return c.json(messages) + } + + const page = await MessageV2.page({ + sessionID, limit: query.limit, + before: query.before, }) - return c.json(messages) + if (page.cursor) { + const url = new URL(c.req.url) + url.searchParams.set("limit", query.limit.toString()) + url.searchParams.set("before", page.cursor) + c.header("Access-Control-Expose-Headers", "Link, X-Next-Cursor") + c.header("Link", `<${url.toString()}>; rel=\"next\"`) + c.header("X-Next-Cursor", page.cursor) + } + return c.json(page.items) }, ) .get( @@ -605,8 +656,8 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), - messageID: z.string().meta({ description: "Message ID" }), + sessionID: SessionID.zod, + messageID: MessageID.zod, }), ), async (c) => { @@ -640,8 +691,8 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), - messageID: z.string().meta({ description: "Message ID" }), + sessionID: SessionID.zod, + messageID: MessageID.zod, }), ), async (c) => { @@ -674,9 +725,9 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), - messageID: z.string().meta({ description: "Message ID" }), - partID: z.string().meta({ description: "Part ID" }), + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, }), ), async (c) => { @@ -709,9 +760,9 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), - messageID: 
z.string().meta({ description: "Message ID" }), - partID: z.string().meta({ description: "Part ID" }), + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, }), ), validator("json", MessageV2.Part), @@ -753,7 +804,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), + sessionID: SessionID.zod, }), ), validator("json", SessionPrompt.PromptInput.omit({ sessionID: true })), @@ -785,7 +836,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), + sessionID: SessionID.zod, }), ), validator("json", SessionPrompt.PromptInput.omit({ sessionID: true })), @@ -825,7 +876,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), + sessionID: SessionID.zod, }), ), validator("json", SessionPrompt.CommandInput.omit({ sessionID: true })), @@ -857,7 +908,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string().meta({ description: "Session ID" }), + sessionID: SessionID.zod, }), ), validator("json", SessionPrompt.ShellInput.omit({ sessionID: true })), @@ -889,7 +940,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string(), + sessionID: SessionID.zod, }), ), validator("json", SessionRevert.RevertInput.omit({ sessionID: true })), @@ -924,7 +975,7 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string(), + sessionID: SessionID.zod, }), ), async (c) => { @@ -955,8 +1006,8 @@ export const SessionRoutes = lazy(() => validator( "param", z.object({ - sessionID: z.string(), - permissionID: z.string(), + sessionID: SessionID.zod, + permissionID: PermissionID.zod, }), ), validator("json", z.object({ response: PermissionNext.Reply })), diff --git a/packages/opencode/src/server/server.ts 
b/packages/opencode/src/server/server.ts index 7270423ee8..a75c0d6f3e 100644 --- a/packages/opencode/src/server/server.ts +++ b/packages/opencode/src/server/server.ts @@ -21,6 +21,10 @@ import { Auth } from "../auth" import { Flag } from "../flag/flag" import { Command } from "../command" import { Global } from "../global" +import { WorkspaceContext } from "../control-plane/workspace-context" +import { WorkspaceID } from "../control-plane/schema" +import { ProviderID } from "../provider/schema" +import { WorkspaceRouterMiddleware } from "../control-plane/workspace-router-middleware" import { ProjectRoutes } from "./routes/project" import { SessionRoutes } from "./routes/session" import { PtyRoutes } from "./routes/pty" @@ -29,17 +33,18 @@ import { FileRoutes } from "./routes/file" import { ConfigRoutes } from "./routes/config" import { ExperimentalRoutes } from "./routes/experimental" import { ProviderRoutes } from "./routes/provider" -import { lazy } from "../util/lazy" import { InstanceBootstrap } from "../project/bootstrap" import { NotFoundError } from "../storage/db" import type { ContentfulStatusCode } from "hono/utils/http-status" import { websocket } from "hono/bun" import { HTTPException } from "hono/http-exception" import { errors } from "./error" +import { Filesystem } from "@/util/filesystem" import { QuestionRoutes } from "./routes/question" import { PermissionRoutes } from "./routes/permission" import { GlobalRoutes } from "./routes/global" import { MDNS } from "./mdns" +import { lazy } from "@/util/lazy" // @ts-ignore This global is needed to prevent ai-sdk from logging warnings to stdout https://github.com/vercel/ai/blob/2dc67e0ef538307f21368db32d5a12345d98831b/packages/ai/src/logger/log-warnings.ts#L85 globalThis.AI_SDK_LOG_WARNINGS = false @@ -47,525 +52,534 @@ globalThis.AI_SDK_LOG_WARNINGS = false export namespace Server { const log = Log.create({ service: "server" }) - let _url: URL | undefined - let _corsWhitelist: string[] = [] + export const 
Default = lazy(() => createApp({})) - export function url(): URL { - return _url ?? new URL("http://localhost:4096") - } - - const app = new Hono() - export const App: () => Hono = lazy( - () => - // TODO: Break server.ts into smaller route files to fix type inference - app - .onError((err, c) => { - log.error("failed", { - error: err, - }) - if (err instanceof NamedError) { - let status: ContentfulStatusCode - if (err instanceof NotFoundError) status = 404 - else if (err instanceof Provider.ModelNotFoundError) status = 400 - else if (err.name.startsWith("Worktree")) status = 400 - else status = 500 - return c.json(err.toObject(), { status }) - } - if (err instanceof HTTPException) return err.getResponse() - const message = err instanceof Error && err.stack ? err.stack : err.toString() - return c.json(new NamedError.Unknown({ message }).toObject(), { - status: 500, - }) + export const createApp = (opts: { cors?: string[] }): Hono => { + const app = new Hono() + return app + .onError((err, c) => { + log.error("failed", { + error: err, }) - .use((c, next) => { - // Allow CORS preflight requests to succeed without auth. - // Browser clients sending Authorization headers will preflight with OPTIONS. - if (c.req.method === "OPTIONS") return next() - const password = Flag.OPENCODE_SERVER_PASSWORD - if (!password) return next() - const username = Flag.OPENCODE_SERVER_USERNAME ?? "altimate" - return basicAuth({ username, password })(c, next) + if (err instanceof NamedError) { + let status: ContentfulStatusCode + if (err instanceof NotFoundError) status = 404 + else if (err instanceof Provider.ModelNotFoundError) status = 400 + else if (err.name.startsWith("Worktree")) status = 400 + else status = 500 + return c.json(err.toObject(), { status }) + } + if (err instanceof HTTPException) return err.getResponse() + const message = err instanceof Error && err.stack ? 
err.stack : err.toString() + return c.json(new NamedError.Unknown({ message }).toObject(), { + status: 500, }) - .use(async (c, next) => { - const skipLogging = c.req.path === "/log" - if (!skipLogging) { - log.info("request", { - method: c.req.method, - path: c.req.path, - }) - } - const timer = log.time("request", { + }) + .use((c, next) => { + // Allow CORS preflight requests to succeed without auth. + // Browser clients sending Authorization headers will preflight with OPTIONS. + if (c.req.method === "OPTIONS") return next() + const password = Flag.OPENCODE_SERVER_PASSWORD + if (!password) return next() + const username = Flag.OPENCODE_SERVER_USERNAME ?? "altimate" // altimate_change — branded default username + return basicAuth({ username, password })(c, next) + }) + .use(async (c, next) => { + const skipLogging = c.req.path === "/log" + if (!skipLogging) { + log.info("request", { method: c.req.method, path: c.req.path, }) - await next() - if (!skipLogging) { - timer.stop() - } + } + const timer = log.time("request", { + method: c.req.method, + path: c.req.path, }) - .use( - cors({ - origin(input) { - if (!input) return + await next() + if (!skipLogging) { + timer.stop() + } + }) + .use( + cors({ + origin(input) { + if (!input) return - if (input.startsWith("http://localhost:")) return input - if (input.startsWith("http://127.0.0.1:")) return input - if ( - input === "tauri://localhost" || - input === "http://tauri.localhost" || - input === "https://tauri.localhost" - ) - return input + if (input.startsWith("http://localhost:")) return input + if (input.startsWith("http://127.0.0.1:")) return input + if ( + input === "tauri://localhost" || + input === "http://tauri.localhost" || + input === "https://tauri.localhost" + ) + return input - // *.opencode.dev (https only, adjust if needed) - if (/^https:\/\/([a-z0-9-]+\.)*altimate-code\.dev$/.test(input)) { - return input - } - if (_corsWhitelist.includes(input)) { - return input - } + // *.altimate.ai (https only, 
adjust if needed) + if (/^https:\/\/([a-z0-9-]+\.)*altimate\.ai$/.test(input)) { + return input + } + if (opts?.cors?.includes(input)) { + return input + } - return - }, - }), - ) - .route("/global", GlobalRoutes()) - .put( - "/auth/:providerID", - describeRoute({ - summary: "Set auth credentials", - description: "Set authentication credentials", - operationId: "auth.set", - responses: { - 200: { - description: "Successfully set authentication credentials", - content: { - "application/json": { - schema: resolver(z.boolean()), - }, + return + }, + }), + ) + .route("/global", GlobalRoutes()) + .put( + "/auth/:providerID", + describeRoute({ + summary: "Set auth credentials", + description: "Set authentication credentials", + operationId: "auth.set", + responses: { + 200: { + description: "Successfully set authentication credentials", + content: { + "application/json": { + schema: resolver(z.boolean()), }, }, - ...errors(400), }, - }), - validator( - "param", - z.object({ - providerID: z.string(), - }), - ), - validator("json", Auth.Info), - async (c) => { - const providerID = c.req.valid("param").providerID - const info = c.req.valid("json") - await Auth.set(providerID, info) - return c.json(true) + ...errors(400), }, - ) - .delete( - "/auth/:providerID", - describeRoute({ - summary: "Remove auth credentials", - description: "Remove authentication credentials", - operationId: "auth.remove", - responses: { - 200: { - description: "Successfully removed authentication credentials", - content: { - "application/json": { - schema: resolver(z.boolean()), - }, + }), + validator( + "param", + z.object({ + providerID: ProviderID.zod, + }), + ), + validator("json", Auth.Info), + async (c) => { + const providerID = c.req.valid("param").providerID + const info = c.req.valid("json") + await Auth.set(providerID, info) + return c.json(true) + }, + ) + .delete( + "/auth/:providerID", + describeRoute({ + summary: "Remove auth credentials", + description: "Remove authentication 
credentials", + operationId: "auth.remove", + responses: { + 200: { + description: "Successfully removed authentication credentials", + content: { + "application/json": { + schema: resolver(z.boolean()), }, }, - ...errors(400), }, - }), - validator( - "param", - z.object({ - providerID: z.string(), - }), - ), - async (c) => { - const providerID = c.req.valid("param").providerID - await Auth.remove(providerID) - return c.json(true) + ...errors(400), }, - ) - .use(async (c, next) => { - if (c.req.path === "/log") return next() - const raw = c.req.query("directory") || c.req.header("x-altimate-code-directory") || process.cwd() - const directory = (() => { + }), + validator( + "param", + z.object({ + providerID: ProviderID.zod, + }), + ), + async (c) => { + const providerID = c.req.valid("param").providerID + await Auth.remove(providerID) + return c.json(true) + }, + ) + .use(async (c, next) => { + if (c.req.path === "/log") return next() + const rawWorkspaceID = c.req.query("workspace") || c.req.header("x-opencode-workspace") + const raw = c.req.query("directory") || c.req.header("x-opencode-directory") || process.cwd() + const directory = Filesystem.resolve( + (() => { try { return decodeURIComponent(raw) } catch { return raw } - })() - return Instance.provide({ - directory, - init: InstanceBootstrap, - async fn() { - return next() - }, - }) - }) - .get( - "/doc", - openAPIRouteHandler(app, { - documentation: { - info: { - title: "altimate", - version: "0.0.3", - description: "altimate api", + })(), + ) + + return WorkspaceContext.provide({ + workspaceID: rawWorkspaceID ? 
WorkspaceID.make(rawWorkspaceID) : undefined, + async fn() { + return Instance.provide({ + directory, + init: InstanceBootstrap, + async fn() { + return next() }, - openapi: "3.1.1", + }) + }, + }) + }) + .use(WorkspaceRouterMiddleware) + .get( + "/doc", + openAPIRouteHandler(app, { + documentation: { + info: { + title: "opencode", + version: "0.0.3", + description: "opencode api", }, + openapi: "3.1.1", + }, + }), + ) + .use( + validator( + "query", + z.object({ + directory: z.string().optional(), + workspace: z.string().optional(), }), - ) - .use(validator("query", z.object({ directory: z.string().optional() }))) - .route("/project", ProjectRoutes()) - .route("/pty", PtyRoutes()) - .route("/config", ConfigRoutes()) - .route("/experimental", ExperimentalRoutes()) - .route("/session", SessionRoutes()) - .route("/permission", PermissionRoutes()) - .route("/question", QuestionRoutes()) - .route("/provider", ProviderRoutes()) - .route("/", FileRoutes()) - .route("/mcp", McpRoutes()) - .route("/tui", TuiRoutes()) - .post( - "/instance/dispose", - describeRoute({ - summary: "Dispose instance", - description: "Clean up and dispose the current Altimate CLI instance, releasing all resources.", - operationId: "instance.dispose", - responses: { - 200: { - description: "Instance disposed", - content: { - "application/json": { - schema: resolver(z.boolean()), - }, + ), + ) + .route("/project", ProjectRoutes()) + .route("/pty", PtyRoutes()) + .route("/config", ConfigRoutes()) + .route("/experimental", ExperimentalRoutes()) + .route("/session", SessionRoutes()) + .route("/permission", PermissionRoutes()) + .route("/question", QuestionRoutes()) + .route("/provider", ProviderRoutes()) + .route("/", FileRoutes()) + .route("/mcp", McpRoutes()) + .route("/tui", TuiRoutes()) + .post( + "/instance/dispose", + describeRoute({ + summary: "Dispose instance", + description: "Clean up and dispose the current Altimate Code instance, releasing all resources.", + operationId: 
"instance.dispose", + responses: { + 200: { + description: "Instance disposed", + content: { + "application/json": { + schema: resolver(z.boolean()), }, }, }, - }), - async (c) => { - await Instance.dispose() - return c.json(true) }, - ) - .get( - "/path", - describeRoute({ - summary: "Get paths", - description: - "Retrieve the current working directory and related path information for the Altimate CLI instance.", - operationId: "path.get", - responses: { - 200: { - description: "Path", - content: { - "application/json": { - schema: resolver( - z - .object({ - home: z.string(), - state: z.string(), - config: z.string(), - worktree: z.string(), - directory: z.string(), - }) - .meta({ - ref: "Path", - }), - ), - }, + }), + async (c) => { + await Instance.dispose() + return c.json(true) + }, + ) + .get( + "/path", + describeRoute({ + summary: "Get paths", + description: "Retrieve the current working directory and related path information for the Altimate Code instance.", + operationId: "path.get", + responses: { + 200: { + description: "Path", + content: { + "application/json": { + schema: resolver( + z + .object({ + home: z.string(), + state: z.string(), + config: z.string(), + worktree: z.string(), + directory: z.string(), + }) + .meta({ + ref: "Path", + }), + ), }, }, }, - }), - async (c) => { - return c.json({ - home: Global.Path.home, - state: Global.Path.state, - config: Global.Path.config, - worktree: Instance.worktree, - directory: Instance.directory, - }) }, - ) - .get( - "/vcs", - describeRoute({ - summary: "Get VCS info", - description: - "Retrieve version control system (VCS) information for the current project, such as git branch.", - operationId: "vcs.get", - responses: { - 200: { - description: "VCS info", - content: { - "application/json": { - schema: resolver(Vcs.Info), - }, + }), + async (c) => { + return c.json({ + home: Global.Path.home, + state: Global.Path.state, + config: Global.Path.config, + worktree: Instance.worktree, + directory: 
Instance.directory, + }) + }, + ) + .get( + "/vcs", + describeRoute({ + summary: "Get VCS info", + description: "Retrieve version control system (VCS) information for the current project, such as git branch.", + operationId: "vcs.get", + responses: { + 200: { + description: "VCS info", + content: { + "application/json": { + schema: resolver(Vcs.Info), }, }, }, - }), - async (c) => { - const branch = await Vcs.branch() - return c.json({ - branch, - }) }, - ) - .get( - "/command", - describeRoute({ - summary: "List commands", - description: "Get a list of all available commands in the Altimate CLI system.", - operationId: "command.list", - responses: { - 200: { - description: "List of commands", - content: { - "application/json": { - schema: resolver(Command.Info.array()), - }, + }), + async (c) => { + const branch = await Vcs.branch() + return c.json({ + branch, + }) + }, + ) + .get( + "/command", + describeRoute({ + summary: "List commands", + description: "Get a list of all available commands in the Altimate Code system.", + operationId: "command.list", + responses: { + 200: { + description: "List of commands", + content: { + "application/json": { + schema: resolver(Command.Info.array()), }, }, }, - }), - async (c) => { - const commands = await Command.list() - return c.json(commands) }, - ) - .post( - "/log", - describeRoute({ - summary: "Write log", - description: "Write a log entry to the server logs with specified level and metadata.", - operationId: "app.log", - responses: { - 200: { - description: "Log entry written successfully", - content: { - "application/json": { - schema: resolver(z.boolean()), - }, + }), + async (c) => { + const commands = await Command.list() + return c.json(commands) + }, + ) + .post( + "/log", + describeRoute({ + summary: "Write log", + description: "Write a log entry to the server logs with specified level and metadata.", + operationId: "app.log", + responses: { + 200: { + description: "Log entry written successfully", + content: { 
+ "application/json": { + schema: resolver(z.boolean()), }, }, - ...errors(400), }, + ...errors(400), + }, + }), + validator( + "json", + z.object({ + service: z.string().meta({ description: "Service name for the log entry" }), + level: z.enum(["debug", "info", "error", "warn"]).meta({ description: "Log level" }), + message: z.string().meta({ description: "Log message" }), + extra: z + .record(z.string(), z.any()) + .optional() + .meta({ description: "Additional metadata for the log entry" }), }), - validator( - "json", - z.object({ - service: z.string().meta({ description: "Service name for the log entry" }), - level: z.enum(["debug", "info", "error", "warn"]).meta({ description: "Log level" }), - message: z.string().meta({ description: "Log message" }), - extra: z - .record(z.string(), z.any()) - .optional() - .meta({ description: "Additional metadata for the log entry" }), - }), - ), - async (c) => { - const { service, level, message, extra } = c.req.valid("json") - const logger = Log.create({ service }) + ), + async (c) => { + const { service, level, message, extra } = c.req.valid("json") + const logger = Log.create({ service }) - switch (level) { - case "debug": - logger.debug(message, extra) - break - case "info": - logger.info(message, extra) - break - case "error": - logger.error(message, extra) - break - case "warn": - logger.warn(message, extra) - break - } + switch (level) { + case "debug": + logger.debug(message, extra) + break + case "info": + logger.info(message, extra) + break + case "error": + logger.error(message, extra) + break + case "warn": + logger.warn(message, extra) + break + } - return c.json(true) - }, - ) - .get( - "/agent", - describeRoute({ - summary: "List agents", - description: "Get a list of all available AI agents in the Altimate CLI system.", - operationId: "app.agents", - responses: { - 200: { - description: "List of agents", - content: { - "application/json": { - schema: resolver(Agent.Info.array()), - }, + return c.json(true) + 
}, + ) + .get( + "/agent", + describeRoute({ + summary: "List agents", + description: "Get a list of all available AI agents in the Altimate Code system.", + operationId: "app.agents", + responses: { + 200: { + description: "List of agents", + content: { + "application/json": { + schema: resolver(Agent.Info.array()), }, }, }, - }), - async (c) => { - const modes = await Agent.list() - return c.json(modes) }, - ) - .get( - "/skill", - describeRoute({ - summary: "List skills", - description: "Get a list of all available skills in the Altimate CLI system.", - operationId: "app.skills", - responses: { - 200: { - description: "List of skills", - content: { - "application/json": { - schema: resolver(Skill.Info.array()), - }, + }), + async (c) => { + const modes = await Agent.list() + return c.json(modes) + }, + ) + .get( + "/skill", + describeRoute({ + summary: "List skills", + description: "Get a list of all available skills in the Altimate Code system.", + operationId: "app.skills", + responses: { + 200: { + description: "List of skills", + content: { + "application/json": { + schema: resolver(Skill.Info.array()), }, }, }, - }), - async (c) => { - const skills = await Skill.all() - return c.json(skills) }, - ) - .get( - "/lsp", - describeRoute({ - summary: "Get LSP status", - description: "Get LSP server status", - operationId: "lsp.status", - responses: { - 200: { - description: "LSP server status", - content: { - "application/json": { - schema: resolver(LSP.Status.array()), - }, + }), + async (c) => { + const skills = await Skill.all() + return c.json(skills) + }, + ) + .get( + "/lsp", + describeRoute({ + summary: "Get LSP status", + description: "Get LSP server status", + operationId: "lsp.status", + responses: { + 200: { + description: "LSP server status", + content: { + "application/json": { + schema: resolver(LSP.Status.array()), }, }, }, - }), - async (c) => { - return c.json(await LSP.status()) }, - ) - .get( - "/formatter", - describeRoute({ - summary: "Get 
formatter status", - description: "Get formatter status", - operationId: "formatter.status", - responses: { - 200: { - description: "Formatter status", - content: { - "application/json": { - schema: resolver(Format.Status.array()), - }, + }), + async (c) => { + return c.json(await LSP.status()) + }, + ) + .get( + "/formatter", + describeRoute({ + summary: "Get formatter status", + description: "Get formatter status", + operationId: "formatter.status", + responses: { + 200: { + description: "Formatter status", + content: { + "application/json": { + schema: resolver(Format.Status.array()), }, }, }, - }), - async (c) => { - return c.json(await Format.status()) }, - ) - .get( - "/event", - describeRoute({ - summary: "Subscribe to events", - description: "Get events", - operationId: "event.subscribe", - responses: { - 200: { - description: "Event stream", - content: { - "text/event-stream": { - schema: resolver(BusEvent.payloads()), - }, + }), + async (c) => { + return c.json(await Format.status()) + }, + ) + .get( + "/event", + describeRoute({ + summary: "Subscribe to events", + description: "Get events", + operationId: "event.subscribe", + responses: { + 200: { + description: "Event stream", + content: { + "text/event-stream": { + schema: resolver(BusEvent.payloads()), }, }, }, - }), - async (c) => { - log.info("event connected") - c.header("X-Accel-Buffering", "no") - c.header("X-Content-Type-Options", "nosniff") - return streamSSE(c, async (stream) => { + }, + }), + async (c) => { + log.info("event connected") + c.header("X-Accel-Buffering", "no") + c.header("X-Content-Type-Options", "nosniff") + return streamSSE(c, async (stream) => { + stream.writeSSE({ + data: JSON.stringify({ + type: "server.connected", + properties: {}, + }), + }) + const unsub = Bus.subscribeAll(async (event) => { + await stream.writeSSE({ + data: JSON.stringify(event), + }) + if (event.type === Bus.InstanceDisposed.type) { + stream.close() + } + }) + + // Send heartbeat every 10s to prevent 
stalled proxy streams. + const heartbeat = setInterval(() => { stream.writeSSE({ data: JSON.stringify({ - type: "server.connected", + type: "server.heartbeat", properties: {}, }), }) - const unsub = Bus.subscribeAll(async (event) => { - await stream.writeSSE({ - data: JSON.stringify(event), - }) - if (event.type === Bus.InstanceDisposed.type) { - stream.close() - } - }) - - // Send heartbeat every 10s to prevent stalled proxy streams. - const heartbeat = setInterval(() => { - stream.writeSSE({ - data: JSON.stringify({ - type: "server.heartbeat", - properties: {}, - }), - }) - }, 10_000) + }, 10_000) - await new Promise<void>((resolve) => { - stream.onAbort(() => { - clearInterval(heartbeat) - unsub() - resolve() - log.info("event disconnected") - }) + await new Promise<void>((resolve) => { + stream.onAbort(() => { + clearInterval(heartbeat) + unsub() + resolve() + log.info("event disconnected") }) }) + }) + }, + ) + .all("/*", async (c) => { + const path = c.req.path + + const response = await proxy(`https://app.altimate.ai${path}`, { + ...c.req, + headers: { + ...c.req.raw.headers, + host: "app.altimate.ai", }, + }) + response.headers.set( + "Content-Security-Policy", + "default-src 'self'; script-src 'self' 'wasm-unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; media-src 'self' data:; connect-src 'self' data:", ) - .all("/*", async (c) => { - const path = c.req.path - - const response = await proxy(`https://app.opencode.dev${path}`, { - ...c.req, - headers: { - ...c.req.raw.headers, - host: "app.opencode.dev", - }, - }) - response.headers.set( - "Content-Security-Policy", - "default-src 'self'; script-src 'self' 'wasm-unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; media-src 'self' data:; connect-src 'self' data:", - ) - return response - }) as unknown as Hono, - ) + return response + }) + } export async function openapi() { // Cast to break excessive type 
recursion from long route chains - const result = await generateSpecs(App() as Hono, { + const result = await generateSpecs(Default(), { documentation: { info: { - title: "altimate", + title: "opencode", version: "1.0.0", - description: "altimate api", + description: "opencode api", }, openapi: "3.1.1", }, @@ -573,6 +587,9 @@ export namespace Server { return result } + /** @deprecated do not use this dumb shit */ + export let url: URL + export function listen(opts: { port: number hostname: string @@ -580,12 +597,12 @@ export namespace Server { mdnsDomain?: string cors?: string[] }) { - _corsWhitelist = opts.cors ?? [] - + url = new URL(`http://${opts.hostname}:${opts.port}`) + const app = createApp(opts) const args = { hostname: opts.hostname, idleTimeout: 0, - fetch: App().fetch, + fetch: app.fetch, websocket: websocket, } as const const tryServe = (port: number) => { @@ -598,8 +615,6 @@ export namespace Server { const server = opts.port === 0 ? (tryServe(4096) ?? tryServe(0)) : tryServe(opts.port) if (!server) throw new Error(`Failed to start server on port ${opts.port}`) - _url = server.url - const shouldPublishMDNS = opts.mdns && server.port && diff --git a/packages/opencode/src/session/compaction.ts b/packages/opencode/src/session/compaction.ts index 2ebbfb451c..06730133a2 100644 --- a/packages/opencode/src/session/compaction.ts +++ b/packages/opencode/src/session/compaction.ts @@ -1,7 +1,7 @@ import { BusEvent } from "@/bus/bus-event" import { Bus } from "@/bus" import { Session } from "." 
-import { Identifier } from "../id/id" +import { SessionID, MessageID, PartID } from "./schema" import { Instance } from "../project/instance" import { Provider } from "../provider/provider" import { MessageV2 } from "./message-v2" @@ -14,11 +14,13 @@ import { Agent } from "@/agent/agent" import { Plugin } from "@/plugin" import { Config } from "@/config/config" import { ProviderTransform } from "@/provider/transform" -import { Telemetry } from "@/telemetry" +import { Telemetry } from "@/telemetry" // altimate_change — telemetry for compaction events +import { ModelID, ProviderID } from "@/provider/schema" export namespace SessionCompaction { const log = Log.create({ service: "session.compaction" }) + // altimate_change start — observation masks for pruned tool outputs function formatBytes(bytes: number): string { if (bytes < 1024) return `${bytes} B` if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB` @@ -36,7 +38,6 @@ export namespace SessionCompaction { return "[unserializable]" } if (str.length <= maxLen) return str - // Avoid slicing mid-surrogate pair by finding a safe boundary let end = maxLen const code = str.charCodeAt(end - 1) if (code >= 0xd800 && code <= 0xdbff) end-- @@ -60,18 +61,21 @@ export namespace SessionCompaction { const fingerprint = firstLine ? 
` — "${firstLine}"` : "" return `[Tool output cleared — ${part.tool}(${args}) returned ${lines} lines, ${formatBytes(bytes)}${fingerprint}]` } + // altimate_change end export const Event = { Compacted: BusEvent.define( "session.compacted", z.object({ - sessionID: z.string(), + sessionID: SessionID.zod, }), ), } const COMPACTION_BUFFER = 20_000 + // altimate_change start — improved isOverflow formula with safety guard and unified headroom + // See PR #35 — fixes upstream bugs with limit.input models and small-context models export async function isOverflow(input: { tokens: MessageV2.Assistant["tokens"]; model: Provider.Model }) { const config = await Config.get() if (config.compaction?.auto === false) return false @@ -89,6 +93,7 @@ export namespace SessionCompaction { if (base <= headroom) return false return count >= base - headroom } + // altimate_change end export const PRUNE_MINIMUM = 20_000 export const PRUNE_PROTECT = 40_000 @@ -98,7 +103,7 @@ export namespace SessionCompaction { // goes backwards through parts until there are 40_000 tokens worth of tool // calls. then erases output of previous tool calls. idea is to throw away old // tool calls that are no longer relevant. 
- export async function prune(input: { sessionID: string }) { + export async function prune(input: { sessionID: SessionID }) { const config = await Config.get() if (config.compaction?.prune === false) return log.info("pruning") @@ -133,16 +138,19 @@ export namespace SessionCompaction { if (pruned > PRUNE_MINIMUM) { for (const part of toPrune) { if (part.state.status === "completed") { + // altimate_change start — observation masks for pruned tool outputs const mask = createObservationMask(part) part.state.time.compacted = Date.now() part.state.metadata = { ...part.state.metadata, observation_mask: mask, } + // altimate_change end await Session.updatePart(part) } } log.info("pruned", { count: toPrune.length }) + // altimate_change start — telemetry for pruning Telemetry.track({ type: "tool_outputs_pruned", timestamp: Date.now(), @@ -150,18 +158,23 @@ export namespace SessionCompaction { count: toPrune.length, tokens_pruned: pruned, }) + // altimate_change end } } + // altimate_change start — compaction attempt tracking for loop protection const compactionAttempts = new Map<string, number>() + // altimate_change end export async function process(input: { - parentID: string + parentID: MessageID messages: MessageV2.WithParts[] - sessionID: string + sessionID: SessionID abort: AbortSignal auto: boolean + overflow?: boolean }) { + // altimate_change start — telemetry, attempt tracking, and circuit breaker const attempt = (compactionAttempts.get(input.sessionID) ?? 0) + 1 compactionAttempts.set(input.sessionID, attempt) input.abort.addEventListener("abort", () => { @@ -174,13 +187,39 @@ export namespace SessionCompaction { trigger: input.auto ? 
"overflow_detection" : "error_recovery", attempt, }) + if (attempt > 3) { + log.warn("compaction circuit breaker", { sessionID: input.sessionID, attempt }) + return + } + // altimate_change end const userMessage = input.messages.findLast((m) => m.info.id === input.parentID)!.info as MessageV2.User + + let messages = input.messages + let replay: MessageV2.WithParts | undefined + if (input.overflow) { + const idx = input.messages.findIndex((m) => m.info.id === input.parentID) + for (let i = idx - 1; i >= 0; i--) { + const msg = input.messages[i] + if (msg.info.role === "user" && !msg.parts.some((p) => p.type === "compaction")) { + replay = msg + messages = input.messages.slice(0, i) + break + } + } + const hasContent = + replay && messages.some((m) => m.info.role === "user" && !m.parts.some((p) => p.type === "compaction")) + if (!hasContent) { + replay = undefined + messages = input.messages + } + } + const agent = await Agent.get("compaction") const model = agent.model ? await Provider.getModel(agent.model.providerID, agent.model.modelID) : await Provider.getModel(userMessage.model.providerID, userMessage.model.modelID) const msg = (await Session.updateMessage({ - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "assistant", parentID: input.parentID, sessionID: input.sessionID, @@ -232,7 +271,7 @@ When constructing the summary, try to stick to this template: - [What important instructions did the user give you that are relevant] - [If there is a plan or spec, include information about it so next agent can continue using it] -## Data Context +## Data Context (altimate_change start — data engineering context for compaction summaries) - [What warehouse(s) or database(s) are we connected to?] - [What schemas, tables, or columns were discovered or are relevant?] @@ -240,6 +279,7 @@ When constructing the summary, try to stick to this template: - [Any lineage findings (upstream/downstream dependencies)?] 
- [Any query patterns, anti-patterns, or optimization opportunities found?] - [Skip this section entirely if the task is not data-engineering related] +(altimate_change end) ## Discoveries @@ -263,7 +303,7 @@ When constructing the summary, try to stick to this template: tools: {}, system: [], messages: [ - ...MessageV2.toModelMessages(input.messages, model), + ...MessageV2.toModelMessages(messages, model, { stripMedia: true }), { role: "user", content: [ @@ -277,52 +317,96 @@ When constructing the summary, try to stick to this template: model, }) + if (result === "compact") { + processor.message.error = new MessageV2.ContextOverflowError({ + message: replay + ? "Conversation history too large to compact - exceeds model context limit" + : "Session too large to compact - context exceeds model limit even after stripping media", + }).toObject() + processor.message.finish = "error" + await Session.updateMessage(processor.message) + return "stop" + } + if (result === "continue" && input.auto) { - const continueMsg = await Session.updateMessage({ - id: Identifier.ascending("message"), - role: "user", - sessionID: input.sessionID, - time: { - created: Date.now(), - }, - agent: userMessage.agent, - model: userMessage.model, - }) - await Session.updatePart({ - id: Identifier.ascending("part"), - messageID: continueMsg.id, - sessionID: input.sessionID, - type: "text", - synthetic: true, - text: "Continue if you have next steps, or stop and ask for clarification if you are unsure how to proceed.", - time: { - start: Date.now(), - end: Date.now(), - }, - }) + if (replay) { + const original = replay.info as MessageV2.User + const replayMsg = await Session.updateMessage({ + id: MessageID.ascending(), + role: "user", + sessionID: input.sessionID, + time: { created: Date.now() }, + agent: original.agent, + model: original.model, + format: original.format, + tools: original.tools, + system: original.system, + variant: original.variant, + }) + for (const part of replay.parts) { + if 
(part.type === "compaction") continue + const replayPart = + part.type === "file" && MessageV2.isMedia(part.mime) + ? { type: "text" as const, text: `[Attached ${part.mime}: ${part.filename ?? "file"}]` } + : part + await Session.updatePart({ + ...replayPart, + id: PartID.ascending(), + messageID: replayMsg.id, + sessionID: input.sessionID, + }) + } + } else { + const continueMsg = await Session.updateMessage({ + id: MessageID.ascending(), + role: "user", + sessionID: input.sessionID, + time: { created: Date.now() }, + agent: userMessage.agent, + model: userMessage.model, + }) + const text = + (input.overflow + ? "The previous request exceeded the provider's size limit due to large media attachments. The conversation was compacted and media files were removed from context. If the user was asking about attached images or files, explain that the attachments were too large to process and suggest they try again with smaller or fewer files.\n\n" + : "") + + "Continue if you have next steps, or stop and ask for clarification if you are unsure how to proceed." 
+ await Session.updatePart({ + id: PartID.ascending(), + messageID: continueMsg.id, + sessionID: input.sessionID, + type: "text", + synthetic: true, + text, + time: { + start: Date.now(), + end: Date.now(), + }, + }) + } } if (processor.message.error) { - compactionAttempts.delete(input.sessionID) + compactionAttempts.delete(input.sessionID) // altimate_change — cleanup on error return "stop" } Bus.publish(Event.Compacted, { sessionID: input.sessionID }) - compactionAttempts.delete(input.sessionID) + compactionAttempts.delete(input.sessionID) // altimate_change — cleanup on success return "continue" } export const create = fn( z.object({ - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, agent: z.string(), model: z.object({ - providerID: z.string(), - modelID: z.string(), + providerID: ProviderID.zod, + modelID: ModelID.zod, }), auto: z.boolean(), + overflow: z.boolean().optional(), }), async (input) => { const msg = await Session.updateMessage({ - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "user", model: input.model, sessionID: input.sessionID, @@ -332,11 +416,12 @@ When constructing the summary, try to stick to this template: }, }) await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: msg.id, sessionID: msg.sessionID, type: "compaction", auto: input.auto, + overflow: input.overflow, }) }, ) diff --git a/packages/opencode/src/session/index.ts b/packages/opencode/src/session/index.ts index 14bf91ee8a..35103a4e6b 100644 --- a/packages/opencode/src/session/index.ts +++ b/packages/opencode/src/session/index.ts @@ -7,7 +7,6 @@ import z from "zod" import { type ProviderMetadata } from "ai" import { Config } from "../config/config" import { Flag } from "../flag/flag" -import { Identifier } from "../id/id" import { Installation } from "../installation" import { Database, NotFoundError, eq, and, or, gte, isNull, desc, like, inArray, lt } from "../storage/db" @@ -22,13 +21,17 @@ 
import { SessionPrompt } from "./prompt" import { fn } from "@/util/fn" import { Command } from "../command" import { Snapshot } from "@/snapshot" +import { WorkspaceContext } from "../control-plane/workspace-context" +import { ProjectID } from "../project/schema" +import { WorkspaceID } from "../control-plane/schema" +import { SessionID, MessageID, PartID } from "./schema" import type { Provider } from "@/provider/provider" +import { ModelID, ProviderID } from "@/provider/schema" import { PermissionNext } from "@/permission/next" import { Global } from "@/global" import type { LanguageModelV2Usage } from "@ai-sdk/provider" import { iife } from "@/util/iife" -import { Telemetry } from "@/telemetry" export namespace Session { const log = Log.create({ service: "session" }) @@ -64,6 +67,7 @@ export namespace Session { id: row.id, slug: row.slug, projectID: row.project_id, + workspaceID: row.workspace_id ?? undefined, directory: row.directory, parentID: row.parent_id ?? undefined, title: row.title, @@ -85,6 +89,7 @@ export namespace Session { return { id: info.id, project_id: info.projectID, + workspace_id: info.workspaceID, parent_id: info.parentID, slug: info.slug, directory: info.directory, @@ -116,11 +121,12 @@ export namespace Session { export const Info = z .object({ - id: Identifier.schema("session"), + id: SessionID.zod, slug: z.string(), - projectID: z.string(), + projectID: ProjectID.zod, + workspaceID: WorkspaceID.zod.optional(), directory: z.string(), - parentID: Identifier.schema("session").optional(), + parentID: SessionID.zod.optional(), summary: z .object({ additions: z.number(), @@ -145,8 +151,8 @@ export namespace Session { permission: PermissionNext.Ruleset.optional(), revert: z .object({ - messageID: z.string(), - partID: z.string().optional(), + messageID: MessageID.zod, + partID: PartID.zod.optional(), snapshot: z.string().optional(), diff: z.string().optional(), }) @@ -159,7 +165,7 @@ export namespace Session { export const ProjectInfo = z 
.object({ - id: z.string(), + id: ProjectID.zod, name: z.string().optional(), worktree: z.string(), }) @@ -197,14 +203,14 @@ export namespace Session { Diff: BusEvent.define( "session.diff", z.object({ - sessionID: z.string(), + sessionID: SessionID.zod, diff: Snapshot.FileDiff.array(), }), ), Error: BusEvent.define( "session.error", z.object({ - sessionID: z.string().optional(), + sessionID: SessionID.zod.optional(), error: MessageV2.Assistant.shape.error, }), ), @@ -213,9 +219,10 @@ export namespace Session { export const create = fn( z .object({ - parentID: Identifier.schema("session").optional(), + parentID: SessionID.zod.optional(), title: z.string().optional(), permission: Info.shape.permission, + workspaceID: WorkspaceID.zod.optional(), }) .optional(), async (input) => { @@ -224,14 +231,15 @@ export namespace Session { directory: Instance.directory, title: input?.title, permission: input?.permission, + workspaceID: input?.workspaceID, }) }, ) export const fork = fn( z.object({ - sessionID: Identifier.schema("session"), - messageID: Identifier.schema("message").optional(), + sessionID: SessionID.zod, + messageID: MessageID.zod.optional(), }), async (input) => { const original = await get(input.sessionID) @@ -239,16 +247,15 @@ export namespace Session { const title = getForkedTitle(original.title) const session = await createNext({ directory: Instance.directory, + workspaceID: original.workspaceID, title, }) const msgs = await messages({ sessionID: input.sessionID }) - const idMap = new Map<string, string>() + const idMap = new Map<string, MessageID>() - let messageCount = 0 for (const msg of msgs) { if (input.messageID && msg.info.id >= input.messageID) break - messageCount++ - const newID = Identifier.ascending("message") + const newID = MessageID.ascending() idMap.set(msg.info.id, newID) const parentID = msg.info.role === "assistant" && msg.info.parentID ? 
idMap.get(msg.info.parentID) : undefined @@ -262,24 +269,17 @@ export namespace Session { for (const part of msg.parts) { await updatePart({ ...part, - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: cloned.id, sessionID: session.id, }) } } - Telemetry.track({ - type: "session_forked", - timestamp: Date.now(), - session_id: session.id, - parent_session_id: input.sessionID, - message_count: messageCount, - }) return session }, ) - export const touch = fn(Identifier.schema("session"), async (sessionID) => { + export const touch = fn(SessionID.zod, async (sessionID) => { const now = Date.now() Database.use((db) => { const row = db @@ -295,18 +295,20 @@ export namespace Session { }) export async function createNext(input: { - id?: string + id?: SessionID title?: string - parentID?: string + parentID?: SessionID + workspaceID?: WorkspaceID directory: string permission?: PermissionNext.Ruleset }) { const result: Info = { - id: Identifier.descending("session", input.id), + id: SessionID.descending(input.id), slug: Slug.create(), version: Installation.VERSION, projectID: Instance.project.id, directory: input.directory, + workspaceID: input.workspaceID, parentID: input.parentID, title: input.title ?? 
createDefaultTitle(!!input.parentID), permission: input.permission, @@ -342,13 +344,13 @@ export namespace Session { return path.join(base, [input.time.created, input.slug].join("-") + ".md") } - export const get = fn(Identifier.schema("session"), async (id) => { + export const get = fn(SessionID.zod, async (id) => { const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get()) if (!row) throw new NotFoundError({ message: `Session not found: ${id}` }) return fromRow(row) }) - export const share = fn(Identifier.schema("session"), async (id) => { + export const share = fn(SessionID.zod, async (id) => { const cfg = await Config.get() if (cfg.share === "disabled") { throw new Error("Sharing is disabled in configuration") @@ -364,7 +366,7 @@ export namespace Session { return share }) - export const unshare = fn(Identifier.schema("session"), async (id) => { + export const unshare = fn(SessionID.zod, async (id) => { // Use ShareNext to remove the share (same as share function uses ShareNext to create) const { ShareNext } = await import("@/share/share-next") await ShareNext.remove(id) @@ -378,7 +380,7 @@ export namespace Session { export const setTitle = fn( z.object({ - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, title: z.string(), }), async (input) => { @@ -399,7 +401,7 @@ export namespace Session { export const setArchived = fn( z.object({ - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, time: z.number().optional(), }), async (input) => { @@ -420,7 +422,7 @@ export namespace Session { export const setPermission = fn( z.object({ - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, permission: PermissionNext.Ruleset, }), async (input) => { @@ -441,7 +443,7 @@ export namespace Session { export const setRevert = fn( z.object({ - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, revert: Info.shape.revert, summary: Info.shape.summary, }), @@ -467,7 
+469,7 @@ export namespace Session { }, ) - export const clearRevert = fn(Identifier.schema("session"), async (sessionID) => { + export const clearRevert = fn(SessionID.zod, async (sessionID) => { return Database.use((db) => { const row = db .update(SessionTable) @@ -487,7 +489,7 @@ export namespace Session { export const setSummary = fn( z.object({ - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, summary: Info.shape.summary, }), async (input) => { @@ -511,7 +513,7 @@ export namespace Session { }, ) - export const diff = fn(Identifier.schema("session"), async (sessionID) => { + export const diff = fn(SessionID.zod, async (sessionID) => { try { return await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID]) } catch { @@ -521,7 +523,7 @@ export namespace Session { export const messages = fn( z.object({ - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, limit: z.number().optional(), }), async (input) => { @@ -537,6 +539,7 @@ export namespace Session { export function* list(input?: { directory?: string + workspaceID?: WorkspaceID roots?: boolean start?: number search?: string @@ -545,6 +548,9 @@ export namespace Session { const project = Instance.project const conditions = [eq(SessionTable.project_id, project.id)] + if (WorkspaceContext.workspaceID) { + conditions.push(eq(SessionTable.workspace_id, WorkspaceContext.workspaceID)) + } if (input?.directory) { conditions.push(eq(SessionTable.directory, input.directory)) } @@ -643,7 +649,7 @@ export namespace Session { } } - export const children = fn(Identifier.schema("session"), async (parentID) => { + export const children = fn(SessionID.zod, async (parentID) => { const project = Instance.project const rows = Database.use((db) => db @@ -655,7 +661,7 @@ export namespace Session { return rows.map(fromRow) }) - export const remove = fn(Identifier.schema("session"), async (sessionID) => { + export const remove = fn(SessionID.zod, async (sessionID) => { const project = 
Instance.project try { const session = await get(sessionID) @@ -701,13 +707,15 @@ export namespace Session { export const removeMessage = fn( z.object({ - sessionID: Identifier.schema("session"), - messageID: Identifier.schema("message"), + sessionID: SessionID.zod, + messageID: MessageID.zod, }), async (input) => { // CASCADE delete handles parts automatically Database.use((db) => { - db.delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run() + db.delete(MessageTable) + .where(and(eq(MessageTable.id, input.messageID), eq(MessageTable.session_id, input.sessionID))) + .run() Database.effect(() => Bus.publish(MessageV2.Event.Removed, { sessionID: input.sessionID, @@ -721,13 +729,15 @@ export namespace Session { export const removePart = fn( z.object({ - sessionID: Identifier.schema("session"), - messageID: Identifier.schema("message"), - partID: Identifier.schema("part"), + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, }), async (input) => { Database.use((db) => { - db.delete(PartTable).where(eq(PartTable.id, input.partID)).run() + db.delete(PartTable) + .where(and(eq(PartTable.id, input.partID), eq(PartTable.session_id, input.sessionID))) + .run() Database.effect(() => Bus.publish(MessageV2.Event.PartRemoved, { sessionID: input.sessionID, @@ -758,7 +768,7 @@ export namespace Session { .run() Database.effect(() => Bus.publish(MessageV2.Event.PartUpdated, { - part, + part: structuredClone(part), }), ) }) @@ -767,9 +777,9 @@ export namespace Session { export const updatePartDelta = fn( z.object({ - sessionID: z.string(), - messageID: z.string(), - partID: z.string(), + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, field: z.string(), delta: z.string(), }), @@ -806,7 +816,7 @@ export namespace Session { // OpenRouter provides inputTokens as the total count of input tokens (including cached). // AFAIK other providers (OpenRouter/OpenAI/Gemini etc.) do it the same way e.g. 
vercel/ai#8794 (comment) // Anthropic does it differently though - inputTokens doesn't include cached tokens. - // It looks like Altimate CLI's cost calculation assumes all providers return inputTokens the same way Anthropic does (I'm guessing getUsage logic was originally implemented with anthropic), so it's causing incorrect cost calculation for OpenRouter and others. + // It looks like Altimate Code's cost calculation assumes all providers return inputTokens the same way Anthropic does (I'm guessing getUsage logic was originally implemented with anthropic), so it's causing incorrect cost calculation for OpenRouter and others. const excludesCachedTokens = !!(input.metadata?.["anthropic"] || input.metadata?.["bedrock"]) const adjustedInputTokens = safe( excludesCachedTokens ? inputTokens : inputTokens - cacheReadInputTokens - cacheWriteInputTokens, @@ -865,10 +875,10 @@ export namespace Session { export const initialize = fn( z.object({ - sessionID: Identifier.schema("session"), - modelID: z.string(), - providerID: z.string(), - messageID: Identifier.schema("message"), + sessionID: SessionID.zod, + modelID: ModelID.zod, + providerID: ProviderID.zod, + messageID: MessageID.zod, }), async (input) => { await SessionPrompt.command({ diff --git a/packages/opencode/src/session/message-v2.ts b/packages/opencode/src/session/message-v2.ts index cc5e0d70e6..7d7d4bcc7b 100644 --- a/packages/opencode/src/session/message-v2.ts +++ b/packages/opencode/src/session/message-v2.ts @@ -1,13 +1,13 @@ import { BusEvent } from "@/bus/bus-event" +import { SessionID, MessageID, PartID } from "./schema" import z from "zod" import { NamedError } from "@opencode-ai/util/error" import { APICallError, convertToModelMessages, LoadAPIKeyError, type ModelMessage, type UIMessage } from "ai" -import { Identifier } from "../id/id" import { LSP } from "../lsp" import { Snapshot } from "@/snapshot" import { fn } from "@/util/fn" -import { Database, eq, desc, inArray } from "@/storage/db" -import { 
MessageTable, PartTable } from "./session.sql" +import { Database, NotFoundError, and, desc, eq, inArray, lt, or } from "@/storage/db" +import { MessageTable, PartTable, SessionTable } from "./session.sql" import { ProviderTransform } from "@/provider/transform" import { STATUS_CODES } from "http" import { Storage } from "@/storage/storage" @@ -15,6 +15,7 @@ import { ProviderError } from "@/provider/error" import { iife } from "@/util/iife" import { type SystemError } from "bun" import type { Provider } from "@/provider/provider" +import { ModelID, ProviderID } from "@/provider/schema" export namespace MessageV2 { export function isMedia(mime: string) { @@ -78,9 +79,9 @@ export namespace MessageV2 { export type OutputFormat = z.infer<typeof Format> const PartBase = z.object({ - id: z.string(), - sessionID: z.string(), - messageID: z.string(), + id: PartID.zod, + sessionID: SessionID.zod, + messageID: MessageID.zod, }) export const SnapshotPart = PartBase.extend({ @@ -213,8 +214,8 @@ export namespace MessageV2 { agent: z.string(), model: z .object({ - providerID: z.string(), - modelID: z.string(), + providerID: ProviderID.zod, + modelID: ModelID.zod, }) .optional(), command: z.string().optional(), @@ -343,8 +344,8 @@ export namespace MessageV2 { export type ToolPart = z.infer<typeof ToolPart> const Base = z.object({ - id: z.string(), - sessionID: z.string(), + id: MessageID.zod, + sessionID: SessionID.zod, }) export const User = Base.extend({ @@ -362,8 +363,8 @@ export namespace MessageV2 { .optional(), agent: z.string(), model: z.object({ - providerID: z.string(), - modelID: z.string(), + providerID: ProviderID.zod, + modelID: ModelID.zod, }), system: z.string().optional(), tools: z.record(z.string(), z.boolean()).optional(), @@ -410,9 +411,9 @@ export namespace MessageV2 { APIError.Schema, ]) .optional(), - parentID: z.string(), - modelID: z.string(), - providerID: z.string(), + parentID: MessageID.zod, + modelID: ModelID.zod, + providerID: ProviderID.zod, /** * 
@deprecated */ @@ -457,8 +458,8 @@ export namespace MessageV2 { Removed: BusEvent.define( "message.removed", z.object({ - sessionID: z.string(), - messageID: z.string(), + sessionID: SessionID.zod, + messageID: MessageID.zod, }), ), PartUpdated: BusEvent.define( @@ -470,9 +471,9 @@ export namespace MessageV2 { PartDelta: BusEvent.define( "message.part.delta", z.object({ - sessionID: z.string(), - messageID: z.string(), - partID: z.string(), + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, field: z.string(), delta: z.string(), }), @@ -480,9 +481,9 @@ export namespace MessageV2 { PartRemoved: BusEvent.define( "message.part.removed", z.object({ - sessionID: z.string(), - messageID: z.string(), - partID: z.string(), + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod, }), ), } @@ -493,6 +494,68 @@ export namespace MessageV2 { }) export type WithParts = z.infer<typeof WithParts> + const Cursor = z.object({ + id: MessageID.zod, + time: z.number(), + }) + type Cursor = z.infer<typeof Cursor> + + export const cursor = { + encode(input: Cursor) { + return Buffer.from(JSON.stringify(input)).toString("base64url") + }, + decode(input: string) { + return Cursor.parse(JSON.parse(Buffer.from(input, "base64url").toString("utf8"))) + }, + } + + const info = (row: typeof MessageTable.$inferSelect) => + ({ + ...row.data, + id: row.id, + sessionID: row.session_id, + }) as MessageV2.Info + + const part = (row: typeof PartTable.$inferSelect) => + ({ + ...row.data, + id: row.id, + sessionID: row.session_id, + messageID: row.message_id, + }) as MessageV2.Part + + const older = (row: Cursor) => + or( + lt(MessageTable.time_created, row.time), + and(eq(MessageTable.time_created, row.time), lt(MessageTable.id, row.id)), + ) + + async function hydrate(rows: (typeof MessageTable.$inferSelect)[]) { + const ids = rows.map((row) => row.id) + const partByMessage = new Map<string, MessageV2.Part[]>() + if (ids.length > 0) { + const partRows = 
Database.use((db) => + db + .select() + .from(PartTable) + .where(inArray(PartTable.message_id, ids)) + .orderBy(PartTable.message_id, PartTable.id) + .all(), + ) + for (const row of partRows) { + const next = part(row) + const list = partByMessage.get(row.message_id) + if (list) list.push(next) + else partByMessage.set(row.message_id, [next]) + } + } + + return rows.map((row) => ({ + info: info(row), + parts: partByMessage.get(row.id) ?? [], + })) + } + export function toModelMessages( input: WithParts[], model: Provider.Model, @@ -698,7 +761,7 @@ export namespace MessageV2 { // media (images, PDFs) in tool results if (media.length > 0) { result.push({ - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "user", parts: [ { @@ -728,60 +791,65 @@ export namespace MessageV2 { ) } - export const stream = fn(Identifier.schema("session"), async function* (sessionID) { - const size = 50 - let offset = 0 - while (true) { + export const page = fn( + z.object({ + sessionID: SessionID.zod, + limit: z.number().int().positive(), + before: z.string().optional(), + }), + async (input) => { + const before = input.before ? cursor.decode(input.before) : undefined + const where = before + ? 
and(eq(MessageTable.session_id, input.sessionID), older(before)) + : eq(MessageTable.session_id, input.sessionID) const rows = Database.use((db) => db .select() .from(MessageTable) - .where(eq(MessageTable.session_id, sessionID)) - .orderBy(desc(MessageTable.time_created)) - .limit(size) - .offset(offset) + .where(where) + .orderBy(desc(MessageTable.time_created), desc(MessageTable.id)) + .limit(input.limit + 1) .all(), ) - if (rows.length === 0) break - - const ids = rows.map((row) => row.id) - const partsByMessage = new Map<string, MessageV2.Part[]>() - if (ids.length > 0) { - const partRows = Database.use((db) => - db - .select() - .from(PartTable) - .where(inArray(PartTable.message_id, ids)) - .orderBy(PartTable.message_id, PartTable.id) - .all(), + if (rows.length === 0) { + const row = Database.use((db) => + db.select({ id: SessionTable.id }).from(SessionTable).where(eq(SessionTable.id, input.sessionID)).get(), ) - for (const row of partRows) { - const part = { - ...row.data, - id: row.id, - sessionID: row.session_id, - messageID: row.message_id, - } as MessageV2.Part - const list = partsByMessage.get(row.message_id) - if (list) list.push(part) - else partsByMessage.set(row.message_id, [part]) + if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` }) + return { + items: [] as MessageV2.WithParts[], + more: false, } } - for (const row of rows) { - const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info - yield { - info, - parts: partsByMessage.get(row.id) ?? [], - } + const more = rows.length > input.limit + const page = more ? rows.slice(0, input.limit) : rows + const items = await hydrate(page) + items.reverse() + const tail = page.at(-1) + return { + items, + more, + cursor: more && tail ? 
cursor.encode({ id: tail.id, time: tail.time_created }) : undefined, } + }, + ) - offset += rows.length - if (rows.length < size) break + export const stream = fn(SessionID.zod, async function* (sessionID) { + const size = 50 + let before: string | undefined + while (true) { + const next = await page({ sessionID, limit: size, before }) + if (next.items.length === 0) break + for (let i = next.items.length - 1; i >= 0; i--) { + yield next.items[i] + } + if (!next.more || !next.cursor) break + before = next.cursor } }) - export const parts = fn(Identifier.schema("message"), async (message_id) => { + export const parts = fn(MessageID.zod, async (message_id) => { const rows = Database.use((db) => db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(), ) @@ -792,15 +860,20 @@ export namespace MessageV2 { export const get = fn( z.object({ - sessionID: Identifier.schema("session"), - messageID: Identifier.schema("message"), + sessionID: SessionID.zod, + messageID: MessageID.zod, }), async (input): Promise<WithParts> => { - const row = Database.use((db) => db.select().from(MessageTable).where(eq(MessageTable.id, input.messageID)).get()) - if (!row) throw new Error(`Message not found: ${input.messageID}`) - const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info + const row = Database.use((db) => + db + .select() + .from(MessageTable) + .where(and(eq(MessageTable.id, input.messageID), eq(MessageTable.session_id, input.sessionID))) + .get(), + ) + if (!row) throw new NotFoundError({ message: `Message not found: ${input.messageID}` }) return { - info, + info: info(row), parts: await parts(input.messageID), } }, @@ -824,7 +897,7 @@ export namespace MessageV2 { return result } - export function fromError(e: unknown, ctx: { providerID: string }) { + export function fromError(e: unknown, ctx: { providerID: ProviderID }): NonNullable<Assistant["error"]> { switch (true) { case e instanceof DOMException && 
e.name === "AbortError": return new MessageV2.AbortedError( diff --git a/packages/opencode/src/session/message.ts b/packages/opencode/src/session/message.ts index 5c950d0e40..ee5eac08b6 100644 --- a/packages/opencode/src/session/message.ts +++ b/packages/opencode/src/session/message.ts @@ -1,4 +1,6 @@ import z from "zod" +import { SessionID } from "./schema" +import { ModelID, ProviderID } from "../provider/schema" import { NamedError } from "@opencode-ai/util/error" export namespace Message { @@ -142,7 +144,7 @@ export namespace Message { error: z .discriminatedUnion("name", [AuthError.Schema, NamedError.Unknown.Schema, OutputLengthError.Schema]) .optional(), - sessionID: z.string(), + sessionID: SessionID.zod, tool: z.record( z.string(), z @@ -159,8 +161,8 @@ export namespace Message { assistant: z .object({ system: z.string().array(), - modelID: z.string(), - providerID: z.string(), + modelID: ModelID.zod, + providerID: ProviderID.zod, path: z.object({ cwd: z.string(), root: z.string(), diff --git a/packages/opencode/src/session/processor.ts b/packages/opencode/src/session/processor.ts index 06e9e1931c..70ea586b84 100644 --- a/packages/opencode/src/session/processor.ts +++ b/packages/opencode/src/session/processor.ts @@ -1,6 +1,5 @@ import { MessageV2 } from "./message-v2" import { Log } from "@/util/log" -import { Identifier } from "@/id/id" import { Session } from "." 
import { Agent } from "@/agent/agent" import { Snapshot } from "@/snapshot" @@ -15,8 +14,8 @@ import { Config } from "@/config/config" import { SessionCompaction } from "./compaction" import { PermissionNext } from "@/permission/next" import { Question } from "@/question" -import { Telemetry } from "@/telemetry" -import { MCP } from "@/mcp" +import { PartID } from "./schema" +import type { SessionID, MessageID } from "./schema" export namespace SessionProcessor { const DOOM_LOOP_THRESHOLD = 3 @@ -27,7 +26,7 @@ export namespace SessionProcessor { export function create(input: { assistantMessage: MessageV2.Assistant - sessionID: string + sessionID: SessionID model: Provider.Model abort: AbortSignal }) { @@ -36,20 +35,11 @@ export namespace SessionProcessor { let blocked = false let attempt = 0 let needsCompaction = false - let stepStartTime = Date.now() - let toolCallCounter = 0 - let previousTool: string | null = null - let generationCounter = 0 - let retryErrorType: string | null = null - let retryStartTime: number | null = null const result = { get message() { return input.assistantMessage }, - get toolCallCount() { - return toolCallCounter - }, partFromToolCall(toolCallID: string) { return toolcalls[toolCallID] }, @@ -75,7 +65,7 @@ export namespace SessionProcessor { continue } const reasoningPart = { - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: input.assistantMessage.id, sessionID: input.assistantMessage.sessionID, type: "reasoning" as const, @@ -121,7 +111,7 @@ export namespace SessionProcessor { case "tool-input-start": const part = await Session.updatePart({ - id: toolcalls[value.id]?.id ?? Identifier.ascending("part"), + id: toolcalls[value.id]?.id ?? 
PartID.ascending(), messageID: input.assistantMessage.id, sessionID: input.assistantMessage.sessionID, type: "tool", @@ -184,13 +174,6 @@ export namespace SessionProcessor { always: [value.toolName], ruleset: agent.permission, }) - Telemetry.track({ - type: "doom_loop_detected", - timestamp: Date.now(), - session_id: input.sessionID, - tool_name: value.toolName, - repeat_count: DOOM_LOOP_THRESHOLD, - }) } } break @@ -213,22 +196,7 @@ export namespace SessionProcessor { attachments: value.output.attachments, }, }) - const toolType = MCP.isMcpTool(match.tool) ? "mcp" as const : "standard" as const - Telemetry.track({ - type: "tool_call", - timestamp: Date.now(), - session_id: input.sessionID, - message_id: input.assistantMessage.id, - tool_name: match.tool, - tool_type: toolType, - tool_category: Telemetry.categorizeToolName(match.tool, toolType), - status: "success", - duration_ms: Date.now() - match.state.time.start, - sequence_index: toolCallCounter, - previous_tool: previousTool, - }) - toolCallCounter++ - previousTool = match.tool + delete toolcalls[value.toolCallId] } break @@ -242,30 +210,14 @@ export namespace SessionProcessor { state: { status: "error", input: value.input ?? match.state.input, - error: (value.error instanceof Error ? value.error.message : String(value.error)).slice(0, 1000), + error: (value.error as any).toString(), time: { start: match.state.time.start, end: Date.now(), }, }, }) - const errToolType = MCP.isMcpTool(match.tool) ? "mcp" as const : "standard" as const - Telemetry.track({ - type: "tool_call", - timestamp: Date.now(), - session_id: input.sessionID, - message_id: input.assistantMessage.id, - tool_name: match.tool, - tool_type: errToolType, - tool_category: Telemetry.categorizeToolName(match.tool, errToolType), - status: "error", - duration_ms: Date.now() - match.state.time.start, - sequence_index: toolCallCounter, - previous_tool: previousTool, - error: (value.error instanceof Error ? 
value.error.message : String(value.error)).slice(0, 500), - }) - toolCallCounter++ - previousTool = match.tool + if ( value.error instanceof PermissionNext.RejectedError || value.error instanceof Question.RejectedError @@ -280,10 +232,9 @@ export namespace SessionProcessor { throw value.error case "start-step": - stepStartTime = Date.now() snapshot = await Snapshot.track() await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: input.assistantMessage.id, sessionID: input.sessionID, snapshot, @@ -292,21 +243,6 @@ export namespace SessionProcessor { break case "finish-step": - generationCounter++ - if (attempt > 0 && retryErrorType) { - Telemetry.track({ - type: "error_recovered", - timestamp: Date.now(), - session_id: input.sessionID, - error_type: retryErrorType, - recovery_strategy: "retry", - attempts: attempt, - recovered: true, - duration_ms: Date.now() - (retryStartTime ?? Date.now()), - }) - retryErrorType = null - retryStartTime = null - } const usage = Session.getUsage({ model: input.model, usage: value.usage, @@ -316,7 +252,7 @@ export namespace SessionProcessor { input.assistantMessage.cost += usage.cost input.assistantMessage.tokens = usage.tokens await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), reason: value.finishReason, snapshot: await Snapshot.track(), messageID: input.assistantMessage.id, @@ -326,48 +262,11 @@ export namespace SessionProcessor { cost: usage.cost, }) await Session.updateMessage(input.assistantMessage) - Telemetry.track({ - type: "generation", - timestamp: Date.now(), - session_id: input.sessionID, - message_id: input.assistantMessage.id, - model_id: input.model.id, - provider_id: input.model.providerID, - agent: input.assistantMessage.agent ?? 
"", - finish_reason: value.finishReason, - tokens: { - input: usage.tokens.input, - output: usage.tokens.output, - reasoning: usage.tokens.reasoning, - cache_read: usage.tokens.cache.read, - cache_write: usage.tokens.cache.write, - }, - cost: usage.cost, - duration_ms: Date.now() - stepStartTime, - }) - // Context utilization tracking - const totalTokens = usage.tokens.input + usage.tokens.output + usage.tokens.cache.read - const contextLimit = input.model.limit?.context ?? 0 - if (contextLimit > 0) { - const cacheRead = usage.tokens.cache.read - const totalInput = cacheRead + usage.tokens.input - Telemetry.track({ - type: "context_utilization", - timestamp: Date.now(), - session_id: input.sessionID, - model_id: input.model.id, - tokens_used: totalTokens, - context_limit: contextLimit, - utilization_pct: Math.round((totalTokens / contextLimit) * 1000) / 1000, - generation_number: generationCounter, - cache_hit_ratio: totalInput > 0 ? Math.round((cacheRead / totalInput) * 1000) / 1000 : 0, - }) - } if (snapshot) { const patch = await Snapshot.patch(snapshot) if (patch.files.length) { await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: input.assistantMessage.id, sessionID: input.sessionID, type: "patch", @@ -381,14 +280,17 @@ export namespace SessionProcessor { sessionID: input.sessionID, messageID: input.assistantMessage.parentID, }) - if (await SessionCompaction.isOverflow({ tokens: usage.tokens, model: input.model })) { + if ( + !input.assistantMessage.summary && + (await SessionCompaction.isOverflow({ tokens: usage.tokens, model: input.model })) + ) { needsCompaction = true } break case "text-start": currentText = { - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: input.assistantMessage.id, sessionID: input.assistantMessage.sessionID, type: "text", @@ -429,9 +331,9 @@ export namespace SessionProcessor { ) currentText.text = textOutput.text currentText.time = { - ...currentText.time, + start: 
currentText.time?.start ?? Date.now(), end: Date.now(), - } as typeof currentText.time + } if (value.providerMetadata) currentText.metadata = value.providerMetadata await Session.updatePart(currentText) } @@ -454,70 +356,40 @@ export namespace SessionProcessor { error: e, stack: JSON.stringify(e.stack), }) - Telemetry.track({ - type: "error", - timestamp: Date.now(), - session_id: input.sessionID, - error_name: e?.name ?? "UnknownError", - error_message: (e?.message ?? String(e)).slice(0, 500), - context: "processor", - }) const error = MessageV2.fromError(e, { providerID: input.model.providerID }) if (MessageV2.ContextOverflowError.isInstance(error)) { - log.info("context overflow detected, triggering compaction") needsCompaction = true - const tokens = input.assistantMessage.tokens - Telemetry.track({ - type: "context_overflow_recovered", - timestamp: Date.now(), - session_id: input.sessionID, - model_id: input.model.id, - provider_id: input.model.providerID, - tokens_used: - tokens.total || - tokens.input + tokens.output + tokens.cache.read + tokens.cache.write, - }) - break - } - const retry = SessionRetry.retryable(error) - if (retry !== undefined) { - Telemetry.track({ - type: "provider_error", - timestamp: Date.now(), - session_id: input.sessionID, - provider_id: input.model.providerID, - model_id: input.model.id, - error_type: e?.name ?? "UnknownError", - error_message: (e?.message ?? String(e)).slice(0, 500), - http_status: (e as any)?.status, + Bus.publish(Session.Event.Error, { + sessionID: input.sessionID, + error, }) - if (attempt === 0) { - retryStartTime = Date.now() + } else { + const retry = SessionRetry.retryable(error) + if (retry !== undefined) { + attempt++ + const delay = SessionRetry.delay(attempt, error.name === "APIError" ? 
error : undefined) + SessionStatus.set(input.sessionID, { + type: "retry", + attempt, + message: retry, + next: Date.now() + delay, + }) + await SessionRetry.sleep(delay, input.abort).catch(() => {}) + continue } - retryErrorType = e?.name ?? "UnknownError" - attempt++ - const delay = SessionRetry.delay(attempt, error.name === "APIError" ? error : undefined) - SessionStatus.set(input.sessionID, { - type: "retry", - attempt, - message: retry, - next: Date.now() + delay, + input.assistantMessage.error = error + Bus.publish(Session.Event.Error, { + sessionID: input.assistantMessage.sessionID, + error: input.assistantMessage.error, }) - await SessionRetry.sleep(delay, input.abort).catch(() => {}) - continue + SessionStatus.set(input.sessionID, { type: "idle" }) } - input.assistantMessage.error = error - Bus.publish(Session.Event.Error, { - sessionID: input.assistantMessage.sessionID, - error: input.assistantMessage.error, - }) - SessionStatus.set(input.sessionID, { type: "idle" }) } if (snapshot) { const patch = await Snapshot.patch(snapshot) if (patch.files.length) { await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: input.assistantMessage.id, sessionID: input.sessionID, type: "patch", diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts index 9732fe2f9e..a6658577d6 100644 --- a/packages/opencode/src/session/prompt.ts +++ b/packages/opencode/src/session/prompt.ts @@ -3,13 +3,14 @@ import os from "os" import fs from "fs/promises" import z from "zod" import { Filesystem } from "../util/filesystem" -import { Identifier } from "../id/id" +import { SessionID, MessageID, PartID } from "./schema" import { MessageV2 } from "./message-v2" import { Log } from "../util/log" import { SessionRevert } from "./revert" import { Session } from "." 
import { Agent } from "../agent/agent" import { Provider } from "../provider/provider" +import { ModelID, ProviderID } from "../provider/schema" import { type Tool as AITool, tool, jsonSchema, type ToolCallOptions, asSchema } from "ai" import { SessionCompaction } from "./compaction" import { Instance } from "../project/instance" @@ -32,7 +33,8 @@ import { Flag } from "../flag/flag" import { ulid } from "ulid" import { spawn } from "child_process" import { Command } from "../command" -import { $, fileURLToPath, pathToFileURL } from "bun" +import { $ } from "bun" +import { pathToFileURL, fileURLToPath } from "url" import { ConfigMarkdown } from "../config/markdown" import { SessionSummary } from "./summary" import { NamedError } from "@opencode-ai/util/error" @@ -46,7 +48,8 @@ import { LLM } from "./llm" import { iife } from "@/util/iife" import { Shell } from "@/shell/shell" import { Truncate } from "@/tool/truncation" -import { Telemetry } from "@/telemetry" +import { decodeDataUrl } from "@/util/data-url" +import { Telemetry } from "@/telemetry" // altimate_change — session telemetry // @ts-ignore globalThis.AI_SDK_LOG_WARNINGS = false @@ -85,18 +88,18 @@ export namespace SessionPrompt { }, ) - export function assertNotBusy(sessionID: string) { + export function assertNotBusy(sessionID: SessionID) { const match = state()[sessionID] if (match) throw new Session.BusyError(sessionID) } export const PromptInput = z.object({ - sessionID: Identifier.schema("session"), - messageID: Identifier.schema("message").optional(), + sessionID: SessionID.zod, + messageID: MessageID.zod.optional(), model: z .object({ - providerID: z.string(), - modelID: z.string(), + providerID: ProviderID.zod, + modelID: ModelID.zod, }) .optional(), agent: z.string().optional(), @@ -237,7 +240,7 @@ export namespace SessionPrompt { return parts } - function start(sessionID: string) { + function start(sessionID: SessionID) { const s = state() if (s[sessionID]) return const controller = new 
AbortController() @@ -248,14 +251,14 @@ export namespace SessionPrompt { return controller.signal } - function resume(sessionID: string) { + function resume(sessionID: SessionID) { const s = state() if (!s[sessionID]) return return s[sessionID].abort.signal } - export function cancel(sessionID: string) { + export function cancel(sessionID: SessionID) { log.info("cancel", { sessionID }) const s = state() const match = s[sessionID] @@ -270,7 +273,7 @@ export namespace SessionPrompt { } export const LoopInput = z.object({ - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, resume_existing: z.boolean().optional(), }) export const loop = fn(LoopInput, async (input) => { @@ -292,18 +295,17 @@ export namespace SessionPrompt { let structuredOutput: unknown | undefined let step = 0 + const session = await Session.get(sessionID) + // altimate_change start — session telemetry tracking + await Telemetry.init() + Telemetry.setContext({ sessionId: sessionID, projectId: Instance.project?.id ?? "" }) const sessionStartTime = Date.now() let sessionTotalCost = 0 let sessionTotalTokens = 0 let toolCallCount = 0 - let compactionAttempts = 0 - let totalCompactions = 0 + let compactionCount = 0 let sessionAgentName = "" let sessionHadError = false - const MAX_COMPACTION_ATTEMPTS = 3 - const session = await Session.get(sessionID) - await Telemetry.init() - Telemetry.setContext({ sessionId: sessionID, projectId: Instance.project?.id ?? "" }) let emergencySessionEndFired = false const emergencySessionEnd = () => { if (emergencySessionEndFired) return @@ -318,14 +320,9 @@ export namespace SessionPrompt { duration_ms: Date.now() - sessionStartTime, }) } - // beforeExit covers event-loop drain without entering the session loop. - // exit covers process.exit() calls (sync only — track() buffers, flush is best-effort). - // SIGINT/SIGTERM are NOT handled here: the abort controller already triggers - // loop exit → finally block → normal session_end. 
Adding signal handlers - // would interfere with the default termination behavior. process.once("beforeExit", emergencySessionEnd) process.once("exit", emergencySessionEnd) - try { + // altimate_change end while (true) { SessionStatus.set(sessionID, { type: "busy" }) log.info("loop", { step, sessionID }) @@ -350,7 +347,6 @@ export namespace SessionPrompt { } if (!lastUser) throw new Error("No user message found in stream. This should never happen.") - if (!sessionAgentName) sessionAgentName = lastUser.agent if ( lastAssistant?.finish && !["tool-calls", "unknown"].includes(lastAssistant.finish) && @@ -389,7 +385,7 @@ export namespace SessionPrompt { const taskTool = await TaskTool.init() const taskModel = task.model ? await Provider.getModel(task.model.providerID, task.model.modelID) : model const assistantMessage = (await Session.updateMessage({ - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "assistant", parentID: lastUser.id, sessionID, @@ -414,7 +410,7 @@ export namespace SessionPrompt { }, })) as MessageV2.Assistant let part = (await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: assistantMessage.id, sessionID: assistantMessage.sessionID, type: "tool", @@ -459,14 +455,14 @@ export namespace SessionPrompt { extra: { bypassAgentCheck: true }, messages: msgs, async metadata(input) { - await Session.updatePart({ + part = (await Session.updatePart({ ...part, type: "tool", state: { ...part.state, ...input, }, - } satisfies MessageV2.ToolPart) + } satisfies MessageV2.ToolPart)) as MessageV2.ToolPart }, async ask(req) { await PermissionNext.ask({ @@ -483,7 +479,7 @@ export namespace SessionPrompt { }) const attachments = result?.attachments?.map((attachment) => ({ ...attachment, - id: Identifier.ascending("part"), + id: PartID.ascending(), sessionID, messageID: assistantMessage.id, })) @@ -527,7 +523,7 @@ export namespace SessionPrompt { start: part.state.status === "running" ? 
part.state.time.start : Date.now(), end: Date.now(), }, - metadata: part.metadata, + metadata: "metadata" in part.state ? part.state.metadata : undefined, input: part.state.input, }, } satisfies MessageV2.ToolPart) @@ -538,7 +534,7 @@ export namespace SessionPrompt { // If we create assistant messages w/ out user ones following mid loop thinking signatures // will be missing and it can cause errors for models like gemini for example const summaryUserMsg: MessageV2.User = { - id: Identifier.ascending("message"), + id: MessageID.ascending(), sessionID, role: "user", time: { @@ -549,7 +545,7 @@ export namespace SessionPrompt { } await Session.updateMessage(summaryUserMsg) await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: summaryUserMsg.id, sessionID, type: "text", @@ -569,6 +565,7 @@ export namespace SessionPrompt { abort, sessionID, auto: task.auto, + overflow: task.overflow, }) if (result === "stop") break continue @@ -580,26 +577,6 @@ export namespace SessionPrompt { lastFinished.summary !== true && (await SessionCompaction.isOverflow({ tokens: lastFinished.tokens, model })) ) { - compactionAttempts++ - totalCompactions++ - if (compactionAttempts > MAX_COMPACTION_ATTEMPTS) { - log.warn("compaction loop detected, stopping", { compactionAttempts, sessionID }) - Bus.publish(Session.Event.Error, { - sessionID, - error: new NamedError.Unknown({ - message: `Context still too large after ${MAX_COMPACTION_ATTEMPTS} compaction attempts. 
Try starting a new conversation.`, - }).toObject(), - }) - sessionHadError = true - break - } - Telemetry.track({ - type: "compaction_triggered", - timestamp: Date.now(), - session_id: sessionID, - trigger: "overflow_detection", - attempt: compactionAttempts, - }) await SessionCompaction.create({ sessionID, agent: lastUser.agent, @@ -621,7 +598,7 @@ export namespace SessionPrompt { const processor = SessionProcessor.create({ assistantMessage: (await Session.updateMessage({ - id: Identifier.ascending("message"), + id: MessageID.ascending(), parentID: lastUser.id, role: "assistant", mode: agent.name, @@ -680,6 +657,8 @@ export namespace SessionPrompt { sessionID: sessionID, messageID: lastUser.id, }) + // altimate_change start — session start telemetry + sessionAgentName = lastUser.agent Telemetry.track({ type: "session_start", timestamp: Date.now(), @@ -689,6 +668,7 @@ export namespace SessionPrompt { agent: lastUser.agent, project_id: Instance.project?.id ?? "", }) + // altimate_change end } // Ephemerally wrap queued user messages with a reminder to stay on track @@ -713,13 +693,14 @@ export namespace SessionPrompt { await Plugin.trigger("experimental.chat.messages.transform", {}, { messages: msgs }) // Build system prompt, adding structured output instruction if needed + const skills = await SystemPrompt.skills(agent) // Inject persistent memory blocks from previous sessions (gated by feature flag) const memoryInjection = Flag.ALTIMATE_DISABLE_MEMORY ? "" : await MemoryPrompt.inject() const system = [ ...(await SystemPrompt.environment(model)), + ...(skills ? [skills] : []), ...(memoryInjection ? [memoryInjection] : []), ...(await InstructionPrompt.system()), - ...(lastUser.system ? [lastUser.system] : []), ] const format = lastUser.format ?? { type: "text" } if (format.type === "json_schema") { @@ -748,13 +729,6 @@ export namespace SessionPrompt { toolChoice: format.type === "json_schema" ? 
"required" : undefined, }) - sessionTotalCost += processor.message.cost - sessionTotalTokens += - (processor.message.tokens?.input ?? 0) + - (processor.message.tokens?.output ?? 0) + - (processor.message.tokens?.reasoning ?? 0) - toolCallCount += processor.toolCallCount - // If structured output was captured, save it and exit immediately // This takes priority because the StructuredOutput tool was called successfully if (structuredOutput !== undefined) { @@ -779,82 +753,67 @@ export namespace SessionPrompt { } } - if (result === "stop") { - if (processor.message.error) sessionHadError = true - break - } - if (result === "continue") { - // Reset compaction counter after a successful non-compaction step. - // The counter protects against tight compact→overflow loops within - // a single turn, but should not accumulate across unrelated turns. - compactionAttempts = 0 - } + // altimate_change start — accumulate session metrics + sessionTotalCost += processor.message.cost ?? 0 + const t = processor.message.tokens + sessionTotalTokens += (t.input + t.output + t.reasoning + t.cache.read + t.cache.write) + const stepParts = await MessageV2.parts(processor.message.id) + toolCallCount += stepParts.filter((p) => p.type === "tool").length + if (processor.message.error) sessionHadError = true + // altimate_change end + + if (result === "stop") break if (result === "compact") { - compactionAttempts++ - totalCompactions++ - if (compactionAttempts > MAX_COMPACTION_ATTEMPTS) { - log.warn("compaction loop detected, stopping", { compactionAttempts, sessionID }) - Bus.publish(Session.Event.Error, { - sessionID, - error: new NamedError.Unknown({ - message: `Context still too large after ${MAX_COMPACTION_ATTEMPTS} compaction attempts. 
Try starting a new conversation.`, - }).toObject(), - }) - sessionHadError = true - break - } - Telemetry.track({ - type: "compaction_triggered", - timestamp: Date.now(), - session_id: sessionID, - trigger: "error_recovery", - attempt: compactionAttempts, - }) + // altimate_change start — track compaction count + compactionCount++ + // altimate_change end await SessionCompaction.create({ sessionID, agent: lastUser.agent, model: lastUser.model, auto: true, + overflow: !processor.message.finish, }) } continue } SessionCompaction.prune({ sessionID }) - } finally { - process.removeListener("beforeExit", emergencySessionEnd) - process.removeListener("exit", emergencySessionEnd) - const outcome: "completed" | "abandoned" | "error" = abort.aborted - ? "abandoned" - : sessionHadError - ? "error" - : sessionTotalCost === 0 && toolCallCount === 0 - ? "abandoned" - : "completed" + // altimate_change start — session end telemetry + const outcome = abort.aborted + ? "aborted" + : sessionHadError + ? "error" + : sessionTotalCost === 0 && toolCallCount === 0 + ? 
"abandoned" + : "completed" + Telemetry.track({ + type: "agent_outcome", + timestamp: Date.now(), + session_id: sessionID, + agent: sessionAgentName, + tool_calls: toolCallCount, + generations: step, + duration_ms: Date.now() - sessionStartTime, + cost: sessionTotalCost, + compactions: compactionCount, + outcome, + }) + if (!emergencySessionEndFired) { + emergencySessionEndFired = true + process.off("beforeExit", emergencySessionEnd) + process.off("exit", emergencySessionEnd) Telemetry.track({ - type: "agent_outcome", + type: "session_end", timestamp: Date.now(), session_id: sessionID, - agent: sessionAgentName, - tool_calls: toolCallCount, - generations: step, + total_cost: sessionTotalCost, + total_tokens: sessionTotalTokens, + tool_call_count: toolCallCount, duration_ms: Date.now() - sessionStartTime, - cost: sessionTotalCost, - compactions: totalCompactions, - outcome, }) - if (!emergencySessionEndFired) { - Telemetry.track({ - type: "session_end", - timestamp: Date.now(), - session_id: sessionID, - total_cost: sessionTotalCost, - total_tokens: sessionTotalTokens, - tool_call_count: toolCallCount, - duration_ms: Date.now() - sessionStartTime, - }) - } - await Telemetry.shutdown() } + await Telemetry.shutdown() + // altimate_change end for await (const item of MessageV2.stream(sessionID)) { if (item.info.role === "user") continue const queued = state()[sessionID]?.callbacks ?? 
[] @@ -866,7 +825,7 @@ export namespace SessionPrompt { throw new Error("Impossible") }) - async function lastModel(sessionID: string) { + async function lastModel(sessionID: SessionID) { for await (const item of MessageV2.stream(sessionID)) { if (item.info.role === "user" && item.info.model) return item.info.model } @@ -922,7 +881,7 @@ export namespace SessionPrompt { }) for (const item of await ToolRegistry.tools( - { modelID: input.model.api.id, providerID: input.model.providerID }, + { modelID: ModelID.make(input.model.api.id), providerID: input.model.providerID }, input.agent, )) { const schema = ProviderTransform.schema(input.model, z.toJSONSchema(item.parameters)) @@ -948,7 +907,7 @@ export namespace SessionPrompt { ...result, attachments: result.attachments?.map((attachment) => ({ ...attachment, - id: Identifier.ascending("part"), + id: PartID.ascending(), sessionID: ctx.sessionID, messageID: input.processor.message.id, })), @@ -1051,7 +1010,7 @@ export namespace SessionPrompt { output: truncated.content, attachments: attachments.map((attachment) => ({ ...attachment, - id: Identifier.ascending("part"), + id: PartID.ascending(), sessionID: ctx.sessionID, messageID: input.processor.message.id, })), @@ -1105,7 +1064,7 @@ export namespace SessionPrompt { const variant = input.variant ?? (agent.variant && full?.variants?.[agent.variant] ? agent.variant : undefined) const info: MessageV2.Info = { - id: input.messageID ?? Identifier.ascending("message"), + id: input.messageID ?? MessageID.ascending(), role: "user", sessionID: input.sessionID, time: { @@ -1123,7 +1082,7 @@ export namespace SessionPrompt { type Draft<T> = T extends MessageV2.Part ? Omit<T, "id"> & { id?: string } : never const assign = (part: Draft<MessageV2.Part>): MessageV2.Part => ({ ...part, - id: part.id ?? Identifier.ascending("part"), + id: part.id ? 
PartID.make(part.id) : PartID.ascending(), }) const parts = await Promise.all( @@ -1213,7 +1172,7 @@ export namespace SessionPrompt { sessionID: input.sessionID, type: "text", synthetic: true, - text: Buffer.from(part.url, "base64url").toString(), + text: decodeDataUrl(part.url), }, { ...part, @@ -1469,7 +1428,7 @@ export namespace SessionPrompt { if (!Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE) { if (input.agent.name === "plan") { userMessage.parts.push({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMessage.info.id, sessionID: userMessage.info.sessionID, type: "text", @@ -1480,7 +1439,7 @@ export namespace SessionPrompt { const wasPlan = input.messages.some((msg) => msg.info.role === "assistant" && msg.info.agent === "plan") if (wasPlan && input.agent.name === "builder") { userMessage.parts.push({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMessage.info.id, sessionID: userMessage.info.sessionID, type: "text", @@ -1500,7 +1459,7 @@ export namespace SessionPrompt { const exists = await Filesystem.exists(plan) if (exists) { const part = await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMessage.info.id, sessionID: userMessage.info.sessionID, type: "text", @@ -1519,7 +1478,7 @@ export namespace SessionPrompt { const exists = await Filesystem.exists(plan) if (!exists) await fs.mkdir(path.dirname(plan), { recursive: true }) const part = await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMessage.info.id, sessionID: userMessage.info.sessionID, type: "text", @@ -1602,12 +1561,12 @@ NOTE: At any point in time through this workflow you should feel free to ask the } export const ShellInput = z.object({ - sessionID: Identifier.schema("session"), + sessionID: SessionID.zod, agent: z.string(), model: z .object({ - providerID: z.string(), - modelID: z.string(), + providerID: ProviderID.zod, + modelID: ModelID.zod, }) 
.optional(), command: z.string(), @@ -1639,7 +1598,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the const agent = await Agent.get(input.agent) const model = input.model ?? agent.model ?? (await lastModel(input.sessionID)) const userMsg: MessageV2.User = { - id: Identifier.ascending("message"), + id: MessageID.ascending(), sessionID: input.sessionID, time: { created: Date.now(), @@ -1654,7 +1613,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the await Session.updateMessage(userMsg) const userPart: MessageV2.Part = { type: "text", - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMsg.id, sessionID: input.sessionID, text: "The following tool was executed by the user", @@ -1663,7 +1622,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the await Session.updatePart(userPart) const msg: MessageV2.Assistant = { - id: Identifier.ascending("message"), + id: MessageID.ascending(), sessionID: input.sessionID, parentID: userMsg.id, mode: input.agent, @@ -1689,7 +1648,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the await Session.updateMessage(msg) const part: MessageV2.Part = { type: "tool", - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: msg.id, sessionID: input.sessionID, tool: "bash", @@ -1769,6 +1728,7 @@ NOTE: At any point in time through this workflow you should feel free to ask the const proc = spawn(shell, args, { cwd, detached: process.platform !== "win32", + windowsHide: process.platform === "win32", stdio: ["ignore", "pipe", "pipe"], env: { ...process.env, @@ -1852,8 +1812,8 @@ NOTE: At any point in time through this workflow you should feel free to ask the } export const CommandInput = z.object({ - messageID: Identifier.schema("message").optional(), - sessionID: Identifier.schema("session"), + messageID: MessageID.zod.optional(), + sessionID: SessionID.zod, agent: 
z.string().optional(), model: z.string().optional(), arguments: z.string(), @@ -2025,23 +1985,14 @@ NOTE: At any point in time through this workflow you should feel free to ask the messageID: result.info.id, }) - Telemetry.track({ - type: "command", - timestamp: Date.now(), - session_id: input.sessionID, - command_name: input.command, - command_source: command.source ?? "unknown", - message_id: result.info.id, - }) - return result } async function ensureTitle(input: { session: Session.Info history: MessageV2.WithParts[] - providerID: string - modelID: string + providerID: ProviderID + modelID: ModelID }) { if (input.session.parentID) return if (!Session.isDefaultTitle(input.session.title)) return diff --git a/packages/opencode/src/session/prompt/anthropic.txt b/packages/opencode/src/session/prompt/anthropic.txt index 21d9c0e9f2..fff5bef987 100644 --- a/packages/opencode/src/session/prompt/anthropic.txt +++ b/packages/opencode/src/session/prompt/anthropic.txt @@ -1,4 +1,4 @@ -You are OpenCode, the best coding agent on the planet. +You are Altimate Code, the best coding agent on the planet. You are an interactive CLI tool that helps users with software engineering tasks. Use the instructions below and the tools available to you to assist the user. @@ -7,9 +7,9 @@ IMPORTANT: You must NEVER generate or guess URLs for the user unless you are con If the user asks for help or wants to give feedback inform them of the following: - ctrl+p to list available actions - To give feedback, users should report the issue at - https://github.com/anomalyco/opencode + https://github.com/AltimateAI/altimate-code -When the user directly asks about OpenCode (eg. "can OpenCode do...", "does OpenCode have..."), or asks in second person (eg. "are you able...", "can you do..."), or asks how to use a specific OpenCode feature (eg. implement a hook, write a slash command, or install an MCP server), use the WebFetch tool to gather information to answer the question from OpenCode docs. 
The list of available docs is available at https://opencode.ai/docs +When the user directly asks about Altimate Code (eg. "can Altimate Code do...", "does Altimate Code have..."), or asks in second person (eg. "are you able...", "can you do..."), or asks how to use a specific Altimate Code feature (eg. implement a hook, write a slash command, or install an MCP server), use the WebFetch tool to gather information to answer the question from Altimate Code docs. The list of available docs is available at https://altimate.ai/docs # Tone and style - Only use emojis if the user explicitly requests it. Avoid using emojis in all communication unless asked. @@ -18,7 +18,7 @@ When the user directly asks about OpenCode (eg. "can OpenCode do...", "does Open - NEVER create files unless they're absolutely necessary for achieving your goal. ALWAYS prefer editing an existing file to creating a new one. This includes markdown files. # Professional objectivity -Prioritize technical accuracy and truthfulness over validating the user's beliefs. Focus on facts and problem-solving, providing direct, objective technical info without any unnecessary superlatives, praise, or emotional validation. It is best for the user if OpenCode honestly applies the same rigorous standards to all ideas and disagrees when necessary, even if it may not be what the user wants to hear. Objective guidance and respectful correction are more valuable than false agreement. Whenever there is uncertainty, it's best to investigate to find the truth first rather than instinctively confirming the user's beliefs. +Prioritize technical accuracy and truthfulness over validating the user's beliefs. Focus on facts and problem-solving, providing direct, objective technical info without any unnecessary superlatives, praise, or emotional validation. 
It is best for the user if Altimate Code honestly applies the same rigorous standards to all ideas and disagrees when necessary, even if it may not be what the user wants to hear. Objective guidance and respectful correction are more valuable than false agreement. Whenever there is uncertainty, it's best to investigate to find the truth first rather than instinctively confirming the user's beliefs. # Task Management You have access to the TodoWrite tools to help you manage and plan tasks. Use these tools VERY frequently to ensure that you are tracking your tasks and giving the user visibility into your progress. diff --git a/packages/opencode/src/session/prompt/codex_header.txt b/packages/opencode/src/session/prompt/codex_header.txt index d595cadb0e..4f01a4a65f 100644 --- a/packages/opencode/src/session/prompt/codex_header.txt +++ b/packages/opencode/src/session/prompt/codex_header.txt @@ -1,4 +1,4 @@ -You are OpenCode, the best coding agent on the planet. +You are Altimate Code, the best coding agent on the planet. You are an interactive CLI tool that helps users with software engineering tasks. Use the instructions below and the tools available to you to assist the user. 
diff --git a/packages/opencode/src/session/prompt/qwen.txt b/packages/opencode/src/session/prompt/qwen.txt index d87fc37957..c24c9ed75e 100644 --- a/packages/opencode/src/session/prompt/qwen.txt +++ b/packages/opencode/src/session/prompt/qwen.txt @@ -6,9 +6,9 @@ IMPORTANT: You must NEVER generate or guess URLs for the user unless you are con If the user asks for help or wants to give feedback inform them of the following: - /help: Get help with using opencode -- To give feedback, users should report the issue at https://github.com/anomalyco/opencode/issues +- To give feedback, users should report the issue at https://github.com/AltimateAI/altimate-code/issues -When the user directly asks about opencode (eg 'can opencode do...', 'does opencode have...') or asks in second person (eg 'are you able...', 'can you do...'), first use the WebFetch tool to gather information to answer the question from opencode docs at https://opencode.ai +When the user directly asks about opencode (eg 'can opencode do...', 'does opencode have...') or asks in second person (eg 'are you able...', 'can you do...'), first use the WebFetch tool to gather information to answer the question from opencode docs at https://altimate.ai # Tone and style You should be concise, direct, and to the point. When you run a non-trivial bash command, you should explain what the command does and why you are running it, to make sure the user understands what you are doing (this is especially important when you are running a command that will make changes to the user's system). 
diff --git a/packages/opencode/src/session/revert.ts b/packages/opencode/src/session/revert.ts index ef9c7e2aac..c5c9edbbdf 100644 --- a/packages/opencode/src/session/revert.ts +++ b/packages/opencode/src/session/revert.ts @@ -1,5 +1,5 @@ import z from "zod" -import { Identifier } from "../id/id" +import { SessionID, MessageID, PartID } from "./schema" import { Snapshot } from "../snapshot" import { MessageV2 } from "./message-v2" import { Session } from "." @@ -15,9 +15,9 @@ export namespace SessionRevert { const log = Log.create({ service: "session.revert" }) export const RevertInput = z.object({ - sessionID: Identifier.schema("session"), - messageID: Identifier.schema("message"), - partID: Identifier.schema("part").optional(), + sessionID: SessionID.zod, + messageID: MessageID.zod, + partID: PartID.zod.optional(), }) export type RevertInput = z.infer<typeof RevertInput> @@ -79,7 +79,7 @@ export namespace SessionRevert { return session } - export async function unrevert(input: { sessionID: string }) { + export async function unrevert(input: { sessionID: SessionID }) { log.info("unreverting", input) SessionPrompt.assertNotBusy(input.sessionID) const session = await Session.get(input.sessionID) diff --git a/packages/opencode/src/session/schema.ts b/packages/opencode/src/session/schema.ts new file mode 100644 index 0000000000..b37fefae69 --- /dev/null +++ b/packages/opencode/src/session/schema.ts @@ -0,0 +1,41 @@ +import { Schema } from "effect" +import z from "zod" + +import { withStatics } from "@/util/schema" +import { Identifier } from "@/id/id" + +const sessionIdSchema = Schema.String.pipe(Schema.brand("SessionID")) + +export type SessionID = typeof sessionIdSchema.Type + +export const SessionID = sessionIdSchema.pipe( + withStatics((schema: typeof sessionIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + descending: (id?: string) => schema.makeUnsafe(Identifier.descending("session", id)), + zod: 
Identifier.schema("session").pipe(z.custom<SessionID>()), + })), +) + +const messageIdSchema = Schema.String.pipe(Schema.brand("MessageID")) + +export type MessageID = typeof messageIdSchema.Type + +export const MessageID = messageIdSchema.pipe( + withStatics((schema: typeof messageIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + ascending: (id?: string) => schema.makeUnsafe(Identifier.ascending("message", id)), + zod: Identifier.schema("message").pipe(z.custom<MessageID>()), + })), +) + +const partIdSchema = Schema.String.pipe(Schema.brand("PartID")) + +export type PartID = typeof partIdSchema.Type + +export const PartID = partIdSchema.pipe( + withStatics((schema: typeof partIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + ascending: (id?: string) => schema.makeUnsafe(Identifier.ascending("part", id)), + zod: Identifier.schema("part").pipe(z.custom<PartID>()), + })), +) diff --git a/packages/opencode/src/session/session.sql.ts b/packages/opencode/src/session/session.sql.ts index 0630760f3b..b3229edd13 100644 --- a/packages/opencode/src/session/session.sql.ts +++ b/packages/opencode/src/session/session.sql.ts @@ -1,9 +1,12 @@ import { sqliteTable, text, integer, index, primaryKey } from "drizzle-orm/sqlite-core" import { ProjectTable } from "../project/project.sql" import type { MessageV2 } from "./message-v2" -import type { Snapshot } from "@/snapshot" -import type { PermissionNext } from "@/permission/next" -import { Timestamps } from "@/storage/schema.sql" +import type { Snapshot } from "../snapshot" +import type { PermissionNext } from "../permission/next" +import type { ProjectID } from "../project/schema" +import type { SessionID, MessageID, PartID } from "./schema" +import type { WorkspaceID } from "../control-plane/schema" +import { Timestamps } from "../storage/schema.sql" type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID"> type InfoData = Omit<MessageV2.Info, "id" | "sessionID"> @@ -11,12 +14,13 @@ type 
InfoData = Omit<MessageV2.Info, "id" | "sessionID"> export const SessionTable = sqliteTable( "session", { - id: text().primaryKey(), + id: text().$type<SessionID>().primaryKey(), project_id: text() + .$type<ProjectID>() .notNull() .references(() => ProjectTable.id, { onDelete: "cascade" }), - workspace_id: text(), - parent_id: text(), + workspace_id: text().$type<WorkspaceID>(), + parent_id: text().$type<SessionID>(), slug: text().notNull(), directory: text().notNull(), title: text().notNull(), @@ -26,7 +30,7 @@ export const SessionTable = sqliteTable( summary_deletions: integer(), summary_files: integer(), summary_diffs: text({ mode: "json" }).$type<Snapshot.FileDiff[]>(), - revert: text({ mode: "json" }).$type<{ messageID: string; partID?: string; snapshot?: string; diff?: string }>(), + revert: text({ mode: "json" }).$type<{ messageID: MessageID; partID?: PartID; snapshot?: string; diff?: string }>(), permission: text({ mode: "json" }).$type<PermissionNext.Ruleset>(), ...Timestamps, time_compacting: integer(), @@ -42,34 +46,40 @@ export const SessionTable = sqliteTable( export const MessageTable = sqliteTable( "message", { - id: text().primaryKey(), + id: text().$type<MessageID>().primaryKey(), session_id: text() + .$type<SessionID>() .notNull() .references(() => SessionTable.id, { onDelete: "cascade" }), ...Timestamps, data: text({ mode: "json" }).notNull().$type<InfoData>(), }, - (table) => [index("message_session_idx").on(table.session_id)], + (table) => [index("message_session_time_created_id_idx").on(table.session_id, table.time_created, table.id)], ) export const PartTable = sqliteTable( "part", { - id: text().primaryKey(), + id: text().$type<PartID>().primaryKey(), message_id: text() + .$type<MessageID>() .notNull() .references(() => MessageTable.id, { onDelete: "cascade" }), - session_id: text().notNull(), + session_id: text().$type<SessionID>().notNull(), ...Timestamps, data: text({ mode: "json" }).notNull().$type<PartData>(), }, - (table) => 
[index("part_message_idx").on(table.message_id), index("part_session_idx").on(table.session_id)], + (table) => [ + index("part_message_id_id_idx").on(table.message_id, table.id), + index("part_session_idx").on(table.session_id), + ], ) export const TodoTable = sqliteTable( "todo", { session_id: text() + .$type<SessionID>() .notNull() .references(() => SessionTable.id, { onDelete: "cascade" }), content: text().notNull(), diff --git a/packages/opencode/src/session/status.ts b/packages/opencode/src/session/status.ts index 1db03b5db0..57e7939853 100644 --- a/packages/opencode/src/session/status.ts +++ b/packages/opencode/src/session/status.ts @@ -1,6 +1,7 @@ import { BusEvent } from "@/bus/bus-event" import { Bus } from "@/bus" import { Instance } from "@/project/instance" +import { SessionID } from "./schema" import z from "zod" export namespace SessionStatus { @@ -28,7 +29,7 @@ export namespace SessionStatus { Status: BusEvent.define( "session.status", z.object({ - sessionID: z.string(), + sessionID: SessionID.zod, status: Info, }), ), @@ -36,7 +37,7 @@ export namespace SessionStatus { Idle: BusEvent.define( "session.idle", z.object({ - sessionID: z.string(), + sessionID: SessionID.zod, }), ), } @@ -46,7 +47,7 @@ export namespace SessionStatus { return data }) - export function get(sessionID: string) { + export function get(sessionID: SessionID) { return ( state()[sessionID] ?? { type: "idle", @@ -58,7 +59,7 @@ export namespace SessionStatus { return state() } - export function set(sessionID: string, status: Info) { + export function set(sessionID: SessionID, status: Info) { Bus.publish(Event.Status, { sessionID, status, diff --git a/packages/opencode/src/session/summary.ts b/packages/opencode/src/session/summary.ts index 349336ba78..678a008518 100644 --- a/packages/opencode/src/session/summary.ts +++ b/packages/opencode/src/session/summary.ts @@ -4,6 +4,7 @@ import { Session } from "." 
import { MessageV2 } from "./message-v2" import { Identifier } from "@/id/id" +import { SessionID, MessageID } from "./schema" import { Snapshot } from "@/snapshot" import { Storage } from "@/storage/storage" @@ -68,8 +69,8 @@ export namespace SessionSummary { export const summarize = fn( z.object({ - sessionID: z.string(), - messageID: z.string(), + sessionID: SessionID.zod, + messageID: MessageID.zod, }), async (input) => { const all = await Session.messages({ sessionID: input.sessionID }) @@ -80,7 +81,7 @@ export namespace SessionSummary { }, ) - async function summarizeSession(input: { sessionID: string; messages: MessageV2.WithParts[] }) { + async function summarizeSession(input: { sessionID: SessionID; messages: MessageV2.WithParts[] }) { const diffs = await computeDiff({ messages: input.messages }) await Session.setSummary({ sessionID: input.sessionID, @@ -113,8 +114,8 @@ export namespace SessionSummary { export const diff = fn( z.object({ - sessionID: Identifier.schema("session"), - messageID: Identifier.schema("message").optional(), + sessionID: SessionID.zod, + messageID: MessageID.zod.optional(), }), async (input) => { const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", input.sessionID]).catch(() => []) diff --git a/packages/opencode/src/session/system.ts b/packages/opencode/src/session/system.ts index a61dd8cba5..a4c4684ffe 100644 --- a/packages/opencode/src/session/system.ts +++ b/packages/opencode/src/session/system.ts @@ -10,6 +10,9 @@ import PROMPT_GEMINI from "./prompt/gemini.txt" import PROMPT_CODEX from "./prompt/codex_header.txt" import PROMPT_TRINITY from "./prompt/trinity.txt" import type { Provider } from "@/provider/provider" +import type { Agent } from "@/agent/agent" +import { PermissionNext } from "@/permission/next" +import { Skill } from "@/skill" export namespace SystemPrompt { export function instructions() { @@ -34,6 +37,7 @@ export namespace SystemPrompt { `Here is some useful information about the environment you 
are running in:`, `<env>`, ` Working directory: ${Instance.directory}`, + ` Workspace root folder: ${Instance.worktree}`, ` Is directory a git repo: ${project.vcs === "git" ? "yes" : "no"}`, ` Platform: ${process.platform}`, ` Today's date: ${new Date().toDateString()}`, @@ -51,4 +55,18 @@ export namespace SystemPrompt { ].join("\n"), ] } + + export async function skills(agent: Agent.Info) { + if (PermissionNext.disabled(["skill"], agent.permission).has("skill")) return + + const list = await Skill.available(agent) + + return [ + "Skills provide specialized instructions and workflows for specific tasks.", + "Use the skill tool to load a skill when a task matches its description.", + // the agents seem to ingest the information about skills a bit better if we present a more verbose + // version of them here and a less verbose version in tool description, rather than vice versa. + Skill.fmt(list, { verbose: true }), + ].join("\n") + } } diff --git a/packages/opencode/src/session/todo.ts b/packages/opencode/src/session/todo.ts index ec2bcdda3c..02ad0d3b33 100644 --- a/packages/opencode/src/session/todo.ts +++ b/packages/opencode/src/session/todo.ts @@ -1,5 +1,6 @@ import { BusEvent } from "@/bus/bus-event" import { Bus } from "@/bus" +import { SessionID } from "./schema" import z from "zod" import { Database, eq, asc } from "../storage/db" import { TodoTable } from "./session.sql" @@ -18,13 +19,13 @@ export namespace Todo { Updated: BusEvent.define( "todo.updated", z.object({ - sessionID: z.string(), + sessionID: SessionID.zod, todos: z.array(Info), }), ), } - export function update(input: { sessionID: string; todos: Info[] }) { + export function update(input: { sessionID: SessionID; todos: Info[] }) { Database.transaction((db) => { db.delete(TodoTable).where(eq(TodoTable.session_id, input.sessionID)).run() if (input.todos.length === 0) return @@ -43,7 +44,7 @@ export namespace Todo { Bus.publish(Event.Updated, input) } - export function get(sessionID: string) { + 
export function get(sessionID: SessionID) { const rows = Database.use((db) => db.select().from(TodoTable).where(eq(TodoTable.session_id, sessionID)).orderBy(asc(TodoTable.position)).all(), ) diff --git a/packages/opencode/src/share/share-next.ts b/packages/opencode/src/share/share-next.ts index 94a2f0a010..a02e15b2b2 100644 --- a/packages/opencode/src/share/share-next.ts +++ b/packages/opencode/src/share/share-next.ts @@ -1,8 +1,10 @@ import { Bus } from "@/bus" +import { Account } from "@/account" import { Config } from "@/config/config" -import { ulid } from "ulid" import { Provider } from "@/provider/provider" +import { ProviderID, ModelID } from "@/provider/schema" import { Session } from "@/session" +import type { SessionID } from "@/session/schema" import { MessageV2 } from "@/session/message-v2" import { Database, eq } from "@/storage/db" import { SessionShareTable } from "./share.sql" @@ -12,8 +14,51 @@ import type * as SDK from "@opencode-ai/sdk/v2" export namespace ShareNext { const log = Log.create({ service: "share-next" }) + type ApiEndpoints = { + create: string + sync: (shareId: string) => string + remove: (shareId: string) => string + data: (shareId: string) => string + } + + function apiEndpoints(resource: string): ApiEndpoints { + return { + create: `/api/${resource}`, + sync: (shareId) => `/api/${resource}/${shareId}/sync`, + remove: (shareId) => `/api/${resource}/${shareId}`, + data: (shareId) => `/api/${resource}/${shareId}/data`, + } + } + + const legacyApi = apiEndpoints("share") + const consoleApi = apiEndpoints("shares") + export async function url() { - return Config.get().then((x) => x.enterprise?.url ?? 
"https://altimate.ai") + const req = await request() + return req.baseUrl + } + + export async function request(): Promise<{ + headers: Record<string, string> + api: ApiEndpoints + baseUrl: string + }> { + const headers: Record<string, string> = {} + + const active = Account.active() + if (!active?.active_org_id) { + const baseUrl = await Config.get().then((x) => x.enterprise?.url ?? "https://altimate.ai") + return { headers, api: legacyApi, baseUrl } + } + + const token = await Account.token(active.id) + if (!token) { + throw new Error("No active account token available for sharing") + } + + headers["authorization"] = `Bearer ${token}` + headers["x-org-id"] = active.active_org_id + return { headers, api: consoleApi, baseUrl: active.url } } const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1" @@ -66,18 +111,23 @@ export namespace ShareNext { }) } - export async function create(sessionID: string) { + export async function create(sessionID: SessionID) { if (disabled) return { id: "", url: "", secret: "" } log.info("creating share", { sessionID }) - const result = await fetch(`${await url()}/api/share`, { + const req = await request() + const response = await fetch(`${req.baseUrl}${req.api.create}`, { method: "POST", - headers: { - "Content-Type": "application/json", - }, + headers: { ...req.headers, "Content-Type": "application/json" }, body: JSON.stringify({ sessionID: sessionID }), }) - .then((x) => x.json()) - .then((x) => x as { id: string; url: string; secret: string }) + + if (!response.ok) { + const message = await response.text().catch(() => response.statusText) + throw new Error(`Failed to create share (${response.status}): ${message || response.statusText}`) + } + + const result = (await response.json()) as { id: string; url: string; secret: string } + Database.use((db) => db .insert(SessionShareTable) @@ -92,7 +142,7 @@ export namespace ShareNext { return result } - function get(sessionID: 
string) { + function get(sessionID: SessionID) { const row = Database.use((db) => db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(), ) @@ -122,20 +172,35 @@ export namespace ShareNext { data: SDK.Model[] } + function key(item: Data) { + switch (item.type) { + case "session": + return "session" + case "message": + return `message/${item.data.id}` + case "part": + return `part/${item.data.messageID}/${item.data.id}` + case "session_diff": + return "session_diff" + case "model": + return "model" + } + } + const queue = new Map<string, { timeout: NodeJS.Timeout; data: Map<string, Data> }>() - async function sync(sessionID: string, data: Data[]) { + async function sync(sessionID: SessionID, data: Data[]) { if (disabled) return const existing = queue.get(sessionID) if (existing) { for (const item of data) { - existing.data.set("id" in item ? (item.id as string) : ulid(), item) + existing.data.set(key(item), item) } return } const dataMap = new Map<string, Data>() for (const item of data) { - dataMap.set("id" in item ? 
(item.id as string) : ulid(), item) + dataMap.set(key(item), item) } const timeout = setTimeout(async () => { @@ -145,47 +210,60 @@ export namespace ShareNext { const share = get(sessionID) if (!share) return - await fetch(`${await url()}/api/share/${share.id}/sync`, { + const req = await request() + const response = await fetch(`${req.baseUrl}${req.api.sync(share.id)}`, { method: "POST", - headers: { - "Content-Type": "application/json", - }, + headers: { ...req.headers, "Content-Type": "application/json" }, body: JSON.stringify({ secret: share.secret, data: Array.from(queued.data.values()), }), }) + + if (!response.ok) { + log.warn("failed to sync share", { sessionID, shareID: share.id, status: response.status }) + } }, 1000) queue.set(sessionID, { timeout, data: dataMap }) } - export async function remove(sessionID: string) { + export async function remove(sessionID: SessionID) { if (disabled) return log.info("removing share", { sessionID }) const share = get(sessionID) if (!share) return - await fetch(`${await url()}/api/share/${share.id}`, { + + const req = await request() + const response = await fetch(`${req.baseUrl}${req.api.remove(share.id)}`, { method: "DELETE", - headers: { - "Content-Type": "application/json", - }, + headers: { ...req.headers, "Content-Type": "application/json" }, body: JSON.stringify({ secret: share.secret, }), }) + + if (!response.ok) { + const message = await response.text().catch(() => response.statusText) + throw new Error(`Failed to remove share (${response.status}): ${message || response.statusText}`) + } + Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run()) } - async function fullSync(sessionID: string) { + async function fullSync(sessionID: SessionID) { log.info("full sync", { sessionID }) const session = await Session.get(sessionID) const diffs = await Session.diff(sessionID) const messages = await Array.fromAsync(MessageV2.stream(sessionID)) const models = await 
Promise.all( - messages - .filter((m) => m.info.role === "user") - .map((m) => (m.info as SDK.UserMessage).model) - .map((m) => Provider.getModel(m.providerID, m.modelID).then((m) => m)), + Array.from( + new Map( + messages + .filter((m) => m.info.role === "user") + .map((m) => (m.info as SDK.UserMessage).model) + .map((m) => [`${m.providerID}/${m.modelID}`, m] as const), + ).values(), + ).map((m) => Provider.getModel(ProviderID.make(m.providerID), ModelID.make(m.modelID)).then((item) => item)), ) await sync(sessionID, [ { diff --git a/packages/opencode/src/share/share.sql.ts b/packages/opencode/src/share/share.sql.ts index 268d41a6f6..f337e106a5 100644 --- a/packages/opencode/src/share/share.sql.ts +++ b/packages/opencode/src/share/share.sql.ts @@ -1,6 +1,6 @@ import { sqliteTable, text } from "drizzle-orm/sqlite-core" import { SessionTable } from "../session/session.sql" -import { Timestamps } from "@/storage/schema.sql" +import { Timestamps } from "../storage/schema.sql" export const SessionShareTable = sqliteTable("session_share", { session_id: text() diff --git a/packages/opencode/src/shell/shell.ts b/packages/opencode/src/shell/shell.ts index 60ae46f5ee..a30889d699 100644 --- a/packages/opencode/src/shell/shell.ts +++ b/packages/opencode/src/shell/shell.ts @@ -15,7 +15,10 @@ export namespace Shell { if (process.platform === "win32") { await new Promise<void>((resolve) => { - const killer = spawn("taskkill", ["/pid", String(pid), "/f", "/t"], { stdio: "ignore" }) + const killer = spawn("taskkill", ["/pid", String(pid), "/f", "/t"], { + stdio: "ignore", + windowsHide: true, + }) killer.once("exit", () => resolve()) killer.once("error", () => resolve()) }) diff --git a/packages/opencode/src/skill/skill.ts b/packages/opencode/src/skill/skill.ts index c474c94dd7..fa984b3e11 100644 --- a/packages/opencode/src/skill/skill.ts +++ b/packages/opencode/src/skill/skill.ts @@ -13,6 +13,9 @@ import { Bus } from "@/bus" import { Session } from "@/session" import { Discovery 
} from "./discovery" import { Glob } from "../util/glob" +import { pathToFileURL } from "url" +import type { Agent } from "@/agent/agent" +import { PermissionNext } from "@/permission/next" export namespace Skill { const log = Log.create({ service: "skill" }) @@ -186,4 +189,30 @@ export namespace Skill { export async function dirs() { return state().then((x) => x.dirs) } + + export async function available(agent?: Agent.Info) { + const list = await all() + if (!agent) return list + return list.filter((skill) => PermissionNext.evaluate("skill", skill.name, agent.permission).action !== "deny") + } + + export function fmt(list: Info[], opts: { verbose: boolean }) { + if (list.length === 0) { + return "No skills are currently available." + } + if (opts.verbose) { + return [ + "<available_skills>", + ...list.flatMap((skill) => [ + ` <skill>`, + ` <name>${skill.name}</name>`, + ` <description>${skill.description}</description>`, + ` <location>${pathToFileURL(skill.location).href}</location>`, + ` </skill>`, + ]), + "</available_skills>", + ].join("\n") + } + return ["## Available Skills", ...list.flatMap((skill) => `- **${skill.name}**: ${skill.description}`)].join("\n") + } } diff --git a/packages/opencode/src/snapshot/index.ts b/packages/opencode/src/snapshot/index.ts index 1acbdba092..72252b7b4c 100644 --- a/packages/opencode/src/snapshot/index.ts +++ b/packages/opencode/src/snapshot/index.ts @@ -1,4 +1,3 @@ -import { $ } from "bun" import path from "path" import fs from "fs/promises" import { Filesystem } from "../util/filesystem" @@ -9,12 +8,17 @@ import z from "zod" import { Config } from "../config/config" import { Instance } from "../project/instance" import { Scheduler } from "../scheduler" +import { Process } from "@/util/process" export namespace Snapshot { const log = Log.create({ service: "snapshot" }) const hour = 60 * 60 * 1000 const prune = "7.days" + function args(git: string, cmd: string[]) { + return ["--git-dir", git, "--work-tree", Instance.worktree, 
...cmd] + } + export function init() { Scheduler.register({ id: "snapshot.cleanup", @@ -25,7 +29,7 @@ export namespace Snapshot { } export async function cleanup() { - if (Instance.project.vcs !== "git" || Flag.OPENCODE_CLIENT === "acp") return + if (Instance.project.vcs !== "git") return const cfg = await Config.get() if (cfg.snapshot === false) return const git = gitdir() @@ -34,13 +38,13 @@ export namespace Snapshot { .then(() => true) .catch(() => false) if (!exists) return - const result = await $`git --git-dir ${git} --work-tree ${Instance.worktree} gc --prune=${prune}` - .quiet() - .cwd(Instance.directory) - .nothrow() - if (result.exitCode !== 0) { + const result = await Process.run(["git", ...args(git, ["gc", `--prune=${prune}`])], { + cwd: Instance.directory, + nothrow: true, + }) + if (result.code !== 0) { log.warn("cleanup failed", { - exitCode: result.exitCode, + exitCode: result.code, stderr: result.stderr.toString(), stdout: result.stdout.toString(), }) @@ -50,32 +54,32 @@ export namespace Snapshot { } export async function track() { - if (Instance.project.vcs !== "git" || Flag.OPENCODE_CLIENT === "acp") return + if (Instance.project.vcs !== "git") return const cfg = await Config.get() if (cfg.snapshot === false) return const git = gitdir() if (await fs.mkdir(git, { recursive: true })) { - await $`git init` - .env({ + await Process.run(["git", "init"], { + env: { ...process.env, GIT_DIR: git, GIT_WORK_TREE: Instance.worktree, - }) - .quiet() - .nothrow() + }, + nothrow: true, + }) + // Configure git to not convert line endings on Windows - await $`git --git-dir ${git} config core.autocrlf false`.quiet().nothrow() - await $`git --git-dir ${git} config core.longpaths true`.quiet().nothrow() - await $`git --git-dir ${git} config core.symlinks true`.quiet().nothrow() - await $`git --git-dir ${git} config core.fsmonitor false`.quiet().nothrow() + await Process.run(["git", "--git-dir", git, "config", "core.autocrlf", "false"], { nothrow: true }) + await 
Process.run(["git", "--git-dir", git, "config", "core.longpaths", "true"], { nothrow: true }) + await Process.run(["git", "--git-dir", git, "config", "core.symlinks", "true"], { nothrow: true }) + await Process.run(["git", "--git-dir", git, "config", "core.fsmonitor", "false"], { nothrow: true }) log.info("initialized") } await add(git) - const hash = await $`git --git-dir ${git} --work-tree ${Instance.worktree} write-tree` - .quiet() - .cwd(Instance.directory) - .nothrow() - .text() + const hash = await Process.text(["git", ...args(git, ["write-tree"])], { + cwd: Instance.directory, + nothrow: true, + }).then((x) => x.text) log.info("tracking", { hash, cwd: Instance.directory, git }) return hash.trim() } @@ -89,19 +93,32 @@ export namespace Snapshot { export async function patch(hash: string): Promise<Patch> { const git = gitdir() await add(git) - const result = - await $`git -c core.autocrlf=false -c core.longpaths=true -c core.symlinks=true -c core.quotepath=false --git-dir ${git} --work-tree ${Instance.worktree} diff --no-ext-diff --name-only ${hash} -- .` - .quiet() - .cwd(Instance.directory) - .nothrow() + const result = await Process.text( + [ + "git", + "-c", + "core.autocrlf=false", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + "-c", + "core.quotepath=false", + ...args(git, ["diff", "--no-ext-diff", "--name-only", hash, "--", "."]), + ], + { + cwd: Instance.directory, + nothrow: true, + }, + ) // If git diff fails, return empty patch - if (result.exitCode !== 0) { - log.warn("failed to get diff", { hash, exitCode: result.exitCode }) + if (result.code !== 0) { + log.warn("failed to get diff", { hash, exitCode: result.code }) return { hash, files: [] } } - const files = result.text() + const files = result.text return { hash, files: files @@ -116,20 +133,37 @@ export namespace Snapshot { export async function restore(snapshot: string) { log.info("restore", { commit: snapshot }) const git = gitdir() - const result = - await $`git -c 
core.longpaths=true -c core.symlinks=true --git-dir ${git} --work-tree ${Instance.worktree} read-tree ${snapshot} && git -c core.longpaths=true -c core.symlinks=true --git-dir ${git} --work-tree ${Instance.worktree} checkout-index -a -f` - .quiet() - .cwd(Instance.worktree) - .nothrow() - - if (result.exitCode !== 0) { + const result = await Process.run( + ["git", "-c", "core.longpaths=true", "-c", "core.symlinks=true", ...args(git, ["read-tree", snapshot])], + { + cwd: Instance.worktree, + nothrow: true, + }, + ) + if (result.code === 0) { + const checkout = await Process.run( + ["git", "-c", "core.longpaths=true", "-c", "core.symlinks=true", ...args(git, ["checkout-index", "-a", "-f"])], + { + cwd: Instance.worktree, + nothrow: true, + }, + ) + if (checkout.code === 0) return log.error("failed to restore snapshot", { snapshot, - exitCode: result.exitCode, - stderr: result.stderr.toString(), - stdout: result.stdout.toString(), + exitCode: checkout.code, + stderr: checkout.stderr.toString(), + stdout: checkout.stdout.toString(), }) + return } + + log.error("failed to restore snapshot", { + snapshot, + exitCode: result.code, + stderr: result.stderr.toString(), + stdout: result.stdout.toString(), + }) } export async function revert(patches: Patch[]) { @@ -139,19 +173,37 @@ export namespace Snapshot { for (const file of item.files) { if (files.has(file)) continue log.info("reverting", { file, hash: item.hash }) - const result = - await $`git -c core.longpaths=true -c core.symlinks=true --git-dir ${git} --work-tree ${Instance.worktree} checkout ${item.hash} -- ${file}` - .quiet() - .cwd(Instance.worktree) - .nothrow() - if (result.exitCode !== 0) { + const result = await Process.run( + [ + "git", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + ...args(git, ["checkout", item.hash, "--", file]), + ], + { + cwd: Instance.worktree, + nothrow: true, + }, + ) + if (result.code !== 0) { const relativePath = path.relative(Instance.worktree, file) - const 
checkTree = - await $`git -c core.longpaths=true -c core.symlinks=true --git-dir ${git} --work-tree ${Instance.worktree} ls-tree ${item.hash} -- ${relativePath}` - .quiet() - .cwd(Instance.worktree) - .nothrow() - if (checkTree.exitCode === 0 && checkTree.text().trim()) { + const checkTree = await Process.text( + [ + "git", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + ...args(git, ["ls-tree", item.hash, "--", relativePath]), + ], + { + cwd: Instance.worktree, + nothrow: true, + }, + ) + if (checkTree.code === 0 && checkTree.text.trim()) { log.info("file existed in snapshot but checkout failed, keeping", { file, }) @@ -168,23 +220,36 @@ export namespace Snapshot { export async function diff(hash: string) { const git = gitdir() await add(git) - const result = - await $`git -c core.autocrlf=false -c core.longpaths=true -c core.symlinks=true -c core.quotepath=false --git-dir ${git} --work-tree ${Instance.worktree} diff --no-ext-diff ${hash} -- .` - .quiet() - .cwd(Instance.worktree) - .nothrow() + const result = await Process.text( + [ + "git", + "-c", + "core.autocrlf=false", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + "-c", + "core.quotepath=false", + ...args(git, ["diff", "--no-ext-diff", hash, "--", "."]), + ], + { + cwd: Instance.worktree, + nothrow: true, + }, + ) - if (result.exitCode !== 0) { + if (result.code !== 0) { log.warn("failed to get diff", { hash, - exitCode: result.exitCode, + exitCode: result.code, stderr: result.stderr.toString(), stdout: result.stdout.toString(), }) return "" } - return result.text().trim() + return result.text.trim() } export const FileDiff = z @@ -205,12 +270,24 @@ export namespace Snapshot { const result: FileDiff[] = [] const status = new Map<string, "added" | "deleted" | "modified">() - const statuses = - await $`git -c core.autocrlf=false -c core.longpaths=true -c core.symlinks=true -c core.quotepath=false --git-dir ${git} --work-tree ${Instance.worktree} diff --no-ext-diff 
--name-status --no-renames ${from} ${to} -- .` - .quiet() - .cwd(Instance.directory) - .nothrow() - .text() + const statuses = await Process.text( + [ + "git", + "-c", + "core.autocrlf=false", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + "-c", + "core.quotepath=false", + ...args(git, ["diff", "--no-ext-diff", "--name-status", "--no-renames", from, to, "--", "."]), + ], + { + cwd: Instance.directory, + nothrow: true, + }, + ).then((x) => x.text) for (const line of statuses.trim().split("\n")) { if (!line) continue @@ -220,26 +297,57 @@ export namespace Snapshot { status.set(file, kind) } - for await (const line of $`git -c core.autocrlf=false -c core.longpaths=true -c core.symlinks=true -c core.quotepath=false --git-dir ${git} --work-tree ${Instance.worktree} diff --no-ext-diff --no-renames --numstat ${from} ${to} -- .` - .quiet() - .cwd(Instance.directory) - .nothrow() - .lines()) { + for (const line of await Process.lines( + [ + "git", + "-c", + "core.autocrlf=false", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + "-c", + "core.quotepath=false", + ...args(git, ["diff", "--no-ext-diff", "--no-renames", "--numstat", from, to, "--", "."]), + ], + { + cwd: Instance.directory, + nothrow: true, + }, + )) { if (!line) continue const [additions, deletions, file] = line.split("\t") const isBinaryFile = additions === "-" && deletions === "-" const before = isBinaryFile ? "" - : await $`git -c core.autocrlf=false -c core.longpaths=true -c core.symlinks=true --git-dir ${git} --work-tree ${Instance.worktree} show ${from}:${file}` - .quiet() - .nothrow() - .text() + : await Process.text( + [ + "git", + "-c", + "core.autocrlf=false", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + ...args(git, ["show", `${from}:${file}`]), + ], + { nothrow: true }, + ).then((x) => x.text) const after = isBinaryFile ? 
"" - : await $`git -c core.autocrlf=false -c core.longpaths=true -c core.symlinks=true --git-dir ${git} --work-tree ${Instance.worktree} show ${to}:${file}` - .quiet() - .nothrow() - .text() + : await Process.text( + [ + "git", + "-c", + "core.autocrlf=false", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + ...args(git, ["show", `${to}:${file}`]), + ], + { nothrow: true }, + ).then((x) => x.text) const added = isBinaryFile ? 0 : parseInt(additions) const deleted = isBinaryFile ? 0 : parseInt(deletions) result.push({ @@ -261,10 +369,22 @@ export namespace Snapshot { async function add(git: string) { await syncExclude(git) - await $`git -c core.autocrlf=false -c core.longpaths=true -c core.symlinks=true --git-dir ${git} --work-tree ${Instance.worktree} add .` - .quiet() - .cwd(Instance.directory) - .nothrow() + await Process.run( + [ + "git", + "-c", + "core.autocrlf=false", + "-c", + "core.longpaths=true", + "-c", + "core.symlinks=true", + ...args(git, ["add", "."]), + ], + { + cwd: Instance.directory, + nothrow: true, + }, + ) } async function syncExclude(git: string) { @@ -281,11 +401,10 @@ export namespace Snapshot { } async function excludes() { - const file = await $`git rev-parse --path-format=absolute --git-path info/exclude` - .quiet() - .cwd(Instance.worktree) - .nothrow() - .text() + const file = await Process.text(["git", "rev-parse", "--path-format=absolute", "--git-path", "info/exclude"], { + cwd: Instance.worktree, + nothrow: true, + }).then((x) => x.text) if (!file.trim()) return const exists = await fs .stat(file.trim()) diff --git a/packages/opencode/src/storage/db.ts b/packages/opencode/src/storage/db.ts index f29aac18d1..beb8e3eb52 100644 --- a/packages/opencode/src/storage/db.ts +++ b/packages/opencode/src/storage/db.ts @@ -12,8 +12,11 @@ import z from "zod" import path from "path" import { readFileSync, readdirSync, existsSync } from "fs" import * as schema from "./schema" +import { Installation } from "../installation" 
+import { Flag } from "../flag/flag" +import { iife } from "@/util/iife" -declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined +declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number; name: string }[] | undefined export const NotFoundError = NamedError.create( "NotFoundError", @@ -25,13 +28,20 @@ export const NotFoundError = NamedError.create( const log = Log.create({ service: "db" }) export namespace Database { - export const Path = path.join(Global.Path.data, "opencode.db") + export const Path = iife(() => { + const channel = Installation.CHANNEL + if (["latest", "beta"].includes(channel) || Flag.OPENCODE_DISABLE_CHANNEL_DB) + return path.join(Global.Path.data, "opencode.db") + const safe = channel.replace(/[^a-zA-Z0-9._-]/g, "-") + return path.join(Global.Path.data, `opencode-${safe}.db`) + }) + type Schema = typeof schema export type Transaction = SQLiteTransaction<"sync", void, Schema> - type Client = SQLiteBunDatabase<Schema> + type Client = SQLiteBunDatabase - type Journal = { sql: string; timestamp: number }[] + type Journal = { sql: string; timestamp: number; name: string }[] const state = { sqlite: undefined as BunDatabase | undefined, @@ -62,6 +72,7 @@ export namespace Database { return { sql: readFileSync(file, "utf-8"), timestamp: time(name), + name, } }) .filter(Boolean) as Journal @@ -70,9 +81,9 @@ export namespace Database { } export const Client = lazy(() => { - log.info("opening database", { path: path.join(Global.Path.data, "opencode.db") }) + log.info("opening database", { path: Path }) - const sqlite = new BunDatabase(path.join(Global.Path.data, "opencode.db"), { create: true }) + const sqlite = new BunDatabase(Path, { create: true }) state.sqlite = sqlite sqlite.run("PRAGMA journal_mode = WAL") @@ -82,7 +93,7 @@ export namespace Database { sqlite.run("PRAGMA foreign_keys = ON") sqlite.run("PRAGMA wal_checkpoint(PASSIVE)") - const db = drizzle({ client: sqlite, schema }) + const db = drizzle({ client: 
sqlite }) // Apply schema migrations const entries = @@ -94,6 +105,11 @@ export namespace Database { count: entries.length, mode: typeof OPENCODE_MIGRATIONS !== "undefined" ? "bundled" : "dev", }) + if (Flag.OPENCODE_SKIP_MIGRATIONS) { + for (const item of entries) { + item.sql = "select 1;" + } + } migrate(db, entries) } @@ -108,7 +124,7 @@ export namespace Database { Client.reset() } - export type TxOrDb = Transaction | Client + export type TxOrDb = SQLiteTransaction<"sync", void, any, any> | Client const ctx = Context.create<{ tx: TxOrDb @@ -143,7 +159,7 @@ export namespace Database { } catch (err) { if (err instanceof Context.NotFound) { const effects: (() => void | Promise<void>)[] = [] - const result = Client().transaction((tx) => { + const result = (Client().transaction as any)((tx: TxOrDb) => { return ctx.provide({ tx, effects }, () => callback(tx)) }) for (const effect of effects) effect() diff --git a/packages/opencode/src/storage/schema.ts b/packages/opencode/src/storage/schema.ts index 4c1c2490e3..0c12cee622 100644 --- a/packages/opencode/src/storage/schema.ts +++ b/packages/opencode/src/storage/schema.ts @@ -1,5 +1,5 @@ -export { ControlAccountTable } from "../control/control.sql" +export { AccountTable, AccountStateTable, ControlAccountTable } from "../account/account.sql" +export { ProjectTable } from "../project/project.sql" export { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql" export { SessionShareTable } from "../share/share.sql" -export { ProjectTable } from "../project/project.sql" export { WorkspaceTable } from "../control-plane/workspace.sql" diff --git a/packages/opencode/src/storage/storage.ts b/packages/opencode/src/storage/storage.ts index a78ff04f43..a78607cdfd 100644 --- a/packages/opencode/src/storage/storage.ts +++ b/packages/opencode/src/storage/storage.ts @@ -5,10 +5,10 @@ import { Global } from "../global" import { Filesystem } from "../util/filesystem" import { lazy } from 
"../util/lazy" import { Lock } from "../util/lock" -import { $ } from "bun" import { NamedError } from "@opencode-ai/util/error" import z from "zod" import { Glob } from "../util/glob" +import { git } from "@/util/git" export namespace Storage { const log = Log.create({ service: "storage" }) @@ -49,18 +49,15 @@ export namespace Storage { } if (!worktree) continue if (!(await Filesystem.isDir(worktree))) continue - const [id] = await $`git rev-list --max-parents=0 --all` - .quiet() - .nothrow() - .cwd(worktree) + const result = await git(["rev-list", "--max-parents=0", "--all"], { + cwd: worktree, + }) + const [id] = result .text() - .then((x) => - x - .split("\n") - .filter(Boolean) - .map((x) => x.trim()) - .toSorted(), - ) + .split("\n") + .filter(Boolean) + .map((x) => x.trim()) + .toSorted() if (!id) continue projectID = id diff --git a/packages/opencode/src/tool/bash.ts b/packages/opencode/src/tool/bash.ts index 0751f789b7..109a665363 100644 --- a/packages/opencode/src/tool/bash.ts +++ b/packages/opencode/src/tool/bash.ts @@ -7,8 +7,8 @@ import { Log } from "../util/log" import { Instance } from "../project/instance" import { lazy } from "@/util/lazy" import { Language } from "web-tree-sitter" +import fs from "fs/promises" -import { $ } from "bun" import { Filesystem } from "@/util/filesystem" import { fileURLToPath } from "url" import { Flag } from "@/flag/flag.ts" @@ -116,12 +116,7 @@ export const BashTool = Tool.define("bash", async () => { if (["cd", "rm", "cp", "mv", "mkdir", "touch", "chmod", "chown", "cat"].includes(command[0])) { for (const arg of command.slice(1)) { if (arg.startsWith("-") || (command[0] === "chmod" && arg.startsWith("+"))) continue - const resolved = await $`realpath ${arg}` - .cwd(cwd) - .quiet() - .nothrow() - .text() - .then((x) => x.trim()) + const resolved = await fs.realpath(path.resolve(cwd, arg)).catch(() => "") log.info("resolved path", { arg, resolved }) if (resolved) { const normalized = @@ -178,6 +173,7 @@ export const 
BashTool = Tool.define("bash", async () => { }, stdio: ["ignore", "pipe", "pipe"], detached: process.platform !== "win32", + windowsHide: process.platform === "win32", }) let output = "" diff --git a/packages/opencode/src/tool/batch.ts b/packages/opencode/src/tool/batch.ts index eecbfe2990..00c22bfe6b 100644 --- a/packages/opencode/src/tool/batch.ts +++ b/packages/opencode/src/tool/batch.ts @@ -1,5 +1,6 @@ import z from "zod" import { Tool } from "./tool" +import { ProviderID, ModelID } from "../provider/schema" import DESCRIPTION from "./batch.txt" const DISALLOWED = new Set(["batch"]) @@ -31,18 +32,18 @@ export const BatchTool = Tool.define("batch", async () => { }, async execute(params, ctx) { const { Session } = await import("../session") - const { Identifier } = await import("../id/id") + const { PartID } = await import("../session/schema") const toolCalls = params.tool_calls.slice(0, 25) const discardedCalls = params.tool_calls.slice(25) const { ToolRegistry } = await import("./registry") - const availableTools = await ToolRegistry.tools({ modelID: "", providerID: "" }) + const availableTools = await ToolRegistry.tools({ modelID: ModelID.make(""), providerID: ProviderID.make("") }) const toolMap = new Map(availableTools.map((t) => [t.id, t])) const executeCall = async (call: (typeof toolCalls)[0]) => { const callStartTime = Date.now() - const partID = Identifier.ascending("part") + const partID = PartID.ascending() try { if (DISALLOWED.has(call.tool)) { @@ -79,7 +80,7 @@ export const BatchTool = Tool.define("batch", async () => { const result = await tool.execute(validatedParams, { ...ctx, callID: partID }) const attachments = result.attachments?.map((attachment) => ({ ...attachment, - id: Identifier.ascending("part"), + id: PartID.ascending(), sessionID: ctx.sessionID, messageID: ctx.messageID, })) @@ -134,7 +135,7 @@ export const BatchTool = Tool.define("batch", async () => { // Add discarded calls as errors const now = Date.now() for (const call of 
discardedCalls) { - const partID = Identifier.ascending("part") + const partID = PartID.ascending() await Session.updatePart({ id: partID, messageID: ctx.messageID, diff --git a/packages/opencode/src/tool/edit.ts b/packages/opencode/src/tool/edit.ts index 7a097d3fe1..c7b12378ed 100644 --- a/packages/opencode/src/tool/edit.ts +++ b/packages/opencode/src/tool/edit.ts @@ -24,6 +24,15 @@ function normalizeLineEndings(text: string): string { return text.replaceAll("\r\n", "\n") } +function detectLineEnding(text: string): "\n" | "\r\n" { + return text.includes("\r\n") ? "\r\n" : "\n" +} + +function convertToLineEnding(text: string, ending: "\n" | "\r\n"): string { + if (ending === "\n") return text + return text.replaceAll("\n", "\r\n") +} + export const EditTool = Tool.define("edit", { description: DESCRIPTION, parameters: z.object({ @@ -78,7 +87,12 @@ export const EditTool = Tool.define("edit", { if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filePath}`) await FileTime.assert(ctx.sessionID, filePath) contentOld = await Filesystem.readText(filePath) - contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll) + + const ending = detectLineEnding(contentOld) + const old = convertToLineEnding(normalizeLineEndings(params.oldString), ending) + const next = convertToLineEnding(normalizeLineEndings(params.newString), ending) + + contentNew = replace(contentOld, old, next, params.replaceAll) diff = trimDiff( createTwoFilesPatch(filePath, filePath, normalizeLineEndings(contentOld), normalizeLineEndings(contentNew)), diff --git a/packages/opencode/src/tool/plan.ts b/packages/opencode/src/tool/plan.ts index ff84dccec4..e91bc3faa2 100644 --- a/packages/opencode/src/tool/plan.ts +++ b/packages/opencode/src/tool/plan.ts @@ -4,12 +4,12 @@ import { Tool } from "./tool" import { Question } from "../question" import { Session } from "../session" import { MessageV2 } from "../session/message-v2" -import { Identifier } from 
"../id/id" import { Provider } from "../provider/provider" import { Instance } from "../project/instance" +import { type SessionID, MessageID, PartID } from "../session/schema" import EXIT_DESCRIPTION from "./plan-exit.txt" -async function getLastModel(sessionID: string) { +async function getLastModel(sessionID: SessionID) { for await (const item of MessageV2.stream(sessionID)) { if (item.info.role === "user" && item.info.model) return item.info.model } @@ -44,7 +44,7 @@ export const PlanExitTool = Tool.define("plan_exit", { const model = await getLastModel(ctx.sessionID) const userMsg: MessageV2.User = { - id: Identifier.ascending("message"), + id: MessageID.ascending(), sessionID: ctx.sessionID, role: "user", time: { @@ -55,7 +55,7 @@ export const PlanExitTool = Tool.define("plan_exit", { } await Session.updateMessage(userMsg) await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMsg.id, sessionID: ctx.sessionID, type: "text", @@ -102,7 +102,7 @@ export const PlanEnterTool = Tool.define("plan_enter", { const model = await getLastModel(ctx.sessionID) const userMsg: MessageV2.User = { - id: Identifier.ascending("message"), + id: MessageID.ascending(), sessionID: ctx.sessionID, role: "user", time: { @@ -113,7 +113,7 @@ export const PlanEnterTool = Tool.define("plan_enter", { } await Session.updateMessage(userMsg) await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMsg.id, sessionID: ctx.sessionID, type: "text", diff --git a/packages/opencode/src/tool/registry.ts b/packages/opencode/src/tool/registry.ts index f6473a0d41..68b4166e82 100644 --- a/packages/opencode/src/tool/registry.ts +++ b/packages/opencode/src/tool/registry.ts @@ -20,6 +20,7 @@ import path from "path" import { type ToolContext as PluginToolContext, type ToolDefinition } from "@opencode-ai/plugin" import z from "zod" import { Plugin } from "../plugin" +import { ProviderID, type ModelID } from 
"../provider/schema" import { WebSearchTool } from "./websearch" import { CodeSearchTool } from "./codesearch" import { Flag } from "@/flag/flag" @@ -287,8 +288,8 @@ export namespace ToolRegistry { export async function tools( model: { - providerID: string - modelID: string + providerID: ProviderID + modelID: ModelID }, agent?: Agent.Info, ) { @@ -298,7 +299,7 @@ export namespace ToolRegistry { .filter((t) => { // Enable websearch/codesearch for zen users OR via enable flag if (t.id === "codesearch" || t.id === "websearch") { - return model.providerID === "opencode" || Flag.OPENCODE_ENABLE_EXA + return model.providerID === ProviderID.opencode || Flag.OPENCODE_ENABLE_EXA } // use apply tool in same format as codex diff --git a/packages/opencode/src/tool/schema.ts b/packages/opencode/src/tool/schema.ts new file mode 100644 index 0000000000..93f0f9a71f --- /dev/null +++ b/packages/opencode/src/tool/schema.ts @@ -0,0 +1,17 @@ +import { Schema } from "effect" +import z from "zod" + +import { Identifier } from "@/id/id" +import { withStatics } from "@/util/schema" + +const toolIdSchema = Schema.String.pipe(Schema.brand("ToolID")) + +export type ToolID = typeof toolIdSchema.Type + +export const ToolID = toolIdSchema.pipe( + withStatics((schema: typeof toolIdSchema) => ({ + make: (id: string) => schema.makeUnsafe(id), + ascending: (id?: string) => schema.makeUnsafe(Identifier.ascending("tool", id)), + zod: Identifier.schema("tool").pipe(z.custom<ToolID>()), + })), +) diff --git a/packages/opencode/src/tool/skill.ts b/packages/opencode/src/tool/skill.ts index 8fcfb592de..17016b06f8 100644 --- a/packages/opencode/src/tool/skill.ts +++ b/packages/opencode/src/tool/skill.ts @@ -3,24 +3,14 @@ import { pathToFileURL } from "url" import z from "zod" import { Tool } from "./tool" import { Skill } from "../skill" -import { PermissionNext } from "../permission/next" import { Ripgrep } from "../file/ripgrep" import { iife } from "@/util/iife" export const SkillTool = 
Tool.define("skill", async (ctx) => { - const skills = await Skill.all() - - // Filter skills by agent permissions if agent provided - const agent = ctx?.agent - const accessibleSkills = agent - ? skills.filter((skill) => { - const rule = PermissionNext.evaluate("skill", skill.name, agent.permission) - return rule.action !== "deny" - }) - : skills + const list = await Skill.available(ctx?.agent) const description = - accessibleSkills.length === 0 + list.length === 0 ? "Load a specialized skill that provides domain-specific instructions and workflows. No skills are currently available." : [ "Load a specialized skill that provides domain-specific instructions and workflows.", @@ -34,18 +24,10 @@ export const SkillTool = Tool.define("skill", async (ctx) => { "The following skills provide specialized sets of instructions for particular tasks", "Invoke this tool to load a skill when a task matches one of the available skills listed below:", "", - "<available_skills>", - ...accessibleSkills.flatMap((skill) => [ - ` <skill>`, - ` <name>${skill.name}</name>`, - ` <description>${skill.description}</description>`, - ` <location>${pathToFileURL(skill.location).href}</location>`, - ` </skill>`, - ]), - "</available_skills>", + Skill.fmt(list, { verbose: false }), ].join("\n") - const examples = accessibleSkills + const examples = list .map((skill) => `'${skill.name}'`) .slice(0, 3) .join(", ") @@ -62,7 +44,7 @@ export const SkillTool = Tool.define("skill", async (ctx) => { const skill = await Skill.get(params.name) if (!skill) { - const available = await Skill.all().then((x) => Object.keys(x).join(", ")) + const available = await Skill.all().then((x) => x.map((skill) => skill.name).join(", ")) throw new Error(`Skill "${params.name}" not found. 
Available skills: ${available || "none"}`) } diff --git a/packages/opencode/src/tool/task.ts b/packages/opencode/src/tool/task.ts index 8c8cf827ab..68e44eb97e 100644 --- a/packages/opencode/src/tool/task.ts +++ b/packages/opencode/src/tool/task.ts @@ -2,6 +2,7 @@ import { Tool } from "./tool" import DESCRIPTION from "./task.txt" import z from "zod" import { Session } from "../session" +import { SessionID, MessageID } from "../session/schema" import { MessageV2 } from "../session/message-v2" import { Identifier } from "../id/id" import { Agent } from "../agent/agent" @@ -65,7 +66,7 @@ export const TaskTool = Tool.define("task", async (ctx) => { const session = await iife(async () => { if (params.task_id) { - const found = await Session.get(params.task_id).catch(() => {}) + const found = await Session.get(SessionID.make(params.task_id)).catch(() => {}) if (found) return found } @@ -116,7 +117,7 @@ export const TaskTool = Tool.define("task", async (ctx) => { }, }) - const messageID = Identifier.ascending("message") + const messageID = MessageID.ascending() function cancel() { SessionPrompt.cancel(session.id) diff --git a/packages/opencode/src/tool/tool.ts b/packages/opencode/src/tool/tool.ts index 0e78ba665c..8cc7b57d85 100644 --- a/packages/opencode/src/tool/tool.ts +++ b/packages/opencode/src/tool/tool.ts @@ -2,6 +2,7 @@ import z from "zod" import type { MessageV2 } from "../session/message-v2" import type { Agent } from "../agent/agent" import type { PermissionNext } from "../permission/next" +import type { SessionID, MessageID } from "../session/schema" import { Truncate } from "./truncation" export namespace Tool { @@ -14,8 +15,8 @@ export namespace Tool { } export type Context<M extends Metadata = Metadata> = { - sessionID: string - messageID: string + sessionID: SessionID + messageID: MessageID agent: string abort: AbortSignal callID?: string diff --git a/packages/opencode/src/tool/truncation.ts b/packages/opencode/src/tool/truncation.ts index 
25d523b6ad..7c6a362a37 100644 --- a/packages/opencode/src/tool/truncation.ts +++ b/packages/opencode/src/tool/truncation.ts @@ -6,6 +6,8 @@ import { PermissionNext } from "../permission/next" import type { Agent } from "../agent/agent" import { Scheduler } from "../scheduler" import { Filesystem } from "../util/filesystem" +import { Glob } from "../util/glob" +import { ToolID } from "./schema" export namespace Truncate { export const MAX_LINES = 2000 @@ -34,15 +36,9 @@ export namespace Truncate { export async function cleanup() { const cutoff = Identifier.timestamp(Identifier.create("tool", false, Date.now() - RETENTION_MS)) - const entries = await fs.readdir(DIR).catch(() => [] as string[]) + const entries = await Glob.scan("tool_*", { cwd: DIR, include: "file" }).catch(() => [] as string[]) for (const entry of entries) { - if (!entry.startsWith("tool_")) continue - try { - if (Identifier.timestamp(entry) >= cutoff) continue - } catch { - // Skip malformed IDs (e.g. legacy format or descending IDs) - continue - } + if (Identifier.timestamp(entry) >= cutoff) continue await fs.unlink(path.join(DIR, entry)).catch(() => {}) } } @@ -95,7 +91,7 @@ export namespace Truncate { const unit = hitBytes ? 
"bytes" : "lines" const preview = out.join("\n") - const id = Identifier.ascending("tool") + const id = ToolID.ascending() const filepath = path.join(DIR, id) await Filesystem.write(filepath, text) diff --git a/packages/opencode/src/util/archive.ts b/packages/opencode/src/util/archive.ts index 34a1738a8c..f65ceba547 100644 --- a/packages/opencode/src/util/archive.ts +++ b/packages/opencode/src/util/archive.ts @@ -1,5 +1,5 @@ -import { $ } from "bun" import path from "path" +import { Process } from "./process" export namespace Archive { export async function extractZip(zipPath: string, destDir: string) { @@ -8,9 +8,10 @@ export namespace Archive { const winDestDir = path.resolve(destDir) // $global:ProgressPreference suppresses PowerShell's blue progress bar popup const cmd = `$global:ProgressPreference = 'SilentlyContinue'; Expand-Archive -Path '${winZipPath}' -DestinationPath '${winDestDir}' -Force` - await $`powershell -NoProfile -NonInteractive -Command ${cmd}`.quiet() - } else { - await $`unzip -o -q ${zipPath} -d ${destDir}`.quiet() + await Process.run(["powershell", "-NoProfile", "-NonInteractive", "-Command", cmd]) + return } + + await Process.run(["unzip", "-o", "-q", zipPath, "-d", destDir]) } } diff --git a/packages/opencode/src/util/data-url.ts b/packages/opencode/src/util/data-url.ts new file mode 100644 index 0000000000..0fafcbc63f --- /dev/null +++ b/packages/opencode/src/util/data-url.ts @@ -0,0 +1,9 @@ +export function decodeDataUrl(url: string) { + const idx = url.indexOf(",") + if (idx === -1) return "" + + const head = url.slice(0, idx) + const body = url.slice(idx + 1) + if (head.includes(";base64")) return Buffer.from(body, "base64").toString("utf8") + return decodeURIComponent(body) +} diff --git a/packages/opencode/src/util/effect-http-client.ts b/packages/opencode/src/util/effect-http-client.ts new file mode 100644 index 0000000000..0c95e34b52 --- /dev/null +++ b/packages/opencode/src/util/effect-http-client.ts @@ -0,0 +1,11 @@ +import { 
Schedule } from "effect" +import { HttpClient } from "effect/unstable/http" + +export const withTransientReadRetry = <E, R>(client: HttpClient.HttpClient.With<E, R>) => + client.pipe( + HttpClient.retryTransient({ + retryOn: "errors-and-responses", + times: 2, + schedule: Schedule.exponential(200).pipe(Schedule.jittered), + }), + ) diff --git a/packages/opencode/src/util/effect-zod.ts b/packages/opencode/src/util/effect-zod.ts new file mode 100644 index 0000000000..c1407594ca --- /dev/null +++ b/packages/opencode/src/util/effect-zod.ts @@ -0,0 +1,92 @@ +import { Schema, SchemaAST } from "effect" +import z from "zod" + +export function zod<S extends Schema.Top>(schema: S): z.ZodType<Schema.Schema.Type<S>> { + return walk(schema.ast) as z.ZodType<Schema.Schema.Type<S>> +} + +function walk(ast: SchemaAST.AST): z.ZodTypeAny { + const out = body(ast) + const desc = SchemaAST.resolveDescription(ast) + const ref = SchemaAST.resolveIdentifier(ast) + const next = desc ? out.describe(desc) : out + return ref ? 
next.meta({ ref }) : next +} + +function body(ast: SchemaAST.AST): z.ZodTypeAny { + if (SchemaAST.isOptional(ast)) return opt(ast) + + switch (ast._tag) { + case "String": + return z.string() + case "Number": + return z.number() + case "Boolean": + return z.boolean() + case "Null": + return z.null() + case "Undefined": + return z.undefined() + case "Any": + case "Unknown": + return z.unknown() + case "Never": + return z.never() + case "Literal": + return z.literal(ast.literal) + case "Union": + return union(ast) + case "Objects": + return object(ast) + case "Arrays": + return array(ast) + case "Declaration": + return decl(ast) + default: + return fail(ast) + } +} + +function opt(ast: SchemaAST.AST): z.ZodTypeAny { + if (ast._tag !== "Union") return fail(ast) + const items = ast.types.filter((item) => item._tag !== "Undefined") + if (items.length === 1) return walk(items[0]).optional() + if (items.length > 1) + return z.union(items.map(walk) as [z.ZodTypeAny, z.ZodTypeAny, ...Array<z.ZodTypeAny>]).optional() + return z.undefined().optional() +} + +function union(ast: SchemaAST.Union): z.ZodTypeAny { + const items = ast.types.map(walk) + if (items.length === 1) return items[0] + if (items.length < 2) return fail(ast) + return z.union(items as [z.ZodTypeAny, z.ZodTypeAny, ...Array<z.ZodTypeAny>]) +} + +function object(ast: SchemaAST.Objects): z.ZodTypeAny { + if (ast.propertySignatures.length === 0 && ast.indexSignatures.length === 1) { + const sig = ast.indexSignatures[0] + if (sig.parameter._tag !== "String") return fail(ast) + return z.record(z.string(), walk(sig.type)) + } + + if (ast.indexSignatures.length > 0) return fail(ast) + + return z.object(Object.fromEntries(ast.propertySignatures.map((sig) => [String(sig.name), walk(sig.type)]))) +} + +function array(ast: SchemaAST.Arrays): z.ZodTypeAny { + if (ast.elements.length > 0) return fail(ast) + if (ast.rest.length !== 1) return fail(ast) + return z.array(walk(ast.rest[0])) +} + +function decl(ast: 
SchemaAST.Declaration): z.ZodTypeAny { + if (ast.typeParameters.length !== 1) return fail(ast) + return walk(ast.typeParameters[0]) +} + +function fail(ast: SchemaAST.AST): never { + const ref = SchemaAST.resolveIdentifier(ast) + throw new Error(`unsupported effect schema: ${ref ?? ast._tag}`) +} diff --git a/packages/opencode/src/util/filesystem.ts b/packages/opencode/src/util/filesystem.ts index a87aaeb986..37f00c6b9c 100644 --- a/packages/opencode/src/util/filesystem.ts +++ b/packages/opencode/src/util/filesystem.ts @@ -2,7 +2,7 @@ import { chmod, mkdir, readFile, writeFile } from "fs/promises" import { createWriteStream, existsSync, statSync } from "fs" import { lookup } from "mime-types" import { realpathSync } from "fs" -import { dirname, join, relative } from "path" +import { dirname, join, relative, resolve as pathResolve } from "path" import { Readable } from "stream" import { pipeline } from "stream/promises" import { Glob } from "./glob" @@ -113,16 +113,30 @@ export namespace Filesystem { } } + // We cannot rely on path.resolve() here because git.exe may come from Git Bash, Cygwin, or MSYS2, so we need to translate these paths at the boundary. + // Also resolves symlinks so that callers using the result as a cache key + // always get the same canonical path for a given physical directory. + export function resolve(p: string): string { + const resolved = pathResolve(windowsPath(p)) + try { + return normalizePath(realpathSync(resolved)) + } catch (e) { + if (isEnoent(e)) return normalizePath(resolved) + throw e + } + } + export function windowsPath(p: string): string { if (process.platform !== "win32") return p return ( p + .replace(/^\/([a-zA-Z]):(?:[\\/]|$)/, (_, drive) => `${drive.toUpperCase()}:/`) // Git Bash for Windows paths are typically /<drive>/... 
- .replace(/^\/([a-zA-Z])\//, (_, drive) => `${drive.toUpperCase()}:/`) + .replace(/^\/([a-zA-Z])(?:\/|$)/, (_, drive) => `${drive.toUpperCase()}:/`) // Cygwin git paths are typically /cygdrive/<drive>/... - .replace(/^\/cygdrive\/([a-zA-Z])\//, (_, drive) => `${drive.toUpperCase()}:/`) + .replace(/^\/cygdrive\/([a-zA-Z])(?:\/|$)/, (_, drive) => `${drive.toUpperCase()}:/`) // WSL paths are typically /mnt/<drive>/... - .replace(/^\/mnt\/([a-zA-Z])\//, (_, drive) => `${drive.toUpperCase()}:/`) + .replace(/^\/mnt\/([a-zA-Z])(?:\/|$)/, (_, drive) => `${drive.toUpperCase()}:/`) ) } export function overlaps(a: string, b: string) { diff --git a/packages/opencode/src/util/fn.ts b/packages/opencode/src/util/fn.ts index 9efe4622fc..19c60265bb 100644 --- a/packages/opencode/src/util/fn.ts +++ b/packages/opencode/src/util/fn.ts @@ -2,7 +2,14 @@ import { z } from "zod" export function fn<T extends z.ZodType, Result>(schema: T, cb: (input: z.infer<T>) => Result) { const result = (input: z.infer<T>) => { - const parsed = schema.parse(input) + let parsed + try { + parsed = schema.parse(input) + } catch (e) { + console.trace("schema validation failure stack trace:") + throw e + } + return cb(parsed) } result.force = (input: z.infer<T>) => cb(input) diff --git a/packages/opencode/src/util/instance-state.ts b/packages/opencode/src/util/instance-state.ts new file mode 100644 index 0000000000..5d0ffbf793 --- /dev/null +++ b/packages/opencode/src/util/instance-state.ts @@ -0,0 +1,51 @@ +import { Effect, ScopedCache, Scope } from "effect" + +import { Instance } from "@/project/instance" + +const TypeId = Symbol.for("@opencode/InstanceState") + +type Task = (key: string) => Effect.Effect<void> + +const tasks = new Set<Task>() + +export namespace InstanceState { + export interface State<A, E = never, R = never> { + readonly [TypeId]: typeof TypeId + readonly cache: ScopedCache.ScopedCache<string, A, E, R> + } + + export const make = <A, E = never, R = never>(input: { + lookup: (key: 
string) => Effect.Effect<A, E, R> + release?: (value: A, key: string) => Effect.Effect<void> + }): Effect.Effect<State<A, E, R>, never, R | Scope.Scope> => + Effect.gen(function* () { + const cache = yield* ScopedCache.make<string, A, E, R>({ + capacity: Number.POSITIVE_INFINITY, + lookup: (key) => + Effect.acquireRelease(input.lookup(key), (value) => + input.release ? input.release(value, key) : Effect.void, + ), + }) + + const task: Task = (key) => ScopedCache.invalidate(cache, key) + tasks.add(task) + yield* Effect.addFinalizer(() => Effect.sync(() => void tasks.delete(task))) + + return { + [TypeId]: TypeId, + cache, + } + }) + + export const get = <A, E, R>(self: State<A, E, R>) => ScopedCache.get(self.cache, Instance.directory) + + export const has = <A, E, R>(self: State<A, E, R>) => ScopedCache.has(self.cache, Instance.directory) + + export const invalidate = <A, E, R>(self: State<A, E, R>) => ScopedCache.invalidate(self.cache, Instance.directory) + + export const dispose = (key: string) => + Effect.all( + [...tasks].map((task) => task(key)), + { concurrency: "unbounded" }, + ) +} diff --git a/packages/opencode/src/util/keybind.ts b/packages/opencode/src/util/keybind.ts index 59318a31b0..83c7945ae1 100644 --- a/packages/opencode/src/util/keybind.ts +++ b/packages/opencode/src/util/keybind.ts @@ -23,7 +23,7 @@ export namespace Keybind { */ export function fromParsedKey(key: ParsedKey, leader = false): Info { return { - name: key.name, + name: key.name === " " ? 
"space" : key.name, ctrl: key.ctrl, meta: key.meta, shift: key.shift, diff --git a/packages/opencode/src/util/process.ts b/packages/opencode/src/util/process.ts index 71f001a86a..0490969370 100644 --- a/packages/opencode/src/util/process.ts +++ b/packages/opencode/src/util/process.ts @@ -25,6 +25,10 @@ export namespace Process { stderr: Buffer } + export interface TextResult extends Result { + text: string + } + export class RunFailedError extends Error { readonly cmd: string[] readonly code: number @@ -56,6 +60,7 @@ export namespace Process { cwd: opts.cwd, env: opts.env === null ? {} : opts.env ? { ...process.env, ...opts.env } : undefined, stdio: [opts.stdin ?? "ignore", opts.stdout ?? "ignore", opts.stderr ?? "ignore"], + windowsHide: process.platform === "win32", }) let closed = false @@ -114,13 +119,33 @@ export namespace Process { if (!proc.stdout || !proc.stderr) throw new Error("Process output not available") - const [code, stdout, stderr] = await Promise.all([proc.exited, buffer(proc.stdout), buffer(proc.stderr)]) - const out = { - code, - stdout, - stderr, - } + const out = await Promise.all([proc.exited, buffer(proc.stdout), buffer(proc.stderr)]) + .then(([code, stdout, stderr]) => ({ + code, + stdout, + stderr, + })) + .catch((err: unknown) => { + if (!opts.nothrow) throw err + return { + code: 1, + stdout: Buffer.alloc(0), + stderr: Buffer.from(err instanceof Error ? 
err.message : String(err)), + } + }) if (out.code === 0 || opts.nothrow) return out throw new RunFailedError(cmd, out.code, out.stdout, out.stderr) } + + export async function text(cmd: string[], opts: RunOptions = {}): Promise<TextResult> { + const out = await run(cmd, opts) + return { + ...out, + text: out.stdout.toString(), + } + } + + export async function lines(cmd: string[], opts: RunOptions = {}): Promise<string[]> { + return (await text(cmd, opts)).text.split(/\r?\n/).filter(Boolean) + } } diff --git a/packages/opencode/src/util/schema.ts b/packages/opencode/src/util/schema.ts new file mode 100644 index 0000000000..180f952d73 --- /dev/null +++ b/packages/opencode/src/util/schema.ts @@ -0,0 +1,17 @@ +import { Schema } from "effect" + +/** + * Attach static methods to a schema object. Designed to be used with `.pipe()`: + * + * @example + * export const Foo = fooSchema.pipe( + * withStatics((schema) => ({ + * zero: schema.makeUnsafe(0), + * from: Schema.decodeUnknownOption(schema), + * })) + * ) + */ +export const withStatics = + <S extends object, M extends Record<string, unknown>>(methods: (schema: S) => M) => + (schema: S): S & M => + Object.assign(schema, methods(schema)) diff --git a/packages/opencode/src/util/which.ts b/packages/opencode/src/util/which.ts index 78e651e8e8..81da257217 100644 --- a/packages/opencode/src/util/which.ts +++ b/packages/opencode/src/util/which.ts @@ -3,8 +3,8 @@ import whichPkg from "which" export function which(cmd: string, env?: NodeJS.ProcessEnv) { const result = whichPkg.sync(cmd, { nothrow: true, - path: env?.PATH, - pathExt: env?.PATHEXT, + path: env?.PATH ?? env?.Path ?? process.env.PATH ?? process.env.Path, + pathExt: env?.PATHEXT ?? env?.PathExt ?? process.env.PATHEXT ?? process.env.PathExt, }) return typeof result === "string" ? 
result : null } diff --git a/packages/opencode/src/worktree/index.ts b/packages/opencode/src/worktree/index.ts index 15efcea386..6ed0e48202 100644 --- a/packages/opencode/src/worktree/index.ts +++ b/packages/opencode/src/worktree/index.ts @@ -1,4 +1,3 @@ -import { $ } from "bun" import fs from "fs/promises" import path from "path" import z from "zod" @@ -9,8 +8,11 @@ import { InstanceBootstrap } from "../project/bootstrap" import { Project } from "../project/project" import { Database, eq } from "../storage/db" import { ProjectTable } from "../project/project.sql" +import type { ProjectID } from "../project/schema" import { fn } from "../util/fn" import { Log } from "../util/log" +import { Process } from "../util/process" +import { git } from "../util/git" import { BusEvent } from "@/bus/bus-event" import { GlobalBus } from "@/bus/global" @@ -248,14 +250,14 @@ export namespace Worktree { } async function sweep(root: string) { - const first = await $`git clean -ffdx`.quiet().nothrow().cwd(root) + const first = await git(["clean", "-ffdx"], { cwd: root }) if (first.exitCode === 0) return first const entries = failed(first) if (!entries.length) return first await prune(root, entries) - return $`git clean -ffdx`.quiet().nothrow().cwd(root) + return git(["clean", "-ffdx"], { cwd: root }) } async function canonical(input: string) { @@ -274,7 +276,9 @@ export namespace Worktree { if (await exists(directory)) continue const ref = `refs/heads/${branch}` - const branchCheck = await $`git show-ref --verify --quiet ${ref}`.quiet().nothrow().cwd(Instance.worktree) + const branchCheck = await git(["show-ref", "--verify", "--quiet", ref], { + cwd: Instance.worktree, + }) if (branchCheck.exitCode === 0) continue return Info.parse({ name, branch, directory }) @@ -285,9 +289,9 @@ export namespace Worktree { async function runStartCommand(directory: string, cmd: string) { if (process.platform === "win32") { - return $`cmd /c ${cmd}`.nothrow().cwd(directory) + return 
Process.run(["cmd", "/c", cmd], { cwd: directory, nothrow: true }) } - return $`bash -lc ${cmd}`.nothrow().cwd(directory) + return Process.run(["bash", "-lc", cmd], { cwd: directory, nothrow: true }) } type StartKind = "project" | "worktree" @@ -297,7 +301,7 @@ export namespace Worktree { if (!text) return true const ran = await runStartCommand(directory, text) - if (ran.exitCode === 0) return true + if (ran.code === 0) return true log.error("worktree start command failed", { kind, @@ -307,7 +311,7 @@ export namespace Worktree { return false } - async function runStartScripts(directory: string, input: { projectID: string; extra?: string }) { + async function runStartScripts(directory: string, input: { projectID: ProjectID; extra?: string }) { const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get()) const project = row ? Project.fromRow(row) : undefined const startup = project?.commands?.start?.trim() ?? "" @@ -319,7 +323,7 @@ export namespace Worktree { return true } - function queueStartScripts(directory: string, input: { projectID: string; extra?: string }) { + function queueStartScripts(directory: string, input: { projectID: ProjectID; extra?: string }) { setTimeout(() => { const start = async () => { await runStartScripts(directory, input) @@ -344,10 +348,9 @@ export namespace Worktree { } export async function createFromInfo(info: Info, startCommand?: string) { - const created = await $`git worktree add --no-checkout -b ${info.branch} ${info.directory}` - .quiet() - .nothrow() - .cwd(Instance.worktree) + const created = await git(["worktree", "add", "--no-checkout", "-b", info.branch, info.directory], { + cwd: Instance.worktree, + }) if (created.exitCode !== 0) { throw new CreateFailedError({ message: errorText(created) || "Failed to create git worktree" }) } @@ -359,7 +362,7 @@ export namespace Worktree { return () => { const start = async () => { - const populated = await $`git reset 
--hard`.quiet().nothrow().cwd(info.directory) + const populated = await git(["reset", "--hard"], { cwd: info.directory }) if (populated.exitCode !== 0) { const message = errorText(populated) || "Failed to populate worktree" log.error("worktree checkout failed", { directory: info.directory, message }) @@ -411,7 +414,7 @@ export namespace Worktree { await runStartScripts(info.directory, { projectID, extra }) } - void start().catch((error) => { + return start().catch((error) => { log.error("worktree start task failed", { directory: info.directory, error }) }) } @@ -476,10 +479,10 @@ export namespace Worktree { const stop = async (target: string) => { if (!(await exists(target))) return - await $`git fsmonitor--daemon stop`.quiet().nothrow().cwd(target) + await git(["fsmonitor--daemon", "stop"], { cwd: target }) } - const list = await $`git worktree list --porcelain`.quiet().nothrow().cwd(Instance.worktree) + const list = await git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) if (list.exitCode !== 0) { throw new RemoveFailedError({ message: errorText(list) || "Failed to read git worktrees" }) } @@ -496,9 +499,11 @@ export namespace Worktree { } await stop(entry.path) - const removed = await $`git worktree remove --force ${entry.path}`.quiet().nothrow().cwd(Instance.worktree) + const removed = await git(["worktree", "remove", "--force", entry.path], { + cwd: Instance.worktree, + }) if (removed.exitCode !== 0) { - const next = await $`git worktree list --porcelain`.quiet().nothrow().cwd(Instance.worktree) + const next = await git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) if (next.exitCode !== 0) { throw new RemoveFailedError({ message: errorText(removed) || errorText(next) || "Failed to remove git worktree", @@ -515,7 +520,7 @@ export namespace Worktree { const branch = entry.branch?.replace(/^refs\/heads\//, "") if (branch) { - const deleted = await $`git branch -D ${branch}`.quiet().nothrow().cwd(Instance.worktree) + const 
deleted = await git(["branch", "-D", branch], { cwd: Instance.worktree }) if (deleted.exitCode !== 0) { throw new RemoveFailedError({ message: errorText(deleted) || "Failed to delete worktree branch" }) } @@ -535,7 +540,7 @@ export namespace Worktree { throw new ResetFailedError({ message: "Cannot reset the primary workspace" }) } - const list = await $`git worktree list --porcelain`.quiet().nothrow().cwd(Instance.worktree) + const list = await git(["worktree", "list", "--porcelain"], { cwd: Instance.worktree }) if (list.exitCode !== 0) { throw new ResetFailedError({ message: errorText(list) || "Failed to read git worktrees" }) } @@ -568,7 +573,7 @@ export namespace Worktree { throw new ResetFailedError({ message: "Worktree not found" }) } - const remoteList = await $`git remote`.quiet().nothrow().cwd(Instance.worktree) + const remoteList = await git(["remote"], { cwd: Instance.worktree }) if (remoteList.exitCode !== 0) { throw new ResetFailedError({ message: errorText(remoteList) || "Failed to list git remotes" }) } @@ -587,18 +592,19 @@ export namespace Worktree { : "" const remoteHead = remote - ? await $`git symbolic-ref refs/remotes/${remote}/HEAD`.quiet().nothrow().cwd(Instance.worktree) + ? await git(["symbolic-ref", `refs/remotes/${remote}/HEAD`], { cwd: Instance.worktree }) : { exitCode: 1, stdout: undefined, stderr: undefined } const remoteRef = remoteHead.exitCode === 0 ? outputText(remoteHead.stdout) : "" const remoteTarget = remoteRef ? remoteRef.replace(/^refs\/remotes\//, "") : "" const remoteBranch = remote && remoteTarget.startsWith(`${remote}/`) ? 
remoteTarget.slice(`${remote}/`.length) : "" - const mainCheck = await $`git show-ref --verify --quiet refs/heads/main`.quiet().nothrow().cwd(Instance.worktree) - const masterCheck = await $`git show-ref --verify --quiet refs/heads/master` - .quiet() - .nothrow() - .cwd(Instance.worktree) + const mainCheck = await git(["show-ref", "--verify", "--quiet", "refs/heads/main"], { + cwd: Instance.worktree, + }) + const masterCheck = await git(["show-ref", "--verify", "--quiet", "refs/heads/master"], { + cwd: Instance.worktree, + }) const localBranch = mainCheck.exitCode === 0 ? "main" : masterCheck.exitCode === 0 ? "master" : "" const target = remoteBranch ? `${remote}/${remoteBranch}` : localBranch @@ -607,7 +613,7 @@ export namespace Worktree { } if (remoteBranch) { - const fetch = await $`git fetch ${remote} ${remoteBranch}`.quiet().nothrow().cwd(Instance.worktree) + const fetch = await git(["fetch", remote, remoteBranch], { cwd: Instance.worktree }) if (fetch.exitCode !== 0) { throw new ResetFailedError({ message: errorText(fetch) || `Failed to fetch ${target}` }) } @@ -619,7 +625,7 @@ export namespace Worktree { const worktreePath = entry.path - const resetToTarget = await $`git reset --hard ${target}`.quiet().nothrow().cwd(worktreePath) + const resetToTarget = await git(["reset", "--hard", target], { cwd: worktreePath }) if (resetToTarget.exitCode !== 0) { throw new ResetFailedError({ message: errorText(resetToTarget) || "Failed to reset worktree to target" }) } @@ -629,22 +635,26 @@ export namespace Worktree { throw new ResetFailedError({ message: errorText(clean) || "Failed to clean worktree" }) } - const update = await $`git submodule update --init --recursive --force`.quiet().nothrow().cwd(worktreePath) + const update = await git(["submodule", "update", "--init", "--recursive", "--force"], { cwd: worktreePath }) if (update.exitCode !== 0) { throw new ResetFailedError({ message: errorText(update) || "Failed to update submodules" }) } - const subReset = await 
$`git submodule foreach --recursive git reset --hard`.quiet().nothrow().cwd(worktreePath) + const subReset = await git(["submodule", "foreach", "--recursive", "git", "reset", "--hard"], { + cwd: worktreePath, + }) if (subReset.exitCode !== 0) { throw new ResetFailedError({ message: errorText(subReset) || "Failed to reset submodules" }) } - const subClean = await $`git submodule foreach --recursive git clean -fdx`.quiet().nothrow().cwd(worktreePath) + const subClean = await git(["submodule", "foreach", "--recursive", "git", "clean", "-fdx"], { + cwd: worktreePath, + }) if (subClean.exitCode !== 0) { throw new ResetFailedError({ message: errorText(subClean) || "Failed to clean submodules" }) } - const status = await $`git -c core.fsmonitor=false status --porcelain=v1`.quiet().nothrow().cwd(worktreePath) + const status = await git(["-c", "core.fsmonitor=false", "status", "--porcelain=v1"], { cwd: worktreePath }) if (status.exitCode !== 0) { throw new ResetFailedError({ message: errorText(status) || "Failed to read git status" }) } diff --git a/packages/opencode/test/account/repo.test.ts b/packages/opencode/test/account/repo.test.ts new file mode 100644 index 0000000000..74a6d7a570 --- /dev/null +++ b/packages/opencode/test/account/repo.test.ts @@ -0,0 +1,338 @@ +import { expect } from "bun:test" +import { Effect, Layer, Option } from "effect" + +import { AccountRepo } from "../../src/account/repo" +import { AccessToken, AccountID, OrgID, RefreshToken } from "../../src/account/schema" +import { Database } from "../../src/storage/db" +import { testEffect } from "../fixture/effect" + +const truncate = Layer.effectDiscard( + Effect.sync(() => { + const db = Database.Client() + db.run(/*sql*/ `DELETE FROM account_state`) + db.run(/*sql*/ `DELETE FROM account`) + }), +) + +const it = testEffect(Layer.merge(AccountRepo.layer, truncate)) + +it.effect( + "list returns empty when no accounts exist", + Effect.gen(function* () { + const accounts = yield* AccountRepo.use((r) => 
r.list()) + expect(accounts).toEqual([]) + }), +) + +it.effect( + "active returns none when no accounts exist", + Effect.gen(function* () { + const active = yield* AccountRepo.use((r) => r.active()) + expect(Option.isNone(active)).toBe(true) + }), +) + +it.effect( + "persistAccount inserts and getRow retrieves", + Effect.gen(function* () { + const id = AccountID.make("user-1") + yield* AccountRepo.use((r) => + r.persistAccount({ + id, + email: "test@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_123"), + refreshToken: RefreshToken.make("rt_456"), + expiry: Date.now() + 3600_000, + orgID: Option.some(OrgID.make("org-1")), + }), + ) + + const row = yield* AccountRepo.use((r) => r.getRow(id)) + expect(Option.isSome(row)).toBe(true) + const value = Option.getOrThrow(row) + expect(value.id).toBe(AccountID.make("user-1")) + expect(value.email).toBe("test@example.com") + + const active = yield* AccountRepo.use((r) => r.active()) + expect(Option.getOrThrow(active).active_org_id).toBe(OrgID.make("org-1")) + }), +) + +it.effect( + "persistAccount sets the active account and org", + Effect.gen(function* () { + const id1 = AccountID.make("user-1") + const id2 = AccountID.make("user-2") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id: id1, + email: "first@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_1"), + refreshToken: RefreshToken.make("rt_1"), + expiry: Date.now() + 3600_000, + orgID: Option.some(OrgID.make("org-1")), + }), + ) + + yield* AccountRepo.use((r) => + r.persistAccount({ + id: id2, + email: "second@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_2"), + refreshToken: RefreshToken.make("rt_2"), + expiry: Date.now() + 3600_000, + orgID: Option.some(OrgID.make("org-2")), + }), + ) + + // Last persisted account is active with its org + const active = yield* AccountRepo.use((r) => r.active()) + expect(Option.isSome(active)).toBe(true) + 
expect(Option.getOrThrow(active).id).toBe(AccountID.make("user-2")) + expect(Option.getOrThrow(active).active_org_id).toBe(OrgID.make("org-2")) + }), +) + +it.effect( + "list returns all accounts", + Effect.gen(function* () { + const id1 = AccountID.make("user-1") + const id2 = AccountID.make("user-2") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id: id1, + email: "a@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_1"), + refreshToken: RefreshToken.make("rt_1"), + expiry: Date.now() + 3600_000, + orgID: Option.none(), + }), + ) + + yield* AccountRepo.use((r) => + r.persistAccount({ + id: id2, + email: "b@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_2"), + refreshToken: RefreshToken.make("rt_2"), + expiry: Date.now() + 3600_000, + orgID: Option.some(OrgID.make("org-1")), + }), + ) + + const accounts = yield* AccountRepo.use((r) => r.list()) + expect(accounts.length).toBe(2) + expect(accounts.map((a) => a.email).sort()).toEqual(["a@example.com", "b@example.com"]) + }), +) + +it.effect( + "remove deletes an account", + Effect.gen(function* () { + const id = AccountID.make("user-1") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id, + email: "test@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_1"), + refreshToken: RefreshToken.make("rt_1"), + expiry: Date.now() + 3600_000, + orgID: Option.none(), + }), + ) + + yield* AccountRepo.use((r) => r.remove(id)) + + const row = yield* AccountRepo.use((r) => r.getRow(id)) + expect(Option.isNone(row)).toBe(true) + }), +) + +it.effect( + "use stores the selected org and marks the account active", + Effect.gen(function* () { + const id1 = AccountID.make("user-1") + const id2 = AccountID.make("user-2") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id: id1, + email: "first@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_1"), + refreshToken: 
RefreshToken.make("rt_1"), + expiry: Date.now() + 3600_000, + orgID: Option.none(), + }), + ) + + yield* AccountRepo.use((r) => + r.persistAccount({ + id: id2, + email: "second@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_2"), + refreshToken: RefreshToken.make("rt_2"), + expiry: Date.now() + 3600_000, + orgID: Option.none(), + }), + ) + + yield* AccountRepo.use((r) => r.use(id1, Option.some(OrgID.make("org-99")))) + const active1 = yield* AccountRepo.use((r) => r.active()) + expect(Option.getOrThrow(active1).id).toBe(id1) + expect(Option.getOrThrow(active1).active_org_id).toBe(OrgID.make("org-99")) + + yield* AccountRepo.use((r) => r.use(id1, Option.none())) + const active2 = yield* AccountRepo.use((r) => r.active()) + expect(Option.getOrThrow(active2).active_org_id).toBeNull() + }), +) + +it.effect( + "persistToken updates token fields", + Effect.gen(function* () { + const id = AccountID.make("user-1") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id, + email: "test@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("old_token"), + refreshToken: RefreshToken.make("old_refresh"), + expiry: 1000, + orgID: Option.none(), + }), + ) + + const expiry = Date.now() + 7200_000 + yield* AccountRepo.use((r) => + r.persistToken({ + accountID: id, + accessToken: AccessToken.make("new_token"), + refreshToken: RefreshToken.make("new_refresh"), + expiry: Option.some(expiry), + }), + ) + + const row = yield* AccountRepo.use((r) => r.getRow(id)) + const value = Option.getOrThrow(row) + expect(value.access_token).toBe(AccessToken.make("new_token")) + expect(value.refresh_token).toBe(RefreshToken.make("new_refresh")) + expect(value.token_expiry).toBe(expiry) + }), +) + +it.effect( + "persistToken with no expiry sets token_expiry to null", + Effect.gen(function* () { + const id = AccountID.make("user-1") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id, + email: "test@example.com", + url: 
"https://control.example.com", + accessToken: AccessToken.make("old_token"), + refreshToken: RefreshToken.make("old_refresh"), + expiry: 1000, + orgID: Option.none(), + }), + ) + + yield* AccountRepo.use((r) => + r.persistToken({ + accountID: id, + accessToken: AccessToken.make("new_token"), + refreshToken: RefreshToken.make("new_refresh"), + expiry: Option.none(), + }), + ) + + const row = yield* AccountRepo.use((r) => r.getRow(id)) + expect(Option.getOrThrow(row).token_expiry).toBeNull() + }), +) + +it.effect( + "persistAccount upserts on conflict", + Effect.gen(function* () { + const id = AccountID.make("user-1") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id, + email: "test@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_v1"), + refreshToken: RefreshToken.make("rt_v1"), + expiry: 1000, + orgID: Option.some(OrgID.make("org-1")), + }), + ) + + yield* AccountRepo.use((r) => + r.persistAccount({ + id, + email: "test@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_v2"), + refreshToken: RefreshToken.make("rt_v2"), + expiry: 2000, + orgID: Option.some(OrgID.make("org-2")), + }), + ) + + const accounts = yield* AccountRepo.use((r) => r.list()) + expect(accounts.length).toBe(1) + + const row = yield* AccountRepo.use((r) => r.getRow(id)) + const value = Option.getOrThrow(row) + expect(value.access_token).toBe(AccessToken.make("at_v2")) + + const active = yield* AccountRepo.use((r) => r.active()) + expect(Option.getOrThrow(active).active_org_id).toBe(OrgID.make("org-2")) + }), +) + +it.effect( + "remove clears active state when deleting the active account", + Effect.gen(function* () { + const id = AccountID.make("user-1") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id, + email: "test@example.com", + url: "https://control.example.com", + accessToken: AccessToken.make("at_1"), + refreshToken: RefreshToken.make("rt_1"), + expiry: Date.now() + 3600_000, + orgID: 
Option.some(OrgID.make("org-1")), + }), + ) + + yield* AccountRepo.use((r) => r.remove(id)) + + const active = yield* AccountRepo.use((r) => r.active()) + expect(Option.isNone(active)).toBe(true) + }), +) + +it.effect( + "getRow returns none for nonexistent account", + Effect.gen(function* () { + const row = yield* AccountRepo.use((r) => r.getRow(AccountID.make("nope"))) + expect(Option.isNone(row)).toBe(true) + }), +) diff --git a/packages/opencode/test/account/service.test.ts b/packages/opencode/test/account/service.test.ts new file mode 100644 index 0000000000..5caa33235a --- /dev/null +++ b/packages/opencode/test/account/service.test.ts @@ -0,0 +1,224 @@ +import { expect } from "bun:test" +import { Duration, Effect, Layer, Option, Ref, Schema } from "effect" +import { HttpClient, HttpClientResponse } from "effect/unstable/http" + +import { AccountRepo } from "../../src/account/repo" +import { AccountService } from "../../src/account/service" +import { AccessToken, AccountID, DeviceCode, Login, Org, OrgID, RefreshToken, UserCode } from "../../src/account/schema" +import { Database } from "../../src/storage/db" +import { testEffect } from "../fixture/effect" + +const truncate = Layer.effectDiscard( + Effect.sync(() => { + const db = Database.Client() + db.run(/*sql*/ `DELETE FROM account_state`) + db.run(/*sql*/ `DELETE FROM account`) + }), +) + +const it = testEffect(Layer.merge(AccountRepo.layer, truncate)) + +const live = (client: HttpClient.HttpClient) => + AccountService.layer.pipe(Layer.provide(Layer.succeed(HttpClient.HttpClient, client))) + +const json = (req: Parameters<typeof HttpClientResponse.fromWeb>[0], body: unknown, status = 200) => + HttpClientResponse.fromWeb( + req, + new Response(JSON.stringify(body), { + status, + headers: { "content-type": "application/json" }, + }), + ) + +const encodeOrg = Schema.encodeSync(Org) + +const org = (id: string, name: string) => encodeOrg(new Org({ id: OrgID.make(id), name })) + +it.effect( + "orgsByAccount 
groups orgs per account", + Effect.gen(function* () { + yield* AccountRepo.use((r) => + r.persistAccount({ + id: AccountID.make("user-1"), + email: "one@example.com", + url: "https://one.example.com", + accessToken: AccessToken.make("at_1"), + refreshToken: RefreshToken.make("rt_1"), + expiry: Date.now() + 60_000, + orgID: Option.none(), + }), + ) + + yield* AccountRepo.use((r) => + r.persistAccount({ + id: AccountID.make("user-2"), + email: "two@example.com", + url: "https://two.example.com", + accessToken: AccessToken.make("at_2"), + refreshToken: RefreshToken.make("rt_2"), + expiry: Date.now() + 60_000, + orgID: Option.none(), + }), + ) + + const seen = yield* Ref.make<string[]>([]) + const client = HttpClient.make((req) => + Effect.gen(function* () { + yield* Ref.update(seen, (xs) => [...xs, `${req.method} ${req.url}`]) + + if (req.url === "https://one.example.com/api/orgs") { + return json(req, [org("org-1", "One")]) + } + + if (req.url === "https://two.example.com/api/orgs") { + return json(req, [org("org-2", "Two A"), org("org-3", "Two B")]) + } + + return json(req, [], 404) + }), + ) + + const rows = yield* AccountService.use((s) => s.orgsByAccount()).pipe(Effect.provide(live(client))) + + expect(rows.map((row) => [row.account.id, row.orgs.map((org) => org.id)]).map(([id, orgs]) => [id, orgs])).toEqual([ + [AccountID.make("user-1"), [OrgID.make("org-1")]], + [AccountID.make("user-2"), [OrgID.make("org-2"), OrgID.make("org-3")]], + ]) + expect(yield* Ref.get(seen)).toEqual([ + "GET https://one.example.com/api/orgs", + "GET https://two.example.com/api/orgs", + ]) + }), +) + +it.effect( + "token refresh persists the new token", + Effect.gen(function* () { + const id = AccountID.make("user-1") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id, + email: "user@example.com", + url: "https://one.example.com", + accessToken: AccessToken.make("at_old"), + refreshToken: RefreshToken.make("rt_old"), + expiry: Date.now() - 1_000, + orgID: Option.none(), + }), 
+ ) + + const client = HttpClient.make((req) => + Effect.succeed( + req.url === "https://one.example.com/auth/device/token" + ? json(req, { + access_token: "at_new", + refresh_token: "rt_new", + expires_in: 60, + }) + : json(req, {}, 404), + ), + ) + + const token = yield* AccountService.use((s) => s.token(id)).pipe(Effect.provide(live(client))) + + expect(Option.getOrThrow(token)).toBeDefined() + expect(String(Option.getOrThrow(token))).toBe("at_new") + + const row = yield* AccountRepo.use((r) => r.getRow(id)) + const value = Option.getOrThrow(row) + expect(value.access_token).toBe(AccessToken.make("at_new")) + expect(value.refresh_token).toBe(RefreshToken.make("rt_new")) + expect(value.token_expiry).toBeGreaterThan(Date.now()) + }), +) + +it.effect( + "config sends the selected org header", + Effect.gen(function* () { + const id = AccountID.make("user-1") + + yield* AccountRepo.use((r) => + r.persistAccount({ + id, + email: "user@example.com", + url: "https://one.example.com", + accessToken: AccessToken.make("at_1"), + refreshToken: RefreshToken.make("rt_1"), + expiry: Date.now() + 60_000, + orgID: Option.none(), + }), + ) + + const seen = yield* Ref.make<{ auth?: string; org?: string }>({}) + const client = HttpClient.make((req) => + Effect.gen(function* () { + yield* Ref.set(seen, { + auth: req.headers.authorization, + org: req.headers["x-org-id"], + }) + + if (req.url === "https://one.example.com/api/config") { + return json(req, { config: { theme: "light", seats: 5 } }) + } + + return json(req, {}, 404) + }), + ) + + const cfg = yield* AccountService.use((s) => s.config(id, OrgID.make("org-9"))).pipe(Effect.provide(live(client))) + + expect(Option.getOrThrow(cfg)).toEqual({ theme: "light", seats: 5 }) + expect(yield* Ref.get(seen)).toEqual({ + auth: "Bearer at_1", + org: "org-9", + }) + }), +) + +it.effect( + "poll stores the account and first org on success", + Effect.gen(function* () { + const login = new Login({ + code: DeviceCode.make("device-code"), + 
user: UserCode.make("user-code"), + url: "https://one.example.com/verify", + server: "https://one.example.com", + expiry: Duration.seconds(600), + interval: Duration.seconds(5), + }) + + const client = HttpClient.make((req) => + Effect.succeed( + req.url === "https://one.example.com/auth/device/token" + ? json(req, { + access_token: "at_1", + refresh_token: "rt_1", + token_type: "Bearer", + expires_in: 60, + }) + : req.url === "https://one.example.com/api/user" + ? json(req, { id: "user-1", email: "user@example.com" }) + : req.url === "https://one.example.com/api/orgs" + ? json(req, [org("org-1", "One")]) + : json(req, {}, 404), + ), + ) + + const res = yield* AccountService.use((s) => s.poll(login)).pipe(Effect.provide(live(client))) + + expect(res._tag).toBe("PollSuccess") + if (res._tag === "PollSuccess") { + expect(res.email).toBe("user@example.com") + } + + const active = yield* AccountRepo.use((r) => r.active()) + expect(Option.getOrThrow(active)).toEqual( + expect.objectContaining({ + id: "user-1", + email: "user@example.com", + active_org_id: "org-1", + }), + ) + }), +) diff --git a/packages/opencode/test/agent/agent.test.ts b/packages/opencode/test/agent/agent.test.ts index a524f64ded..007f3dceaf 100644 --- a/packages/opencode/test/agent/agent.test.ts +++ b/packages/opencode/test/agent/agent.test.ts @@ -141,8 +141,8 @@ test("custom agent from config creates new agent", async () => { fn: async () => { const custom = await Agent.get("my_custom_agent") expect(custom).toBeDefined() - expect(custom?.model?.providerID).toBe("openai") - expect(custom?.model?.modelID).toBe("gpt-4") + expect(String(custom?.model?.providerID)).toBe("openai") + expect(String(custom?.model?.modelID)).toBe("gpt-4") expect(custom?.description).toBe("My custom agent") expect(custom?.temperature).toBe(0.5) expect(custom?.topP).toBe(0.9) @@ -170,8 +170,8 @@ test("custom agent config overrides native agent properties", async () => { fn: async () => { const build = await Agent.get("builder") 
expect(build).toBeDefined() - expect(build?.model?.providerID).toBe("anthropic") - expect(build?.model?.modelID).toBe("claude-3") + expect(String(build?.model?.providerID)).toBe("anthropic") + expect(String(build?.model?.modelID)).toBe("claude-3") expect(build?.description).toBe("Custom build agent") expect(build?.temperature).toBe(0.7) expect(build?.color).toBe("#FF0000") diff --git a/packages/opencode/test/branding/branding.test.ts b/packages/opencode/test/branding/branding.test.ts index 87c984f16e..a7758b27c5 100644 --- a/packages/opencode/test/branding/branding.test.ts +++ b/packages/opencode/test/branding/branding.test.ts @@ -154,9 +154,9 @@ describe("User-Agent & Version", () => { // --------------------------------------------------------------------------- describe("Upstream Branding Leak Detection", () => { const leakedPatterns = [ - { pattern: /opencode\.ai/i, label: "opencode.ai domain" }, + { pattern: /opencode\.ai/i, label: "altimate.ai domain" }, { pattern: /anomalyco/i, label: "anomalyco GitHub org" }, - { pattern: /opncd\.ai/i, label: "opncd.ai short domain" }, + { pattern: /opncd\.ai/i, label: "altimate.ai short domain" }, ] // Lines matching any of these patterns are intentionally kept (internal identifiers) @@ -187,7 +187,7 @@ describe("Upstream Branding Leak Detection", () => { return false } - test("no opencode.ai domain references in src/ files", async () => { + test("no altimate.ai domain references in src/ files", async () => { const violations: string[] = [] const glob = new Glob("**/*.{ts,tsx,js}") for await (const file of glob.scan({ cwd: srcDir })) { @@ -223,7 +223,7 @@ describe("Upstream Branding Leak Detection", () => { expect(violations).toEqual([]) }) - test("no opncd.ai references in src/ files", async () => { + test("no altimate.ai references in src/ files", async () => { const violations: string[] = [] const glob = new Glob("**/*.{ts,tsx,js}") for await (const file of glob.scan({ cwd: srcDir })) { @@ -270,6 +270,51 @@ 
describe("Config Paths", () => { }) }) +// --------------------------------------------------------------------------- +// altimate_change start — regression: catch branding leaks in package root files and workflows +// --------------------------------------------------------------------------- + +describe("Package root branding", () => { + test("parsers-config.ts has no anomalyco references", () => { + const content = readText(join(pkgDir, "parsers-config.ts")) + const lines = content.split("\n") + const violations: string[] = [] + for (let i = 0; i < lines.length; i++) { + if (/anomalyco/i.test(lines[i])) { + violations.push(`parsers-config.ts:${i + 1}: ${lines[i].trim()}`) + } + } + expect(violations).toEqual([]) + }) +}) + +describe("Workflow branding", () => { + test("opencode.yml uses /altimate or /ac triggers, not /opencode", () => { + const content = readText(join(repoRoot, ".github", "workflows", "opencode.yml")) + // Should have /altimate triggers + expect(content).toContain("/altimate") + // Should NOT have /opencode triggers + expect(content).not.toMatch(/startsWith\(.*'\/opencode'\)/) + expect(content).not.toMatch(/contains\(.*'\/opencode'\)/) + }) + + test("beta.yml schedule is disabled", () => { + const content = readText(join(repoRoot, ".github", "workflows", "beta.yml")) + // Schedule should be commented out + const lines = content.split("\n") + const cronLines = lines.filter((l) => l.includes("cron:")) + for (const line of cronLines) { + expect(line.trimStart().startsWith("#")).toBe(true) + } + }) + + test("opencode.yml model reference does not use opencode/ prefix", () => { + const content = readText(join(repoRoot, ".github", "workflows", "opencode.yml")) + expect(content).not.toContain("model: opencode/") + }) +}) +// altimate_change end + // --------------------------------------------------------------------------- // 9. 
VSCode Extension // --------------------------------------------------------------------------- diff --git a/packages/opencode/test/branding/build-integrity.test.ts b/packages/opencode/test/branding/build-integrity.test.ts index 4cf39cfd6e..7136886085 100644 --- a/packages/opencode/test/branding/build-integrity.test.ts +++ b/packages/opencode/test/branding/build-integrity.test.ts @@ -181,44 +181,29 @@ describe("Binary Entry Points", () => { // 5. Skip Files / Keep Ours Consistency // --------------------------------------------------------------------------- +// altimate_change start — config.ts is now single source of truth (merge-config.json removed) describe("Skip Files / Keep Ours Consistency", () => { - test("config.ts and merge-config.json keepOurs share key patterns", () => { - // Extract keepOurs from config.ts by reading the source + test("config.ts contains critical keepOurs patterns", () => { const configPath = join(repoRoot, "script/upstream/utils/config.ts") const configSource = readFileSync(configPath, "utf-8") - // Extract keepOurs from merge-config.json - const mergeConfig = readJSON("script/upstream/merge-config.json") - const jsonKeepOurs: string[] = mergeConfig.keepOurs - - // Key patterns that should appear in both configs const criticalPatterns = ["packages/altimate-engine/**", "script/upstream/**"] - for (const pattern of criticalPatterns) { - // Check config.ts source contains the pattern expect(configSource).toContain(pattern) - - // Check merge-config.json contains the pattern - expect(jsonKeepOurs).toContain(pattern) } }) - test("config.ts and merge-config.json skipFiles share key patterns", () => { + test("config.ts contains critical skipFiles patterns", () => { const configPath = join(repoRoot, "script/upstream/utils/config.ts") const configSource = readFileSync(configPath, "utf-8") - const mergeConfig = readJSON("script/upstream/merge-config.json") - const jsonSkipFiles: string[] = mergeConfig.skipFiles - - // Key skip patterns that 
should appear in both const criticalSkipPatterns = ["packages/app/**", "packages/desktop/**", "packages/web/**"] - for (const pattern of criticalSkipPatterns) { expect(configSource).toContain(pattern) - expect(jsonSkipFiles).toContain(pattern) } }) }) +// altimate_change end // --------------------------------------------------------------------------- // 6. No Orphaned Package References diff --git a/packages/opencode/test/branding/upstream-guard.test.ts b/packages/opencode/test/branding/upstream-guard.test.ts index e8a85c5b80..08d9f6ee4e 100644 --- a/packages/opencode/test/branding/upstream-guard.test.ts +++ b/packages/opencode/test/branding/upstream-guard.test.ts @@ -4,11 +4,10 @@ import { join, resolve } from "path" const repoRoot = resolve(import.meta.dir, "..", "..", "..", "..") -const mergeConfigPath = join(repoRoot, "script", "upstream", "merge-config.json") -const mergeConfig = JSON.parse(readFileSync(mergeConfigPath, "utf-8")) - +// altimate_change start — config.ts is now single source of truth (merge-config.json removed) const brandingConfigPath = join(repoRoot, "script", "upstream", "utils", "config.ts") const brandingConfigText = readFileSync(brandingConfigPath, "utf-8") +// altimate_change end const rootPkgPath = join(repoRoot, "package.json") const rootPkg = JSON.parse(readFileSync(rootPkgPath, "utf-8")) @@ -38,11 +37,29 @@ describe("upstream merge guards", () => { "sst-env.d.ts", "specs/**", "README.*.md", + // Upstream project-specific configs + ".opencode/glossary/**", + ".opencode/agent/translator.md", + ".opencode/tool/github-triage.ts", + ".opencode/tool/github-triage.txt", + ".opencode/tool/github-pr-search.txt", + ".opencode/tool/github-pr-search.ts", + ".opencode/agent/duplicate-pr.md", + ".opencode/agent/triage.md", + ".opencode/agent/docs.md", + ".opencode/themes/mytheme.json", + ".opencode/env.d.ts", + ".opencode/command/rmslop.md", + ".opencode/command/ai-deps.md", + ".opencode/command/spellcheck.md", + 
".github/workflows/storybook.yml", + "script/sync-zed.ts", + "AGENTS.md", ] for (const pattern of expectedSkipPatterns) { test(`skipFiles contains "${pattern}"`, () => { - expect(mergeConfig.skipFiles).toContain(pattern) + expect(brandingConfigText).toContain(pattern) }) } }) @@ -52,12 +69,12 @@ describe("upstream merge guards", () => { "packages/altimate-engine/**", "script/upstream/**", "README.md", - ".github/**", + ".github/workflows/**", ] for (const pattern of expectedKeepOurs) { test(`keepOurs contains "${pattern}"`, () => { - expect(mergeConfig.keepOurs).toContain(pattern) + expect(brandingConfigText).toContain(pattern) }) } }) @@ -103,7 +120,7 @@ describe("upstream merge guards", () => { }) describe("branding rules completeness", () => { - test('contains "opencode.ai" domain replacement rule', () => { + test('contains "altimate.ai" domain replacement rule', () => { // In regex patterns, dots are escaped as \. so check for the regex form expect(brandingConfigText).toMatch(/opencode\\?\.ai/) }) @@ -112,8 +129,8 @@ describe("upstream merge guards", () => { expect(brandingConfigText).toContain("anomalyco") }) - test('contains "OpenCode" product name replacement rule', () => { - expect(brandingConfigText).toContain("OpenCode") + test('contains "Altimate Code" product name replacement rule', () => { + expect(brandingConfigText).toContain("Altimate Code") }) test('contains "altimate.ai" as replacement target', () => { @@ -156,7 +173,7 @@ describe("upstream merge guards", () => { describe("change marker", () => { test('changeMarker is "altimate_change"', () => { - expect(mergeConfig.changeMarker).toBe("altimate_change") + expect(brandingConfigText).toContain('"altimate_change"') }) }) @@ -166,9 +183,25 @@ describe("upstream merge guards", () => { "flake.lock", "sst.config.ts", "sst-env.d.ts", + "AGENTS.md", + "script/sync-zed.ts", + ".github/workflows/storybook.yml", + ".opencode/agent/translator.md", + ".opencode/agent/duplicate-pr.md", + 
".opencode/agent/triage.md", + ".opencode/agent/docs.md", + ".opencode/themes/mytheme.json", + ".opencode/env.d.ts", + ".opencode/command/rmslop.md", + ".opencode/command/ai-deps.md", + ".opencode/command/spellcheck.md", + ".opencode/tool/github-triage.ts", + ".opencode/tool/github-triage.txt", + ".opencode/tool/github-pr-search.txt", + ".opencode/tool/github-pr-search.ts", ] - const forbiddenDirs = ["nix", "specs", "infra", ".signpath"] + const forbiddenDirs = ["nix", "specs", "infra", ".signpath", ".opencode/glossary"] for (const file of forbiddenFiles) { test(`${file} should not exist at repo root`, () => { @@ -244,4 +277,35 @@ describe("upstream merge guards", () => { }) } }) + + // altimate_change start — marker guard safety: ensure src/ files are never excluded + describe("marker guard exclusions must never bypass src/ protection", () => { + test("markerExcludePatterns in analyze.ts must not match packages/opencode/src/**/*.ts", () => { + const analyzeContent = readFileSync(join(repoRoot, "script", "upstream", "analyze.ts"), "utf-8") + // Extract markerExcludePatterns array + const match = analyzeContent.match(/markerExcludePatterns\s*=\s*\[([\s\S]*?)\]/) + expect(match).not.toBeNull() + const patternsBlock = match![1] + + // These patterns must NEVER appear — they would bypass marker protection for source code + const dangerousPatterns = [ + "packages/opencode/src/**", + "packages/opencode/src/*.ts", + "**/src/**", + "**/*.ts", + ] + for (const dangerous of dangerousPatterns) { + expect(patternsBlock).not.toContain(`"${dangerous}"`) + } + }) + + test("CI marker guard runs in strict mode for non-merge PRs", () => { + const ciContent = readFileSync(join(repoRoot, ".github", "workflows", "ci.yml"), "utf-8") + // Must have --strict flag for regular PRs + expect(ciContent).toContain("--strict") + // Must detect merge branches to skip strict + expect(ciContent).toContain("merge-upstream-") + }) + }) + // altimate_change end }) diff --git 
a/packages/opencode/test/branding/upstream-merge-guard.test.ts b/packages/opencode/test/branding/upstream-merge-guard.test.ts new file mode 100644 index 0000000000..54f8a5f24f --- /dev/null +++ b/packages/opencode/test/branding/upstream-merge-guard.test.ts @@ -0,0 +1,391 @@ +import { describe, test, expect } from "bun:test" +import { readFileSync, existsSync } from "fs" +import { join, resolve } from "path" +import { Glob } from "bun" + +const repoRoot = resolve(import.meta.dir, "..", "..", "..", "..") +const pkgDir = resolve(import.meta.dir, "..", "..") +const srcDir = join(pkgDir, "src") + +function readText(filePath: string): string { + return readFileSync(filePath, "utf-8") +} + +function readJSON(filePath: string): any { + return JSON.parse(readFileSync(filePath, "utf-8")) +} + +// --------------------------------------------------------------------------- +// 1. Installation Script Branding +// --------------------------------------------------------------------------- +describe("Installation script branding", () => { + const installSrc = readText(join(srcDir, "installation", "index.ts")) + + test("USER_AGENT starts with `altimate-code/` not `opencode/`", () => { + expect(installSrc).toContain("USER_AGENT = `altimate-code/") + expect(installSrc).not.toMatch(/USER_AGENT\s*=\s*`opencode\//) + }) + + test("brew tap references AltimateAI/tap not anomalyco/tap", () => { + expect(installSrc).toContain("AltimateAI/tap") + expect(installSrc).not.toContain("anomalyco/tap") + }) + + test("npm package install uses @altimateai/altimate-code not opencode-ai", () => { + // npm/pnpm/bun install commands should reference our package + expect(installSrc).toContain("@altimateai/altimate-code") + + // Should not contain the upstream npm package name in install commands + // (note: @opencode-ai/ as internal scope is allowed, but `opencode-ai@` as + // an npm install target is not) + const installLines = installSrc.split("\n").filter( + (line) => + (line.includes("npm") || 
line.includes("pnpm") || line.includes("bun")) && + line.includes("install"), + ) + for (const line of installLines) { + expect(line).not.toMatch(/["'`]opencode-ai["'`@]/) + } + }) +}) + +// --------------------------------------------------------------------------- +// 2. Root package.json Integrity +// --------------------------------------------------------------------------- +describe("Root package.json integrity", () => { + const rootPkg = readJSON(join(repoRoot, "package.json")) + + test("workspaces list only explicit paths (no globs)", () => { + const packages: string[] = rootPkg.workspaces?.packages ?? [] + expect(packages.length).toBeGreaterThan(0) + for (const entry of packages) { + expect(entry).not.toContain("*") + expect(entry).not.toContain("?") + expect(entry).not.toContain("{") + } + }) + + test("no `sst` in devDependencies", () => { + const devDeps = rootPkg.devDependencies ?? {} + expect(devDeps).not.toHaveProperty("sst") + }) + + test("no `electron` in trustedDependencies", () => { + const trusted: string[] = rootPkg.trustedDependencies ?? [] + expect(trusted).not.toContain("electron") + }) + + test("no `@aws-sdk/client-s3` in dependencies", () => { + const deps = rootPkg.dependencies ?? {} + expect(deps).not.toHaveProperty("@aws-sdk/client-s3") + }) +}) + +// --------------------------------------------------------------------------- +// 3. 
Deleted Packages Stay Deleted +// --------------------------------------------------------------------------- +describe("Deleted packages stay deleted", () => { + const forbiddenDirs = [ + "packages/app", + "packages/console", + "packages/desktop", + "packages/desktop-electron", + "packages/enterprise", + "packages/extensions", + "packages/function", + "packages/identity", + "packages/slack", + "packages/storybook", + "packages/ui", + "packages/web", + "infra", + "nix", + ] + + for (const dir of forbiddenDirs) { + test(`${dir}/ should not exist`, () => { + expect(existsSync(join(repoRoot, dir))).toBe(false) + }) + } + + const forbiddenFiles = [ + "sst.config.ts", + "sst-env.d.ts", + "AGENTS.md", + "script/sync-zed.ts", + ".github/workflows/storybook.yml", + ] + + for (const file of forbiddenFiles) { + test(`${file} should not exist at repo root`, () => { + expect(existsSync(join(repoRoot, file))).toBe(false) + }) + } + + const forbiddenUpstreamConfigs = [ + ".opencode/glossary", + ".opencode/agent/translator.md", + ".opencode/agent/duplicate-pr.md", + ".opencode/agent/triage.md", + ".opencode/agent/docs.md", + ".opencode/themes/mytheme.json", + ".opencode/env.d.ts", + ".opencode/command/rmslop.md", + ".opencode/command/ai-deps.md", + ".opencode/command/spellcheck.md", + ".opencode/tool/github-triage.ts", + ".opencode/tool/github-triage.txt", + ".opencode/tool/github-pr-search.txt", + ".opencode/tool/github-pr-search.ts", + ] + + for (const item of forbiddenUpstreamConfigs) { + test(`${item} should not exist — upstream-only config`, () => { + expect(existsSync(join(repoRoot, item))).toBe(false) + }) + } + + test("no translated README.*.md files exist at repo root", () => { + const translatedPatterns = [ + "README.zh-CN.md", + "README.ja.md", + "README.ko.md", + "README.es.md", + "README.fr.md", + "README.de.md", + "README.pt.md", + "README.ru.md", + "README.ar.md", + "README.hi.md", + ] + for (const readme of translatedPatterns) { + expect(existsSync(join(repoRoot, 
readme))).toBe(false) + } + }) +}) + +// --------------------------------------------------------------------------- +// 4. OAuth/MCP Branding +// --------------------------------------------------------------------------- +describe("OAuth/MCP branding", () => { + const oauthProviderPath = join(srcDir, "mcp", "oauth-provider.ts") + const oauthCallbackPath = join(srcDir, "mcp", "oauth-callback.ts") + + test("oauth-provider.ts has client_name: \"Altimate Code\" not \"OpenCode\"", () => { + const content = readText(oauthProviderPath) + expect(content).toContain('client_name: "Altimate Code"') + expect(content).not.toMatch(/client_name:\s*"OpenCode"/) + }) + + test("oauth-callback.ts HTML titles contain \"Altimate Code\" not \"OpenCode\"", () => { + const content = readText(oauthCallbackPath) + // All <title> tags should reference Altimate Code + const titleMatches = content.match(/<title>[^<]+<\/title>/g) ?? [] + expect(titleMatches.length).toBeGreaterThan(0) + for (const title of titleMatches) { + expect(title).toContain("Altimate Code") + expect(title).not.toContain("OpenCode") + } + }) + + test("oauth-callback.ts body text references Altimate Code not OpenCode", () => { + const content = readText(oauthCallbackPath) + // User-facing strings mentioning the product + expect(content).toContain("Altimate Code") + // No user-facing "OpenCode" references (excluding internal identifiers) + const lines = content.split("\n") + for (const line of lines) { + // Skip import lines and internal identifiers + if (line.trim().startsWith("import ")) continue + if (line.includes("@opencode-ai/")) continue + if (line.includes("OPENCODE_")) continue + if (line.includes(".opencode")) continue + // Check user-facing HTML content for leaked branding + if (line.includes("<title>") || line.includes("<p>") || line.includes("<h")) { + expect(line).not.toMatch(/\bOpenCode\b/) + } + } + }) +}) + +// --------------------------------------------------------------------------- +// 5. 
No opencode.ai Domain Leaks in src/ +// --------------------------------------------------------------------------- +describe("No opencode.ai domain leaks in src/", () => { + function isExcludedLine(line: string, filePath: string): boolean { + const trimmed = line.trim() + if (trimmed.includes("@opencode-ai/")) return true + if (/OPENCODE_/.test(trimmed)) return true + if (trimmed.includes(".opencode/") || trimmed.includes('.opencode"') || trimmed.includes(".opencode\\")) return true + if (trimmed.includes("opencode.json") || trimmed.includes("opencode.jsonc")) return true + if (trimmed.includes("packages/opencode")) return true + if (trimmed.includes("window.__OPENCODE__")) return true + if (trimmed.startsWith("import ")) return true + if (trimmed.startsWith("//")) return true + if (/['"]\.opencode['"]/.test(trimmed)) return true + if (/\.opencode/.test(trimmed) && !/opencode\.ai/i.test(trimmed)) return true + if (filePath.includes("/test/")) return true + return false + } + + test("no opencode.ai domain references in any src/ .ts files", async () => { + const violations: string[] = [] + const glob = new Glob("**/*.ts") + for await (const file of glob.scan({ cwd: srcDir })) { + const filePath = join(srcDir, file) + const content = readText(filePath) + const lines = content.split("\n") + for (let i = 0; i < lines.length; i++) { + const line = lines[i] + if (isExcludedLine(line, filePath)) continue + if (/opencode\.ai/i.test(line)) { + violations.push(`${file}:${i + 1}: ${line.trim()}`) + } + } + } + expect(violations).toEqual([]) + }) + + test("no opencode.ai domain references in any src/ .tsx files", async () => { + const violations: string[] = [] + const glob = new Glob("**/*.tsx") + for await (const file of glob.scan({ cwd: srcDir })) { + const filePath = join(srcDir, file) + const content = readText(filePath) + const lines = content.split("\n") + for (let i = 0; i < lines.length; i++) { + const line = lines[i] + if (isExcludedLine(line, filePath)) continue + if 
(/opencode\.ai/i.test(line)) { + violations.push(`${file}:${i + 1}: ${line.trim()}`) + } + } + } + expect(violations).toEqual([]) + }) +}) + +// --------------------------------------------------------------------------- +// 6. Repository Hygiene +// --------------------------------------------------------------------------- +describe("Repository hygiene", () => { + test("__pycache__ is in .gitignore", () => { + const gitignore = readText(join(repoRoot, ".gitignore")) + expect(gitignore).toContain("__pycache__") + }) + + test("no __pycache__ directories are tracked in git", async () => { + const glob = new Glob("**/__pycache__/**") + const tracked: string[] = [] + for await (const file of glob.scan({ cwd: repoRoot })) { + // Only flag if not in .venv or node_modules (those are gitignored anyway) + if (!file.includes("node_modules") && !file.includes(".venv")) { + tracked.push(file) + } + } + // If any show up, they might be tracked — the gitignore should prevent new ones + // This test mostly validates the .gitignore entry is effective + }) + + test("altimate-engine package exists with pyproject.toml", () => { + expect(existsSync(join(repoRoot, "packages", "altimate-engine", "pyproject.toml"))).toBe(true) + }) + + test("altimate-engine has server.py (Python bridge entrypoint)", () => { + expect(existsSync(join(repoRoot, "packages", "altimate-engine", "src", "altimate_engine", "server.py"))).toBe(true) + }) + + test("bridge directory exists in opencode package", () => { + expect(existsSync(join(srcDir, "altimate", "bridge"))).toBe(true) + }) +}) + +// --------------------------------------------------------------------------- +// 7. 
Config Integrity +// --------------------------------------------------------------------------- +describe("Config integrity", () => { + const configTsPath = join(repoRoot, "script", "upstream", "utils", "config.ts") + const configTs = readText(configTsPath) + + test("config.ts contains critical keepOurs patterns", () => { + const criticalKeepOurs = [ + "packages/altimate-engine/**", + "script/upstream/**", + "packages/opencode/src/altimate/**", + "packages/opencode/src/bridge/**", + ] + for (const pattern of criticalKeepOurs) { + expect(configTs).toContain(`"${pattern}"`) + } + }) + + test("no pattern appears in both keepOurs and skipFiles", () => { + // Extract keepOurs array + const keepOursMatch = configTs.match(/keepOurs:\s*\[([\s\S]*?)\],/m) + expect(keepOursMatch).not.toBeNull() + const keepOursPatterns = (keepOursMatch![1].match(/"([^"]+)"/g) || []).map((s: string) => s.replace(/"/g, "")) + + // Extract skipFiles array + const skipFilesMatch = configTs.match(/skipFiles:\s*\[([\s\S]*?)\],/m) + expect(skipFilesMatch).not.toBeNull() + const skipFilesPatterns = (skipFilesMatch![1].match(/"([^"]+)"/g) || []).map((s: string) => s.replace(/"/g, "")) + + const overlaps = keepOursPatterns.filter((p: string) => skipFilesPatterns.includes(p)) + expect(overlaps).toEqual([]) + }) + + test("legacy merge-config.json does not exist (superseded by config.ts)", () => { + expect(existsSync(join(repoRoot, "script", "upstream", "merge-config.json"))).toBe(false) + }) + + test("transforms/ directory does not exist (logic is in merge.ts)", () => { + expect(existsSync(join(repoRoot, "script", "upstream", "transforms"))).toBe(false) + }) +}) + +// --------------------------------------------------------------------------- +// 8. 
altimate_change Marker Integrity +// --------------------------------------------------------------------------- +describe("altimate_change marker integrity", () => { + // Files that MUST have altimate_change markers (they contain custom logic in upstream-shared files) + const requiredMarkerFiles = [ + "src/session/compaction.ts", + "src/session/prompt.ts", + "src/installation/index.ts", + "src/flag/flag.ts", + "src/config/config.ts", + "src/config/paths.ts", + "src/index.ts", + "src/agent/agent.ts", + "src/tool/registry.ts", + "src/telemetry/index.ts", + "src/global/index.ts", + "src/util/token.ts", + ] + + for (const relPath of requiredMarkerFiles) { + const fullPath = join(pkgDir, relPath) + test(`${relPath} has altimate_change markers`, () => { + expect(existsSync(fullPath)).toBe(true) + const content = readText(fullPath) + expect(content).toContain("altimate_change") + }) + } + + test("all altimate_change start blocks have matching end blocks", () => { + const glob = new Glob("**/*.ts") + const mismatched: string[] = [] + + for (const file of glob.scanSync({ cwd: srcDir })) { + const fullPath = join(srcDir, file) + const content = readText(fullPath) + const starts = (content.match(/altimate_change start/g) || []).length + const ends = (content.match(/altimate_change end/g) || []).length + if (starts !== ends) { + mismatched.push(`${file}: ${starts} starts vs ${ends} ends`) + } + } + + expect(mismatched).toEqual([]) + }) +}) diff --git a/packages/opencode/test/cli/github-action.test.ts b/packages/opencode/test/cli/github-action.test.ts index cd64bb59ec..279ed27d08 100644 --- a/packages/opencode/test/cli/github-action.test.ts +++ b/packages/opencode/test/cli/github-action.test.ts @@ -1,13 +1,14 @@ import { test, expect, describe } from "bun:test" import { extractResponseText, formatPromptTooLargeError } from "../../src/cli/cmd/github" import type { MessageV2 } from "../../src/session/message-v2" +import { SessionID, MessageID, PartID } from 
"../../src/session/schema" // Helper to create minimal valid parts function createTextPart(text: string): MessageV2.Part { return { - id: "1", - sessionID: "s", - messageID: "m", + id: PartID.ascending(), + sessionID: SessionID.make("s"), + messageID: MessageID.make("m"), type: "text" as const, text, } @@ -15,9 +16,9 @@ function createTextPart(text: string): MessageV2.Part { function createReasoningPart(text: string): MessageV2.Part { return { - id: "1", - sessionID: "s", - messageID: "m", + id: PartID.ascending(), + sessionID: SessionID.make("s"), + messageID: MessageID.make("m"), type: "reasoning" as const, text, time: { start: 0 }, @@ -27,9 +28,9 @@ function createReasoningPart(text: string): MessageV2.Part { function createToolPart(tool: string, title: string, status: "completed" | "running" = "completed"): MessageV2.Part { if (status === "completed") { return { - id: "1", - sessionID: "s", - messageID: "m", + id: PartID.ascending(), + sessionID: SessionID.make("s"), + messageID: MessageID.make("m"), type: "tool" as const, callID: "c1", tool, @@ -44,9 +45,9 @@ function createToolPart(tool: string, title: string, status: "completed" | "runn } } return { - id: "1", - sessionID: "s", - messageID: "m", + id: PartID.ascending(), + sessionID: SessionID.make("s"), + messageID: MessageID.make("m"), type: "tool" as const, callID: "c1", tool, @@ -60,18 +61,18 @@ function createToolPart(tool: string, title: string, status: "completed" | "runn function createStepStartPart(): MessageV2.Part { return { - id: "1", - sessionID: "s", - messageID: "m", + id: PartID.ascending(), + sessionID: SessionID.make("s"), + messageID: MessageID.make("m"), type: "step-start" as const, } } function createStepFinishPart(): MessageV2.Part { return { - id: "1", - sessionID: "s", - messageID: "m", + id: PartID.ascending(), + sessionID: SessionID.make("s"), + messageID: MessageID.make("m"), type: "step-finish" as const, reason: "done", cost: 0, diff --git 
a/packages/opencode/test/cli/import.test.ts b/packages/opencode/test/cli/import.test.ts index a1a69dc094..8726c3a8d2 100644 --- a/packages/opencode/test/cli/import.test.ts +++ b/packages/opencode/test/cli/import.test.ts @@ -1,20 +1,36 @@ import { test, expect } from "bun:test" -import { parseShareUrl, transformShareData, type ShareData } from "../../src/cli/cmd/import" +import { + parseShareUrl, + shouldAttachShareAuthHeaders, + transformShareData, + type ShareData, +} from "../../src/cli/cmd/import" // parseShareUrl tests test("parses valid share URLs", () => { - expect(parseShareUrl("https://opncd.ai/share/Jsj3hNIW")).toBe("Jsj3hNIW") + expect(parseShareUrl("https://altimate.ai/share/Jsj3hNIW")).toBe("Jsj3hNIW") expect(parseShareUrl("https://custom.example.com/share/abc123")).toBe("abc123") expect(parseShareUrl("http://localhost:3000/share/test_id-123")).toBe("test_id-123") }) test("rejects invalid URLs", () => { - expect(parseShareUrl("https://opncd.ai/s/Jsj3hNIW")).toBeNull() // legacy format - expect(parseShareUrl("https://opncd.ai/share/")).toBeNull() - expect(parseShareUrl("https://opncd.ai/share/id/extra")).toBeNull() + expect(parseShareUrl("https://altimate.ai/s/Jsj3hNIW")).toBeNull() // legacy format + expect(parseShareUrl("https://altimate.ai/share/")).toBeNull() + expect(parseShareUrl("https://altimate.ai/share/id/extra")).toBeNull() expect(parseShareUrl("not-a-url")).toBeNull() }) +test("only attaches share auth headers for same-origin URLs", () => { + expect(shouldAttachShareAuthHeaders("https://control.example.com/share/abc", "https://control.example.com")).toBe( + true, + ) + expect(shouldAttachShareAuthHeaders("https://other.example.com/share/abc", "https://control.example.com")).toBe(false) + expect(shouldAttachShareAuthHeaders("https://control.example.com:443/share/abc", "https://control.example.com")).toBe( + true, + ) + expect(shouldAttachShareAuthHeaders("not-a-url", "https://control.example.com")).toBe(false) +}) + // transformShareData tests 
test("transforms share data to storage format", () => { const data: ShareData[] = [ diff --git a/packages/opencode/test/cli/plugin-auth-picker.test.ts b/packages/opencode/test/cli/plugin-auth-picker.test.ts index 3ce9094e92..5a1cf059d5 100644 --- a/packages/opencode/test/cli/plugin-auth-picker.test.ts +++ b/packages/opencode/test/cli/plugin-auth-picker.test.ts @@ -1,5 +1,5 @@ import { test, expect, describe } from "bun:test" -import { resolvePluginProviders } from "../../src/cli/cmd/auth" +import { resolvePluginProviders } from "../../src/cli/cmd/providers" import type { Hooks } from "@opencode-ai/plugin" function hookWithAuth(provider: string): Hooks { diff --git a/packages/opencode/test/cli/tui/thread.test.ts b/packages/opencode/test/cli/tui/thread.test.ts new file mode 100644 index 0000000000..d3de7c3183 --- /dev/null +++ b/packages/opencode/test/cli/tui/thread.test.ts @@ -0,0 +1,157 @@ +import { describe, expect, mock, test } from "bun:test" +import fs from "fs/promises" +import path from "path" +import { tmpdir } from "../../fixture/fixture" + +const stop = new Error("stop") +const seen = { + tui: [] as string[], + inst: [] as string[], +} + +mock.module("../../../src/cli/cmd/tui/app", () => ({ + tui: async (input: { directory: string }) => { + seen.tui.push(input.directory) + throw stop + }, +})) + +mock.module("@/util/rpc", () => ({ + Rpc: { + client: () => ({ + call: async () => ({ url: "http://127.0.0.1" }), + on: () => {}, + }), + }, +})) + +mock.module("@/cli/ui", () => ({ + UI: { + error: () => {}, + }, +})) + +mock.module("@/util/log", () => ({ + Log: { + init: async () => {}, + create: () => ({ + error: () => {}, + info: () => {}, + warn: () => {}, + debug: () => {}, + time: () => ({ stop: () => {} }), + }), + Default: { + error: () => {}, + info: () => {}, + warn: () => {}, + debug: () => {}, + }, + }, +})) + +mock.module("@/util/timeout", () => ({ + withTimeout: <T>(input: Promise<T>) => input, +})) + +mock.module("@/cli/network", () => ({ + 
withNetworkOptions: <T>(input: T) => input, + resolveNetworkOptions: async () => ({ + mdns: false, + port: 0, + hostname: "127.0.0.1", + }), +})) + +mock.module("../../../src/cli/cmd/tui/win32", () => ({ + win32DisableProcessedInput: () => {}, + win32InstallCtrlCGuard: () => undefined, +})) + +mock.module("@/config/tui", () => ({ + TuiConfig: { + get: () => ({}), + }, +})) + +mock.module("@/project/instance", () => ({ + Instance: { + provide: async (input: { directory: string; fn: () => Promise<unknown> | unknown }) => { + seen.inst.push(input.directory) + return input.fn() + }, + }, +})) + +describe("tui thread", () => { + async function call(project?: string) { + const { TuiThreadCommand } = await import("../../../src/cli/cmd/tui/thread") + const args: Parameters<NonNullable<typeof TuiThreadCommand.handler>>[0] = { + _: [], + $0: "opencode", + project, + prompt: "hi", + model: undefined, + agent: undefined, + session: undefined, + continue: false, + fork: false, + port: 0, + hostname: "127.0.0.1", + mdns: false, + "mdns-domain": "opencode.local", + mdnsDomain: "opencode.local", + cors: [], + } + return TuiThreadCommand.handler(args) + } + + async function check(project?: string) { + await using tmp = await tmpdir({ git: true }) + const cwd = process.cwd() + const pwd = process.env.PWD + const worker = globalThis.Worker + const tty = Object.getOwnPropertyDescriptor(process.stdin, "isTTY") + const link = path.join(path.dirname(tmp.path), path.basename(tmp.path) + "-link") + const type = process.platform === "win32" ? 
"junction" : "dir" + seen.tui.length = 0 + seen.inst.length = 0 + await fs.symlink(tmp.path, link, type) + + Object.defineProperty(process.stdin, "isTTY", { + configurable: true, + value: true, + }) + globalThis.Worker = class extends EventTarget { + onerror = null + onmessage = null + onmessageerror = null + postMessage() {} + terminate() {} + } as unknown as typeof Worker + + try { + process.chdir(tmp.path) + process.env.PWD = link + await expect(call(project)).rejects.toBe(stop) + expect(seen.inst[0]).toBe(tmp.path) + expect(seen.tui[0]).toBe(tmp.path) + } finally { + process.chdir(cwd) + if (pwd === undefined) delete process.env.PWD + else process.env.PWD = pwd + if (tty) Object.defineProperty(process.stdin, "isTTY", tty) + else delete (process.stdin as { isTTY?: boolean }).isTTY + globalThis.Worker = worker + await fs.rm(link, { recursive: true, force: true }).catch(() => undefined) + } + } + + test("uses the real cwd when PWD points at a symlink", async () => { + await check() + }) + + test("uses the real cwd after resolving a relative project from PWD", async () => { + await check(".") + }) +}) diff --git a/packages/opencode/test/config/agent-color.test.ts b/packages/opencode/test/config/agent-color.test.ts index b9c7cccc48..b360aff2aa 100644 --- a/packages/opencode/test/config/agent-color.test.ts +++ b/packages/opencode/test/config/agent-color.test.ts @@ -12,7 +12,7 @@ test("agent color parsed from project config", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { build: { color: "#FFA500" }, plan: { color: "primary" }, @@ -37,7 +37,7 @@ test("Agent.get includes color from config", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { plan: { color: "#A855F7" }, build: { color: "accent" }, diff --git 
a/packages/opencode/test/config/config.test.ts b/packages/opencode/test/config/config.test.ts index 40ab97449f..e2361690ad 100644 --- a/packages/opencode/test/config/config.test.ts +++ b/packages/opencode/test/config/config.test.ts @@ -1,13 +1,16 @@ -import { test, expect, describe, mock, afterEach } from "bun:test" +import { test, expect, describe, mock, afterEach, spyOn } from "bun:test" import { Config } from "../../src/config/config" import { Instance } from "../../src/project/instance" import { Auth } from "../../src/auth" +import { AccessToken, Account, AccountID, OrgID } from "../../src/account" import { tmpdir } from "../fixture/fixture" import path from "path" import fs from "fs/promises" import { pathToFileURL } from "url" import { Global } from "../../src/global" +import { ProjectID } from "../../src/project/schema" import { Filesystem } from "../../src/util/filesystem" +import { BunProc } from "../../src/bun" // Get managed config directory from environment (set in preload.ts) const managedConfigDir = process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR! 
@@ -25,6 +28,34 @@ async function writeConfig(dir: string, config: object, name = "opencode.json") await Filesystem.write(path.join(dir, name), JSON.stringify(config)) } +async function check(map: (dir: string) => string) { + if (process.platform !== "win32") return + await using globalTmp = await tmpdir() + await using tmp = await tmpdir({ git: true, config: { snapshot: true } }) + const prev = Global.Path.config + ;(Global.Path as { config: string }).config = globalTmp.path + Config.global.reset() + try { + await writeConfig(globalTmp.path, { + $schema: "https://altimate.ai/config.json", + snapshot: false, + }) + await Instance.provide({ + directory: map(tmp.path), + fn: async () => { + const cfg = await Config.get() + expect(cfg.snapshot).toBe(true) + expect(Instance.directory).toBe(Filesystem.resolve(tmp.path)) + expect(Instance.project.id).not.toBe(ProjectID.global) + }, + }) + } finally { + await Instance.disposeAll() + ;(Global.Path as { config: string }).config = prev + Config.global.reset() + } +} + test("loads config with defaults when no files exist", async () => { await using tmp = await tmpdir() await Instance.provide({ @@ -40,7 +71,7 @@ test("loads JSON config file", async () => { await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "test/model", username: "testuser", }) @@ -56,11 +87,28 @@ test("loads JSON config file", async () => { }) }) +test("loads project config from Git Bash and MSYS2 paths on Windows", async () => { + // Git Bash and MSYS2 both use /<drive>/... paths on Windows. 
+ await check((dir) => { + const drive = dir[0].toLowerCase() + const rest = dir.slice(2).replaceAll("\\", "/") + return `/${drive}${rest}` + }) +}) + +test("loads project config from Cygwin paths on Windows", async () => { + await check((dir) => { + const drive = dir[0].toLowerCase() + const rest = dir.slice(2).replaceAll("\\", "/") + return `/cygdrive/${drive}${rest}` + }) +}) + test("ignores legacy tui keys in opencode config", async () => { await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "test/model", theme: "legacy", tui: { scroll_speed: 4 }, @@ -85,7 +133,7 @@ test("loads JSONC config file", async () => { path.join(dir, "opencode.jsonc"), `{ // This is a comment - "$schema": "https://opencode.ai/config.json", + "$schema": "https://altimate.ai/config.json", "model": "test/model", "username": "testuser" }`, @@ -108,14 +156,14 @@ test("merges multiple config files with correct precedence", async () => { await writeConfig( dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "base", username: "base", }, "opencode.jsonc", ) await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "override", }) }, @@ -138,7 +186,7 @@ test("handles environment variable substitution", async () => { await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", username: "{env:TEST_VAR}", }) }, @@ -197,12 +245,58 @@ test("preserves env variables when adding $schema to config", async () => { } }) +test("resolves env templates in account config with account token", async () => { + const originalActive = Account.active + const originalConfig = Account.config + const originalToken = Account.token + const originalControlToken = 
process.env["OPENCODE_CONSOLE_TOKEN"] + + Account.active = mock(() => ({ + id: AccountID.make("account-1"), + email: "user@example.com", + url: "https://control.example.com", + active_org_id: OrgID.make("org-1"), + })) + + Account.config = mock(async () => ({ + provider: { + opencode: { + options: { + apiKey: "{env:OPENCODE_CONSOLE_TOKEN}", + }, + }, + }, + })) + + Account.token = mock(async () => AccessToken.make("st_test_token")) + + try { + await using tmp = await tmpdir() + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const config = await Config.get() + expect(config.provider?.["opencode"]?.options?.apiKey).toBe("st_test_token") + }, + }) + } finally { + Account.active = originalActive + Account.config = originalConfig + Account.token = originalToken + if (originalControlToken !== undefined) { + process.env["OPENCODE_CONSOLE_TOKEN"] = originalControlToken + } else { + delete process.env["OPENCODE_CONSOLE_TOKEN"] + } + } +}) + test("handles file inclusion substitution", async () => { await using tmp = await tmpdir({ init: async (dir) => { await Filesystem.write(path.join(dir, "included.txt"), "test-user") await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", username: "{file:included.txt}", }) }, @@ -221,7 +315,7 @@ test("handles file inclusion with replacement tokens", async () => { init: async (dir) => { await Filesystem.write(path.join(dir, "included.md"), "const out = await Bun.$`echo hi`") await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", username: "{file:included.md}", }) }, @@ -239,7 +333,7 @@ test("validates config schema and throws on invalid fields", async () => { await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", invalid_field: "should cause error", }) }, @@ -271,7 +365,7 @@ 
test("handles agent configuration", async () => { await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test_agent: { model: "test/model", @@ -301,7 +395,7 @@ test("treats agent variant as model-scoped setting (not provider option)", async await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test_agent: { model: "openai/gpt-5.2", @@ -332,7 +426,7 @@ test("handles command configuration", async () => { await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", command: { test_command: { template: "test template", @@ -362,7 +456,7 @@ test("migrates autoshare to share field", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", autoshare: true, }), ) @@ -384,7 +478,7 @@ test("migrates mode field to agent field", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mode: { test_mode: { model: "test/model", @@ -670,6 +764,39 @@ test("installs dependencies in writable OPENCODE_CONFIG_DIR", async () => { } }) +test("serializes concurrent config dependency installs", async () => { + await using tmp = await tmpdir() + const dirs = [path.join(tmp.path, "a"), path.join(tmp.path, "b")] + await Promise.all(dirs.map((dir) => fs.mkdir(dir, { recursive: true }))) + + const seen: string[] = [] + let active = 0 + let max = 0 + const run = spyOn(BunProc, "run").mockImplementation(async (_cmd, opts) => { + active++ + max = Math.max(max, active) + seen.push(opts?.cwd ?? 
"") + await new Promise((resolve) => setTimeout(resolve, 25)) + active-- + return { + code: 0, + stdout: Buffer.alloc(0), + stderr: Buffer.alloc(0), + } + }) + + try { + await Promise.all(dirs.map((dir) => Config.installDependencies(dir))) + } finally { + run.mockRestore() + } + + expect(max).toBe(1) + expect(seen.toSorted()).toEqual(dirs.toSorted()) + expect(await Filesystem.exists(path.join(dirs[0], "package.json"))).toBe(true) + expect(await Filesystem.exists(path.join(dirs[1], "package.json"))).toBe(true) +}) + test("resolves scoped npm plugins in config", async () => { await using tmp = await tmpdir({ init: async (dir) => { @@ -699,7 +826,7 @@ test("resolves scoped npm plugins in config", async () => { await Filesystem.write( path.join(dir, "opencode.json"), - JSON.stringify({ $schema: "https://opencode.ai/config.json", plugin: ["@scope/plugin"] }, null, 2), + JSON.stringify({ $schema: "https://altimate.ai/config.json", plugin: ["@scope/plugin"] }, null, 2), ) }, }) @@ -734,7 +861,7 @@ test("merges plugin arrays from global and local configs", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", plugin: ["global-plugin-1", "global-plugin-2"], }), ) @@ -743,7 +870,7 @@ test("merges plugin arrays from global and local configs", async () => { await Filesystem.write( path.join(opencodeDir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", plugin: ["local-plugin-1"], }), ) @@ -810,7 +937,7 @@ test("merges instructions arrays from global and local configs", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", instructions: ["global-instructions.md", "shared-rules.md"], }), ) @@ -818,7 +945,7 @@ test("merges instructions arrays from global 
and local configs", async () => { await Filesystem.write( path.join(opencodeDir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", instructions: ["local-instructions.md"], }), ) @@ -849,7 +976,7 @@ test("deduplicates duplicate instructions from global and local configs", async await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", instructions: ["duplicate.md", "global-only.md"], }), ) @@ -857,7 +984,7 @@ test("deduplicates duplicate instructions from global and local configs", async await Filesystem.write( path.join(opencodeDir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", instructions: ["duplicate.md", "local-only.md"], }), ) @@ -893,7 +1020,7 @@ test("deduplicates duplicate plugins from global and local configs", async () => await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", plugin: ["duplicate-plugin", "global-plugin-1"], }), ) @@ -902,7 +1029,7 @@ test("deduplicates duplicate plugins from global and local configs", async () => await Filesystem.write( path.join(opencodeDir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", plugin: ["duplicate-plugin", "local-plugin-1"], }), ) @@ -941,7 +1068,7 @@ test("migrates legacy tools config to permissions - allow", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test: { tools: { @@ -972,7 +1099,7 @@ test("migrates legacy tools config to permissions - deny", async () => { await Filesystem.write( path.join(dir, "opencode.json"), 
JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test: { tools: { @@ -1003,7 +1130,7 @@ test("migrates legacy write tool to edit permission", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test: { tools: { @@ -1033,7 +1160,7 @@ test("managed settings override user settings", async () => { await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "user/model", share: "auto", username: "testuser", @@ -1042,7 +1169,7 @@ test("managed settings override user settings", async () => { }) await writeManagedSettings({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "managed/model", share: "disabled", }) @@ -1062,7 +1189,7 @@ test("managed settings override project settings", async () => { await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", autoupdate: true, disabled_providers: [], }) @@ -1070,7 +1197,7 @@ test("managed settings override project settings", async () => { }) await writeManagedSettings({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", autoupdate: false, disabled_providers: ["openai"], }) @@ -1089,7 +1216,7 @@ test("missing managed settings file is not an error", async () => { await using tmp = await tmpdir({ init: async (dir) => { await writeConfig(dir, { - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "user/model", }) }, @@ -1110,7 +1237,7 @@ test("migrates legacy edit tool to edit permission", async () => { await Filesystem.write( path.join(dir, "opencode.json"), 
JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test: { tools: { @@ -1139,7 +1266,7 @@ test("migrates legacy patch tool to edit permission", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test: { tools: { @@ -1168,7 +1295,7 @@ test("migrates legacy multiedit tool to edit permission", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test: { tools: { @@ -1197,7 +1324,7 @@ test("migrates mixed legacy tools config", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test: { tools: { @@ -1232,7 +1359,7 @@ test("merges legacy tools with existing permission config", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", agent: { test: { permission: { @@ -1265,7 +1392,7 @@ test("permission config preserves key order", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", permission: { "*": "deny", edit: "ask", @@ -1313,7 +1440,7 @@ test("project config can override MCP server enabled status", async () => { await Filesystem.write( path.join(dir, "opencode.jsonc"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { jira: { type: "remote", @@ -1332,7 +1459,7 @@ test("project config can override MCP server enabled status", async () => { await Filesystem.write( path.join(dir, 
"opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { jira: { type: "remote", @@ -1371,7 +1498,7 @@ test("MCP config deep merges preserving base config properties", async () => { await Filesystem.write( path.join(dir, "opencode.jsonc"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { myserver: { type: "remote", @@ -1388,7 +1515,7 @@ test("MCP config deep merges preserving base config properties", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { myserver: { type: "remote", @@ -1423,7 +1550,7 @@ test("local .opencode config can override MCP from project config", async () => await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { docs: { type: "remote", @@ -1439,7 +1566,7 @@ test("local .opencode config can override MCP from project config", async () => await Filesystem.write( path.join(opencodeDir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { docs: { type: "remote", @@ -1507,7 +1634,7 @@ test("project config overrides remote well-known config", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { jira: { type: "remote", @@ -1581,7 +1708,7 @@ test("wellknown URL with trailing slash is normalized", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -1665,7 +1792,7 @@ describe("deduplicatePlugins", () => { await 
Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", plugin: ["my-plugin@1.0.0"], }), ) @@ -1700,7 +1827,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "project/model", username: "project-user", }), @@ -1795,7 +1922,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", instructions: ["./CUSTOM.md"], }), ) @@ -1841,7 +1968,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "configdir/model", }), ) @@ -1854,7 +1981,7 @@ describe("OPENCODE_DISABLE_PROJECT_CONFIG", () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "project/model", }), ) @@ -1893,7 +2020,7 @@ describe("OPENCODE_CONFIG_CONTENT token substitution", () => { const originalTestVar = process.env["TEST_CONFIG_VAR"] process.env["TEST_CONFIG_VAR"] = "test_api_key_12345" process.env["OPENCODE_CONFIG_CONTENT"] = JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", username: "{env:TEST_CONFIG_VAR}", }) @@ -1928,7 +2055,7 @@ describe("OPENCODE_CONFIG_CONTENT token substitution", () => { init: async (dir) => { await Filesystem.write(path.join(dir, "api_key.txt"), "secret_key_from_file") process.env["OPENCODE_CONFIG_CONTENT"] = JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: 
"https://altimate.ai/config.json", username: "{file:./api_key.txt}", }) }, diff --git a/packages/opencode/test/control-plane/session-proxy-middleware.test.ts b/packages/opencode/test/control-plane/session-proxy-middleware.test.ts index 369b9152ae..d4d152a1c6 100644 --- a/packages/opencode/test/control-plane/session-proxy-middleware.test.ts +++ b/packages/opencode/test/control-plane/session-proxy-middleware.test.ts @@ -1,5 +1,5 @@ import { afterEach, describe, expect, mock, test } from "bun:test" -import { Identifier } from "../../src/id/id" +import { WorkspaceID } from "../../src/control-plane/schema" import { Hono } from "hono" import { tmpdir } from "../fixture/fixture" import { Project } from "../../src/project/project" @@ -10,12 +10,22 @@ import { Database } from "../../src/storage/db" import { resetDatabase } from "../fixture/db" import * as adaptors from "../../src/control-plane/adaptors" import type { Adaptor } from "../../src/control-plane/types" +import { Flag } from "../../src/flag/flag" afterEach(async () => { mock.restore() await resetDatabase() }) +const original = Flag.OPENCODE_EXPERIMENTAL_WORKSPACES +// @ts-expect-error don't do this normally, but it works +Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = true + +afterEach(() => { + // @ts-expect-error don't do this normally, but it works + Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = original +}) + type State = { workspace?: "first" | "second" calls: Array<{ method: string; url: string; body?: string }> @@ -54,8 +64,8 @@ async function setup(state: State) { await using tmp = await tmpdir({ git: true }) const { project } = await Project.fromDirectory(tmp.path) - const id1 = Identifier.descending("workspace") - const id2 = Identifier.descending("workspace") + const id1 = WorkspaceID.ascending() + const id2 = WorkspaceID.ascending() Database.use((db) => db diff --git a/packages/opencode/test/control-plane/workspace-sync.test.ts b/packages/opencode/test/control-plane/workspace-sync.test.ts index 
899118920f..0f8d608fb3 100644 --- a/packages/opencode/test/control-plane/workspace-sync.test.ts +++ b/packages/opencode/test/control-plane/workspace-sync.test.ts @@ -1,5 +1,5 @@ import { afterEach, describe, expect, mock, test } from "bun:test" -import { Identifier } from "../../src/id/id" +import { WorkspaceID } from "../../src/control-plane/schema" import { Log } from "../../src/util/log" import { tmpdir } from "../fixture/fixture" import { Project } from "../../src/project/project" @@ -52,8 +52,8 @@ describe("control-plane/workspace.startSyncing", () => { await using tmp = await tmpdir({ git: true }) const { project } = await Project.fromDirectory(tmp.path) - const id1 = Identifier.descending("workspace") - const id2 = Identifier.descending("workspace") + const id1 = WorkspaceID.ascending() + const id2 = WorkspaceID.ascending() Database.use((db) => db diff --git a/packages/opencode/test/file/ripgrep.test.ts b/packages/opencode/test/file/ripgrep.test.ts index ac46f1131b..5eb56e53de 100644 --- a/packages/opencode/test/file/ripgrep.test.ts +++ b/packages/opencode/test/file/ripgrep.test.ts @@ -36,4 +36,19 @@ describe("file.ripgrep", () => { expect(hasVisible).toBe(true) expect(hasHidden).toBe(false) }) + + test("search returns empty when nothing matches", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write(path.join(dir, "match.ts"), "const value = 'other'\n") + }, + }) + + const hits = await Ripgrep.search({ + cwd: tmp.path, + pattern: "needle", + }) + + expect(hits).toEqual([]) + }) }) diff --git a/packages/opencode/test/fixture/effect.ts b/packages/opencode/test/fixture/effect.ts new file mode 100644 index 0000000000..b75610139f --- /dev/null +++ b/packages/opencode/test/fixture/effect.ts @@ -0,0 +1,7 @@ +import { test } from "bun:test" +import { Effect, Layer } from "effect" + +export const testEffect = <R, E>(layer: Layer.Layer<R, E, never>) => ({ + effect: <A, E2>(name: string, value: Effect.Effect<A, E2, R>) => + 
test(name, () => Effect.runPromise(value.pipe(Effect.provide(layer)))), +}) diff --git a/packages/opencode/test/fixture/fixture.ts b/packages/opencode/test/fixture/fixture.ts index 63f93bcafe..c7bb9658c9 100644 --- a/packages/opencode/test/fixture/fixture.ts +++ b/packages/opencode/test/fixture/fixture.ts @@ -42,13 +42,15 @@ export async function tmpdir<T>(options?: TmpDirOptions<T>) { if (options?.git) { await $`git init`.cwd(dirpath).quiet() await $`git config core.fsmonitor false`.cwd(dirpath).quiet() + await $`git config user.email "test@opencode.test"`.cwd(dirpath).quiet() + await $`git config user.name "Test"`.cwd(dirpath).quiet() await $`git commit --allow-empty -m "root commit ${dirpath}"`.cwd(dirpath).quiet() } if (options?.config) { await Bun.write( path.join(dirpath, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", ...options.config, }), ) diff --git a/packages/opencode/test/install/publish-package.test.ts b/packages/opencode/test/install/publish-package.test.ts index 08655374fe..a626ba1fc7 100644 --- a/packages/opencode/test/install/publish-package.test.ts +++ b/packages/opencode/test/install/publish-package.test.ts @@ -42,7 +42,7 @@ describe("publish package validation", () => { test("bin entries are correct", () => { const pkg = JSON.parse(fs.readFileSync(path.join(REPO_PKG_DIR, "package.json"), "utf-8")) expect(pkg.bin).toBeDefined() - expect(pkg.bin["altimate"]).toBe("./bin/altimate") + expect(pkg.bin["altimate"]).toBe("./bin/altimate-code") expect(pkg.bin["altimate-code"]).toBe("./bin/altimate-code") }) diff --git a/packages/opencode/test/installation/installation.test.ts b/packages/opencode/test/installation/installation.test.ts new file mode 100644 index 0000000000..a7cfe50d95 --- /dev/null +++ b/packages/opencode/test/installation/installation.test.ts @@ -0,0 +1,47 @@ +import { afterEach, describe, expect, test } from "bun:test" +import { Installation } from 
"../../src/installation" + +const fetch0 = globalThis.fetch + +afterEach(() => { + globalThis.fetch = fetch0 +}) + +describe("installation", () => { + test("reads release version from GitHub releases", async () => { + globalThis.fetch = (async () => + new Response(JSON.stringify({ tag_name: "v1.2.3" }), { + status: 200, + headers: { "content-type": "application/json" }, + })) as unknown as typeof fetch + + expect(await Installation.latest("unknown")).toBe("1.2.3") + }) + + test("reads scoop manifest versions", async () => { + globalThis.fetch = (async () => + new Response(JSON.stringify({ version: "2.3.4" }), { + status: 200, + headers: { "content-type": "application/json" }, + })) as unknown as typeof fetch + + expect(await Installation.latest("scoop")).toBe("2.3.4") + }) + + test("reads chocolatey feed versions", async () => { + globalThis.fetch = (async () => + new Response( + JSON.stringify({ + d: { + results: [{ Version: "3.4.5" }], + }, + }), + { + status: 200, + headers: { "content-type": "application/json" }, + }, + )) as unknown as typeof fetch + + expect(await Installation.latest("choco")).toBe("3.4.5") + }) +}) diff --git a/packages/opencode/test/mcp/headers.test.ts b/packages/opencode/test/mcp/headers.test.ts index 69998aaaa8..8c488d4c4f 100644 --- a/packages/opencode/test/mcp/headers.test.ts +++ b/packages/opencode/test/mcp/headers.test.ts @@ -53,7 +53,7 @@ test("headers are passed to transports when oauth is enabled (default)", async ( await Bun.write( `${dir}/opencode.json`, JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { "test-server": { type: "remote", diff --git a/packages/opencode/test/mcp/oauth-auto-connect.test.ts b/packages/opencode/test/mcp/oauth-auto-connect.test.ts new file mode 100644 index 0000000000..f5aa56fe91 --- /dev/null +++ b/packages/opencode/test/mcp/oauth-auto-connect.test.ts @@ -0,0 +1,199 @@ +import { test, expect, mock, beforeEach } from "bun:test" + +// Mock 
UnauthorizedError to match the SDK's class +class MockUnauthorizedError extends Error { + constructor(message?: string) { + super(message ?? "Unauthorized") + this.name = "UnauthorizedError" + } +} + +// Track what options were passed to each transport constructor +const transportCalls: Array<{ + type: "streamable" | "sse" + url: string + options: { authProvider?: unknown } +}> = [] + +// Controls whether the mock transport simulates a 401 that triggers the SDK +// auth flow (which calls provider.state()) or a simple UnauthorizedError. +let simulateAuthFlow = true + +// Mock the transport constructors to simulate OAuth auto-auth on 401 +mock.module("@modelcontextprotocol/sdk/client/streamableHttp.js", () => ({ + StreamableHTTPClientTransport: class MockStreamableHTTP { + authProvider: + | { + state?: () => Promise<string> + redirectToAuthorization?: (url: URL) => Promise<void> + saveCodeVerifier?: (v: string) => Promise<void> + } + | undefined + constructor(url: URL, options?: { authProvider?: unknown }) { + this.authProvider = options?.authProvider as typeof this.authProvider + transportCalls.push({ + type: "streamable", + url: url.toString(), + options: options ?? {}, + }) + } + async start() { + // Simulate what the real SDK transport does on 401: + // It calls auth() which eventually calls provider.state(), then + // provider.redirectToAuthorization(), then throws UnauthorizedError. 
+ if (simulateAuthFlow && this.authProvider) { + // The SDK calls provider.state() to get the OAuth state parameter + if (this.authProvider.state) { + await this.authProvider.state() + } + // The SDK calls saveCodeVerifier before redirecting + if (this.authProvider.saveCodeVerifier) { + await this.authProvider.saveCodeVerifier("test-verifier") + } + // The SDK calls redirectToAuthorization to redirect the user + if (this.authProvider.redirectToAuthorization) { + await this.authProvider.redirectToAuthorization(new URL("https://auth.example.com/authorize?state=test")) + } + throw new MockUnauthorizedError() + } + throw new MockUnauthorizedError() + } + async finishAuth(_code: string) {} + }, +})) + +mock.module("@modelcontextprotocol/sdk/client/sse.js", () => ({ + SSEClientTransport: class MockSSE { + constructor(url: URL, options?: { authProvider?: unknown }) { + transportCalls.push({ + type: "sse", + url: url.toString(), + options: options ?? {}, + }) + } + async start() { + throw new Error("Mock SSE transport cannot connect") + } + }, +})) + +// Mock the MCP SDK Client +mock.module("@modelcontextprotocol/sdk/client/index.js", () => ({ + Client: class MockClient { + async connect(transport: { start: () => Promise<void> }) { + await transport.start() + } + }, +})) + +// Mock UnauthorizedError in the auth module so instanceof checks work +mock.module("@modelcontextprotocol/sdk/client/auth.js", () => ({ + UnauthorizedError: MockUnauthorizedError, +})) + +beforeEach(() => { + transportCalls.length = 0 + simulateAuthFlow = true +}) + +// Import modules after mocking +const { MCP } = await import("../../src/mcp/index") +const { Instance } = await import("../../src/project/instance") +const { tmpdir } = await import("../fixture/fixture") + +test("first connect to OAuth server shows needs_auth instead of failed", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + `${dir}/opencode.json`, + JSON.stringify({ + $schema: 
"https://altimate.ai/config.json", + mcp: { + "test-oauth": { + type: "remote", + url: "https://example.com/mcp", + }, + }, + }), + ) + }, + }) + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const result = await MCP.add("test-oauth", { + type: "remote", + url: "https://example.com/mcp", + }) + + const serverStatus = result.status as Record<string, { status: string; error?: string }> + + // The server should be detected as needing auth, NOT as failed. + // Before the fix, provider.state() would throw a plain Error + // ("No OAuth state saved for MCP server: test-oauth") which was + // not caught as UnauthorizedError, causing status to be "failed". + expect(serverStatus["test-oauth"]).toBeDefined() + expect(serverStatus["test-oauth"].status).toBe("needs_auth") + }, + }) +}) + +test("state() generates a new state when none is saved", async () => { + const { McpOAuthProvider } = await import("../../src/mcp/oauth-provider") + const { McpAuth } = await import("../../src/mcp/auth") + + await using tmp = await tmpdir() + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const provider = new McpOAuthProvider( + "test-state-gen", + "https://example.com/mcp", + {}, + { onRedirect: async () => {} }, + ) + + // Ensure no state exists + const entryBefore = await McpAuth.get("test-state-gen") + expect(entryBefore?.oauthState).toBeUndefined() + + // state() should generate and return a new state, not throw + const state = await provider.state() + expect(typeof state).toBe("string") + expect(state.length).toBe(64) // 32 bytes as hex + + // The generated state should be persisted + const entryAfter = await McpAuth.get("test-state-gen") + expect(entryAfter?.oauthState).toBe(state) + }, + }) +}) + +test("state() returns existing state when one is saved", async () => { + const { McpOAuthProvider } = await import("../../src/mcp/oauth-provider") + const { McpAuth } = await import("../../src/mcp/auth") + + await using tmp = await tmpdir() + 
+ await Instance.provide({ + directory: tmp.path, + fn: async () => { + const provider = new McpOAuthProvider( + "test-state-existing", + "https://example.com/mcp", + {}, + { onRedirect: async () => {} }, + ) + + // Pre-save a state + const existingState = "pre-saved-state-value" + await McpAuth.updateOAuthState("test-state-existing", existingState) + + // state() should return the existing state + const state = await provider.state() + expect(state).toBe(existingState) + }, + }) +}) diff --git a/packages/opencode/test/mcp/oauth-browser.test.ts b/packages/opencode/test/mcp/oauth-browser.test.ts index ee4429be75..801fc3e953 100644 --- a/packages/opencode/test/mcp/oauth-browser.test.ts +++ b/packages/opencode/test/mcp/oauth-browser.test.ts @@ -111,7 +111,7 @@ test("BrowserOpenFailed event is published when open() throws", async () => { await Bun.write( `${dir}/opencode.json`, JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { "test-oauth-server": { type: "remote", @@ -162,7 +162,7 @@ test("BrowserOpenFailed event is NOT published when open() succeeds", async () = await Bun.write( `${dir}/opencode.json`, JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { "test-oauth-server-2": { type: "remote", @@ -211,7 +211,7 @@ test("open() is called with the authorization URL", async () => { await Bun.write( `${dir}/opencode.json`, JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", mcp: { "test-oauth-server-3": { type: "remote", diff --git a/packages/opencode/test/memory/abort-leak.test.ts b/packages/opencode/test/memory/abort-leak.test.ts index b202c9127a..eebb651a53 100644 --- a/packages/opencode/test/memory/abort-leak.test.ts +++ b/packages/opencode/test/memory/abort-leak.test.ts @@ -2,12 +2,13 @@ import { describe, test, expect } from "bun:test" import path from "path" import { Instance } 
from "../../src/project/instance" import { WebFetchTool } from "../../src/tool/webfetch" +import { SessionID, MessageID } from "../../src/session/schema" const projectRoot = path.join(__dirname, "../..") const ctx = { - sessionID: "test", - messageID: "", + sessionID: SessionID.make("ses_test"), + messageID: MessageID.make(""), callID: "", agent: "build", abort: new AbortController().signal, diff --git a/packages/opencode/test/permission/next.test.ts b/packages/opencode/test/permission/next.test.ts index add3332048..7fd0818991 100644 --- a/packages/opencode/test/permission/next.test.ts +++ b/packages/opencode/test/permission/next.test.ts @@ -1,8 +1,10 @@ import { test, expect } from "bun:test" import os from "os" import { PermissionNext } from "../../src/permission/next" +import { PermissionID } from "../../src/permission/schema" import { Instance } from "../../src/project/instance" import { tmpdir } from "../fixture/fixture" +import { SessionID } from "../../src/session/schema" // fromConfig tests @@ -462,7 +464,7 @@ test("ask - resolves immediately when action is allow", async () => { directory: tmp.path, fn: async () => { const result = await PermissionNext.ask({ - sessionID: "session_test", + sessionID: SessionID.make("session_test"), permission: "bash", patterns: ["ls"], metadata: {}, @@ -481,7 +483,7 @@ test("ask - throws RejectedError when action is deny", async () => { fn: async () => { await expect( PermissionNext.ask({ - sessionID: "session_test", + sessionID: SessionID.make("session_test"), permission: "bash", patterns: ["rm -rf /"], metadata: {}, @@ -499,7 +501,7 @@ test("ask - returns pending promise when action is ask", async () => { directory: tmp.path, fn: async () => { const promise = PermissionNext.ask({ - sessionID: "session_test", + sessionID: SessionID.make("session_test"), permission: "bash", patterns: ["ls"], metadata: {}, @@ -521,8 +523,8 @@ test("reply - once resolves the pending ask", async () => { directory: tmp.path, fn: async () => { 
const askPromise = PermissionNext.ask({ - id: "permission_test1", - sessionID: "session_test", + id: PermissionID.make("per_test1"), + sessionID: SessionID.make("session_test"), permission: "bash", patterns: ["ls"], metadata: {}, @@ -531,7 +533,7 @@ test("reply - once resolves the pending ask", async () => { }) await PermissionNext.reply({ - requestID: "permission_test1", + requestID: PermissionID.make("per_test1"), reply: "once", }) @@ -546,8 +548,8 @@ test("reply - reject throws RejectedError", async () => { directory: tmp.path, fn: async () => { const askPromise = PermissionNext.ask({ - id: "permission_test2", - sessionID: "session_test", + id: PermissionID.make("per_test2"), + sessionID: SessionID.make("session_test"), permission: "bash", patterns: ["ls"], metadata: {}, @@ -556,7 +558,7 @@ test("reply - reject throws RejectedError", async () => { }) await PermissionNext.reply({ - requestID: "permission_test2", + requestID: PermissionID.make("per_test2"), reply: "reject", }) @@ -571,8 +573,8 @@ test("reply - always persists approval and resolves", async () => { directory: tmp.path, fn: async () => { const askPromise = PermissionNext.ask({ - id: "permission_test3", - sessionID: "session_test", + id: PermissionID.make("per_test3"), + sessionID: SessionID.make("session_test"), permission: "bash", patterns: ["ls"], metadata: {}, @@ -581,7 +583,7 @@ test("reply - always persists approval and resolves", async () => { }) await PermissionNext.reply({ - requestID: "permission_test3", + requestID: PermissionID.make("per_test3"), reply: "always", }) @@ -594,7 +596,7 @@ test("reply - always persists approval and resolves", async () => { fn: async () => { // Stored approval should allow without asking const result = await PermissionNext.ask({ - sessionID: "session_test2", + sessionID: SessionID.make("session_test2"), permission: "bash", patterns: ["ls"], metadata: {}, @@ -612,8 +614,8 @@ test("reply - reject cancels all pending for same session", async () => { directory: 
tmp.path, fn: async () => { const askPromise1 = PermissionNext.ask({ - id: "permission_test4a", - sessionID: "session_same", + id: PermissionID.make("per_test4a"), + sessionID: SessionID.make("session_same"), permission: "bash", patterns: ["ls"], metadata: {}, @@ -622,8 +624,8 @@ test("reply - reject cancels all pending for same session", async () => { }) const askPromise2 = PermissionNext.ask({ - id: "permission_test4b", - sessionID: "session_same", + id: PermissionID.make("per_test4b"), + sessionID: SessionID.make("session_same"), permission: "edit", patterns: ["foo.ts"], metadata: {}, @@ -637,7 +639,7 @@ test("reply - reject cancels all pending for same session", async () => { // Reject the first one await PermissionNext.reply({ - requestID: "permission_test4a", + requestID: PermissionID.make("per_test4a"), reply: "reject", }) @@ -655,7 +657,7 @@ test("ask - checks all patterns and stops on first deny", async () => { fn: async () => { await expect( PermissionNext.ask({ - sessionID: "session_test", + sessionID: SessionID.make("session_test"), permission: "bash", patterns: ["echo hello", "rm -rf /"], metadata: {}, @@ -676,7 +678,7 @@ test("ask - allows all patterns when all match allow rules", async () => { directory: tmp.path, fn: async () => { const result = await PermissionNext.ask({ - sessionID: "session_test", + sessionID: SessionID.make("session_test"), permission: "bash", patterns: ["echo hello", "ls -la", "pwd"], metadata: {}, diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index caac3bb0de..1ebd273d26 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -50,7 +50,7 @@ const cacheDir = path.join(dir, "cache", "opencode") await fs.mkdir(cacheDir, { recursive: true }) await fs.writeFile(path.join(cacheDir, "version"), "14") -// Clear provider env vars to ensure clean test state +// Clear provider and server auth env vars to ensure clean test state delete process.env["ANTHROPIC_API_KEY"] 
delete process.env["OPENAI_API_KEY"] delete process.env["GOOGLE_API_KEY"] @@ -70,6 +70,8 @@ delete process.env["DEEPSEEK_API_KEY"] delete process.env["FIREWORKS_API_KEY"] delete process.env["CEREBRAS_API_KEY"] delete process.env["SAMBANOVA_API_KEY"] +delete process.env["OPENCODE_SERVER_PASSWORD"] +delete process.env["OPENCODE_SERVER_USERNAME"] // Now safe to import from src/ const { Log } = await import("../src/util/log") diff --git a/packages/opencode/test/project/migrate-global.test.ts b/packages/opencode/test/project/migrate-global.test.ts new file mode 100644 index 0000000000..b66653f700 --- /dev/null +++ b/packages/opencode/test/project/migrate-global.test.ts @@ -0,0 +1,140 @@ +import { describe, expect, test } from "bun:test" +import { Project } from "../../src/project/project" +import { Database, eq } from "../../src/storage/db" +import { SessionTable } from "../../src/session/session.sql" +import { ProjectTable } from "../../src/project/project.sql" +import { ProjectID } from "../../src/project/schema" +import { SessionID } from "../../src/session/schema" +import { Log } from "../../src/util/log" +import { $ } from "bun" +import { tmpdir } from "../fixture/fixture" + +Log.init({ print: false }) + +function uid() { + return SessionID.make(crypto.randomUUID()) +} + +function seed(opts: { id: SessionID; dir: string; project: ProjectID }) { + const now = Date.now() + Database.use((db) => + db + .insert(SessionTable) + .values({ + id: opts.id, + project_id: opts.project, + slug: opts.id, + directory: opts.dir, + title: "test", + version: "0.0.0-test", + time_created: now, + time_updated: now, + }) + .run(), + ) +} + +function ensureGlobal() { + Database.use((db) => + db + .insert(ProjectTable) + .values({ + id: ProjectID.global, + worktree: "/", + time_created: Date.now(), + time_updated: Date.now(), + sandboxes: [], + }) + .onConflictDoNothing() + .run(), + ) +} + +describe("migrateFromGlobal", () => { + test("migrates global sessions on first project 
creation", async () => { + // 1. Start with git init but no commits — creates "global" project row + await using tmp = await tmpdir() + await $`git init`.cwd(tmp.path).quiet() + await $`git config user.name "Test"`.cwd(tmp.path).quiet() + await $`git config user.email "test@opencode.test"`.cwd(tmp.path).quiet() + const { project: pre } = await Project.fromDirectory(tmp.path) + expect(pre.id).toBe(ProjectID.global) + + // 2. Seed a session under "global" with matching directory + const id = uid() + seed({ id, dir: tmp.path, project: ProjectID.global }) + + // 3. Make a commit so the project gets a real ID + await $`git commit --allow-empty -m "root"`.cwd(tmp.path).quiet() + + const { project: real } = await Project.fromDirectory(tmp.path) + expect(real.id).not.toBe(ProjectID.global) + + // 4. The session should have been migrated to the real project ID + const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get()) + expect(row).toBeDefined() + expect(row!.project_id).toBe(real.id) + }) + + test("migrates global sessions even when project row already exists", async () => { + // 1. Create a repo with a commit — real project ID created immediately + await using tmp = await tmpdir({ git: true }) + const { project } = await Project.fromDirectory(tmp.path) + expect(project.id).not.toBe(ProjectID.global) + + // 2. Ensure "global" project row exists (as it would from a prior no-git session) + ensureGlobal() + + // 3. Seed a session under "global" with matching directory. + // This simulates a session created before git init that wasn't + // present when the real project row was first created. + const id = uid() + seed({ id, dir: tmp.path, project: ProjectID.global }) + + // 4. Call fromDirectory again — project row already exists, + // so the current code skips migration entirely. This is the bug. 
+ await Project.fromDirectory(tmp.path) + + const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get()) + expect(row).toBeDefined() + expect(row!.project_id).toBe(project.id) + }) + + test("does not claim sessions with empty directory", async () => { + await using tmp = await tmpdir({ git: true }) + const { project } = await Project.fromDirectory(tmp.path) + expect(project.id).not.toBe(ProjectID.global) + + ensureGlobal() + + // Legacy sessions may lack a directory value. + // Without a matching origin directory, they should remain global. + const id = uid() + seed({ id, dir: "", project: ProjectID.global }) + + await Project.fromDirectory(tmp.path) + + const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get()) + expect(row).toBeDefined() + expect(row!.project_id).toBe(ProjectID.global) + }) + + test("does not steal sessions from unrelated directories", async () => { + await using tmp = await tmpdir({ git: true }) + const { project } = await Project.fromDirectory(tmp.path) + expect(project.id).not.toBe(ProjectID.global) + + ensureGlobal() + + // Seed a session under "global" but for a DIFFERENT directory + const id = uid() + seed({ id, dir: "/some/other/dir", project: ProjectID.global }) + + await Project.fromDirectory(tmp.path) + + const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get()) + expect(row).toBeDefined() + // Should remain under "global" — not stolen + expect(row!.project_id).toBe(ProjectID.global) + }) +}) diff --git a/packages/opencode/test/project/project.test.ts b/packages/opencode/test/project/project.test.ts index 72008f8b67..52f98dd0e2 100644 --- a/packages/opencode/test/project/project.test.ts +++ b/packages/opencode/test/project/project.test.ts @@ -7,6 +7,7 @@ import fs from "fs/promises" import { tmpdir } from "../fixture/fixture" import { Filesystem } from "../../src/util/filesystem" import { GlobalBus } from 
"../../src/bus/global" +import { ProjectID } from "../../src/project/schema" Log.init({ print: false }) @@ -75,11 +76,11 @@ describe("Project.fromDirectory", () => { const { project } = await p.fromDirectory(tmp.path) expect(project).toBeDefined() - expect(project.id).toBe("global") + expect(project.id).toBe(ProjectID.global) expect(project.vcs).toBe("git") expect(project.worktree).toBe(tmp.path) - const opencodeFile = path.join(tmp.path, ".git", "altimate") + const opencodeFile = path.join(tmp.path, ".git", "opencode") const fileExists = await Filesystem.exists(opencodeFile) expect(fileExists).toBe(false) }) @@ -91,11 +92,11 @@ describe("Project.fromDirectory", () => { const { project } = await p.fromDirectory(tmp.path) expect(project).toBeDefined() - expect(project.id).not.toBe("global") + expect(project.id).not.toBe(ProjectID.global) expect(project.vcs).toBe("git") expect(project.worktree).toBe(tmp.path) - const opencodeFile = path.join(tmp.path, ".git", "altimate") + const opencodeFile = path.join(tmp.path, ".git", "opencode") const fileExists = await Filesystem.exists(opencodeFile) expect(fileExists).toBe(true) }) @@ -104,11 +105,11 @@ describe("Project.fromDirectory", () => { const p = await loadProject() await using tmp = await tmpdir({ git: true }) - // First call creates .git/altimate with the project id + // First call creates .git/opencode with the project id const { project: first } = await p.fromDirectory(tmp.path) expect(first.id).not.toBe("global") - const newFile = path.join(tmp.path, ".git", "altimate") + const newFile = path.join(tmp.path, ".git", "opencode") const legacyFile = path.join(tmp.path, ".git", "altimate-code") // Move the new file to the legacy location to simulate an old installation @@ -129,7 +130,7 @@ describe("Project.fromDirectory", () => { await withMode("rev-list-fail", async () => { const { project } = await p.fromDirectory(tmp.path) expect(project.vcs).toBe("git") - expect(project.id).toBe("global") + 
expect(project.id).toBe(ProjectID.global) expect(project.worktree).toBe(tmp.path) }) }) @@ -323,7 +324,7 @@ describe("Project.update", () => { await expect( Project.update({ - projectID: "nonexistent-project-id", + projectID: ProjectID.make("nonexistent-project-id"), name: "Should Fail", }), ).rejects.toThrow("Project not found: nonexistent-project-id") diff --git a/packages/opencode/test/project/state.test.ts b/packages/opencode/test/project/state.test.ts new file mode 100644 index 0000000000..c1a6dab315 --- /dev/null +++ b/packages/opencode/test/project/state.test.ts @@ -0,0 +1,115 @@ +import { afterEach, expect, test } from "bun:test" + +import { Instance } from "../../src/project/instance" +import { tmpdir } from "../fixture/fixture" + +afterEach(async () => { + await Instance.disposeAll() +}) + +test("Instance.state caches values for the same instance", async () => { + await using tmp = await tmpdir() + let n = 0 + const state = Instance.state(() => ({ n: ++n })) + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const a = state() + const b = state() + expect(a).toBe(b) + expect(n).toBe(1) + }, + }) +}) + +test("Instance.state isolates values by directory", async () => { + await using a = await tmpdir() + await using b = await tmpdir() + let n = 0 + const state = Instance.state(() => ({ n: ++n })) + + const x = await Instance.provide({ + directory: a.path, + fn: async () => state(), + }) + const y = await Instance.provide({ + directory: b.path, + fn: async () => state(), + }) + const z = await Instance.provide({ + directory: a.path, + fn: async () => state(), + }) + + expect(x).toBe(z) + expect(x).not.toBe(y) + expect(n).toBe(2) +}) + +test("Instance.state is disposed on instance reload", async () => { + await using tmp = await tmpdir() + const seen: string[] = [] + let n = 0 + const state = Instance.state( + () => ({ n: ++n }), + async (value) => { + seen.push(String(value.n)) + }, + ) + + const a = await Instance.provide({ + directory: 
tmp.path, + fn: async () => state(), + }) + await Instance.reload({ directory: tmp.path }) + const b = await Instance.provide({ + directory: tmp.path, + fn: async () => state(), + }) + + expect(a).not.toBe(b) + expect(seen).toEqual(["1"]) +}) + +test("Instance.state is disposed on disposeAll", async () => { + await using a = await tmpdir() + await using b = await tmpdir() + const seen: string[] = [] + const state = Instance.state( + () => ({ dir: Instance.directory }), + async (value) => { + seen.push(value.dir) + }, + ) + + await Instance.provide({ + directory: a.path, + fn: async () => state(), + }) + await Instance.provide({ + directory: b.path, + fn: async () => state(), + }) + await Instance.disposeAll() + + expect(seen.sort()).toEqual([a.path, b.path].sort()) +}) + +test("Instance.state dedupes concurrent promise initialization", async () => { + await using tmp = await tmpdir() + let n = 0 + const state = Instance.state(async () => { + n += 1 + await Bun.sleep(10) + return { n } + }) + + const [a, b] = await Instance.provide({ + directory: tmp.path, + fn: async () => Promise.all([state(), state()]), + }) + + expect(a).toBe(b) + expect(n).toBe(1) +}) diff --git a/packages/opencode/test/provider/amazon-bedrock.test.ts b/packages/opencode/test/provider/amazon-bedrock.test.ts index cb64455b4d..605e964e1e 100644 --- a/packages/opencode/test/provider/amazon-bedrock.test.ts +++ b/packages/opencode/test/provider/amazon-bedrock.test.ts @@ -15,7 +15,7 @@ test("Bedrock: config region takes precedence over AWS_REGION env var", async () await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "amazon-bedrock": { options: { @@ -47,7 +47,7 @@ test("Bedrock: falls back to AWS_REGION env var when no config region", async () await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: 
"https://altimate.ai/config.json", }), ) }, @@ -72,7 +72,7 @@ test("Bedrock: loads when bearer token from auth.json is present", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "amazon-bedrock": { options: { @@ -140,7 +140,7 @@ test("Bedrock: config profile takes precedence over AWS_PROFILE env var", async await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "amazon-bedrock": { options: { @@ -173,7 +173,7 @@ test("Bedrock: includes custom endpoint in options when specified", async () => await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "amazon-bedrock": { options: { @@ -206,7 +206,7 @@ test("Bedrock: autoloads when AWS_WEB_IDENTITY_TOKEN_FILE is present", async () await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "amazon-bedrock": { options: { @@ -244,7 +244,7 @@ test("Bedrock: model with us. prefix should not be double-prefixed", async () => await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "amazon-bedrock": { options: { @@ -281,7 +281,7 @@ test("Bedrock: model with global. prefix should not be prefixed", async () => { await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "amazon-bedrock": { options: { @@ -317,7 +317,7 @@ test("Bedrock: model with eu. 
prefix should not be double-prefixed", async () => await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "amazon-bedrock": { options: { @@ -353,7 +353,7 @@ test("Bedrock: model without prefix in US region should get us. prefix added", a await Filesystem.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "amazon-bedrock": { options: { diff --git a/packages/opencode/test/provider/auth.test.ts b/packages/opencode/test/provider/auth.test.ts new file mode 100644 index 0000000000..99babd44a6 --- /dev/null +++ b/packages/opencode/test/provider/auth.test.ts @@ -0,0 +1,20 @@ +import { afterEach, expect, test } from "bun:test" +import { Auth } from "../../src/auth" +import { ProviderAuth } from "../../src/provider/auth" +import { ProviderID } from "../../src/provider/schema" + +afterEach(async () => { + await Auth.remove("test-provider-auth") +}) + +test("ProviderAuth.api persists auth via AuthService", async () => { + await ProviderAuth.api({ + providerID: ProviderID.make("test-provider-auth"), + key: "sk-test", + }) + + expect(await Auth.get("test-provider-auth")).toEqual({ + type: "api", + key: "sk-test", + }) +}) diff --git a/packages/opencode/test/provider/gitlab-duo.test.ts b/packages/opencode/test/provider/gitlab-duo.test.ts index c512a45909..3fc8d72a6f 100644 --- a/packages/opencode/test/provider/gitlab-duo.test.ts +++ b/packages/opencode/test/provider/gitlab-duo.test.ts @@ -13,7 +13,7 @@ test("GitLab Duo: loads provider with API key from environment", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -37,7 +37,7 @@ test("GitLab Duo: config instanceUrl option sets baseURL", async () => { await Bun.write( 
path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { gitlab: { options: { @@ -69,7 +69,7 @@ test("GitLab Duo: loads with OAuth token from auth.json", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -106,7 +106,7 @@ test("GitLab Duo: loads with Personal Access Token from auth.json", async () => await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -142,7 +142,7 @@ test("GitLab Duo: supports self-hosted instance configuration", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { gitlab: { options: { @@ -174,7 +174,7 @@ test("GitLab Duo: config apiKey takes precedence over environment variable", asy await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { gitlab: { options: { @@ -198,13 +198,37 @@ test("GitLab Duo: config apiKey takes precedence over environment variable", asy }) }) +test("GitLab Duo: includes context-1m beta header in aiGatewayHeaders", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://altimate.ai/config.json", + }), + ) + }, + }) + await Instance.provide({ + directory: tmp.path, + init: async () => { + Env.set("GITLAB_TOKEN", "test-token") + }, + fn: async () => { + const providers = await Provider.list() + expect(providers["gitlab"]).toBeDefined() + 
expect(providers["gitlab"].options?.aiGatewayHeaders?.["anthropic-beta"]).toContain("context-1m-2025-08-07") + }, + }) +}) + test("GitLab Duo: supports feature flags configuration", async () => { await using tmp = await tmpdir({ init: async (dir) => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { gitlab: { options: { @@ -239,7 +263,7 @@ test("GitLab Duo: has multiple agentic chat models available", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts index 11c943db6f..7171193a5a 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -4,6 +4,7 @@ import path from "path" import { tmpdir } from "../fixture/fixture" import { Instance } from "../../src/project/instance" import { Provider } from "../../src/provider/provider" +import { ProviderID, ModelID } from "../../src/provider/schema" import { Env } from "../../src/env" test("provider loaded from env variable", async () => { @@ -12,7 +13,7 @@ test("provider loaded from env variable", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -39,7 +40,7 @@ test("provider loaded from config with apiKey option", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { options: { @@ -66,7 +67,7 @@ test("disabled_providers excludes provider", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: 
"https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", disabled_providers: ["anthropic"], }), ) @@ -90,7 +91,7 @@ test("enabled_providers restricts to only listed providers", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", enabled_providers: ["anthropic"], }), ) @@ -116,7 +117,7 @@ test("model whitelist filters models for provider", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { whitelist: ["claude-sonnet-4-20250514"], @@ -147,7 +148,7 @@ test("model blacklist excludes specific models", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { blacklist: ["claude-sonnet-4-20250514"], @@ -177,7 +178,7 @@ test("custom model alias via config", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -212,7 +213,7 @@ test("custom provider with npm package", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "custom-provider": { name: "Custom Provider", @@ -255,11 +256,12 @@ test("env variable takes precedence, config merges options", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { options: { timeout: 60000, + chunkTimeout: 15000, }, }, }, @@ -277,6 +279,7 @@ test("env variable takes precedence, config 
merges options", async () => { expect(providers["anthropic"]).toBeDefined() // Config options should be merged expect(providers["anthropic"].options.timeout).toBe(60000) + expect(providers["anthropic"].options.chunkTimeout).toBe(15000) }, }) }) @@ -287,7 +290,7 @@ test("getModel returns model for valid provider/model", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -298,10 +301,10 @@ test("getModel returns model for valid provider/model", async () => { Env.set("ANTHROPIC_API_KEY", "test-api-key") }, fn: async () => { - const model = await Provider.getModel("anthropic", "claude-sonnet-4-20250514") + const model = await Provider.getModel(ProviderID.anthropic, ModelID.make("claude-sonnet-4-20250514")) expect(model).toBeDefined() - expect(model.providerID).toBe("anthropic") - expect(model.id).toBe("claude-sonnet-4-20250514") + expect(String(model.providerID)).toBe("anthropic") + expect(String(model.id)).toBe("claude-sonnet-4-20250514") const language = await Provider.getLanguage(model) expect(language).toBeDefined() }, @@ -314,7 +317,7 @@ test("getModel throws ModelNotFoundError for invalid model", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -325,7 +328,7 @@ test("getModel throws ModelNotFoundError for invalid model", async () => { Env.set("ANTHROPIC_API_KEY", "test-api-key") }, fn: async () => { - expect(Provider.getModel("anthropic", "nonexistent-model")).rejects.toThrow() + expect(Provider.getModel(ProviderID.anthropic, ModelID.make("nonexistent-model"))).rejects.toThrow() }, }) }) @@ -336,7 +339,7 @@ test("getModel throws ModelNotFoundError for invalid provider", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + 
$schema: "https://altimate.ai/config.json", }), ) }, @@ -344,21 +347,21 @@ test("getModel throws ModelNotFoundError for invalid provider", async () => { await Instance.provide({ directory: tmp.path, fn: async () => { - expect(Provider.getModel("nonexistent-provider", "some-model")).rejects.toThrow() + expect(Provider.getModel(ProviderID.make("nonexistent-provider"), ModelID.make("some-model"))).rejects.toThrow() }, }) }) test("parseModel correctly parses provider/model string", () => { const result = Provider.parseModel("anthropic/claude-sonnet-4") - expect(result.providerID).toBe("anthropic") - expect(result.modelID).toBe("claude-sonnet-4") + expect(String(result.providerID)).toBe("anthropic") + expect(String(result.modelID)).toBe("claude-sonnet-4") }) test("parseModel handles model IDs with slashes", () => { const result = Provider.parseModel("openrouter/anthropic/claude-3-opus") - expect(result.providerID).toBe("openrouter") - expect(result.modelID).toBe("anthropic/claude-3-opus") + expect(String(result.providerID)).toBe("openrouter") + expect(String(result.modelID)).toBe("anthropic/claude-3-opus") }) test("defaultModel returns first available model when no config set", async () => { @@ -367,7 +370,7 @@ test("defaultModel returns first available model when no config set", async () = await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -391,7 +394,7 @@ test("defaultModel respects config model setting", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", model: "anthropic/claude-sonnet-4-20250514", }), ) @@ -404,8 +407,8 @@ test("defaultModel respects config model setting", async () => { }, fn: async () => { const model = await Provider.defaultModel() - expect(model.providerID).toBe("anthropic") - 
expect(model.modelID).toBe("claude-sonnet-4-20250514") + expect(String(model.providerID)).toBe("anthropic") + expect(String(model.modelID)).toBe("claude-sonnet-4-20250514") }, }) }) @@ -416,7 +419,7 @@ test("provider with baseURL from config", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "custom-openai": { name: "Custom OpenAI", @@ -455,7 +458,7 @@ test("model cost defaults to zero when not specified", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "test-provider": { name: "Test Provider", @@ -496,7 +499,7 @@ test("model options are merged from existing model", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -531,7 +534,7 @@ test("provider removed when all models filtered out", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { whitelist: ["nonexistent-model"], @@ -559,7 +562,7 @@ test("closest finds model by partial match", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -570,10 +573,10 @@ test("closest finds model by partial match", async () => { Env.set("ANTHROPIC_API_KEY", "test-api-key") }, fn: async () => { - const result = await Provider.closest("anthropic", ["sonnet-4"]) + const result = await Provider.closest(ProviderID.anthropic, ["sonnet-4"]) expect(result).toBeDefined() - expect(result?.providerID).toBe("anthropic") - 
expect(result?.modelID).toContain("sonnet-4") + expect(String(result?.providerID)).toBe("anthropic") + expect(String(result?.modelID)).toContain("sonnet-4") }, }) }) @@ -584,7 +587,7 @@ test("closest returns undefined for nonexistent provider", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -592,7 +595,7 @@ test("closest returns undefined for nonexistent provider", async () => { await Instance.provide({ directory: tmp.path, fn: async () => { - const result = await Provider.closest("nonexistent", ["model"]) + const result = await Provider.closest(ProviderID.make("nonexistent"), ["model"]) expect(result).toBeUndefined() }, }) @@ -604,7 +607,7 @@ test("getModel uses realIdByKey for aliased models", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -628,9 +631,9 @@ test("getModel uses realIdByKey for aliased models", async () => { const providers = await Provider.list() expect(providers["anthropic"].models["my-sonnet"]).toBeDefined() - const model = await Provider.getModel("anthropic", "my-sonnet") + const model = await Provider.getModel(ProviderID.anthropic, ModelID.make("my-sonnet")) expect(model).toBeDefined() - expect(model.id).toBe("my-sonnet") + expect(String(model.id)).toBe("my-sonnet") expect(model.name).toBe("My Sonnet Alias") }, }) @@ -642,7 +645,7 @@ test("provider api field sets model api.url", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "custom-api": { name: "Custom API", @@ -681,7 +684,7 @@ test("explicit baseURL overrides api field", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - 
$schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "custom-api": { name: "Custom API", @@ -720,7 +723,7 @@ test("model inherits properties from existing database model", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -756,7 +759,7 @@ test("disabled_providers prevents loading even with env var", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", disabled_providers: ["openai"], }), ) @@ -780,7 +783,7 @@ test("enabled_providers with empty array allows no providers", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", enabled_providers: [], }), ) @@ -805,7 +808,7 @@ test("whitelist and blacklist can be combined", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { whitelist: ["claude-sonnet-4-20250514", "claude-opus-4-20250514"], @@ -838,7 +841,7 @@ test("model modalities default correctly", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "test-provider": { name: "Test", @@ -875,7 +878,7 @@ test("model with custom cost values", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "test-provider": { name: "Test", @@ -920,7 +923,7 @@ test("getSmallModel returns appropriate small model", async () => { await 
Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -931,7 +934,7 @@ test("getSmallModel returns appropriate small model", async () => { Env.set("ANTHROPIC_API_KEY", "test-api-key") }, fn: async () => { - const model = await Provider.getSmallModel("anthropic") + const model = await Provider.getSmallModel(ProviderID.anthropic) expect(model).toBeDefined() expect(model?.id).toContain("haiku") }, @@ -944,7 +947,7 @@ test("getSmallModel respects config small_model override", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", small_model: "anthropic/claude-sonnet-4-20250514", }), ) @@ -956,10 +959,10 @@ test("getSmallModel respects config small_model override", async () => { Env.set("ANTHROPIC_API_KEY", "test-api-key") }, fn: async () => { - const model = await Provider.getSmallModel("anthropic") + const model = await Provider.getSmallModel(ProviderID.anthropic) expect(model).toBeDefined() - expect(model?.providerID).toBe("anthropic") - expect(model?.id).toBe("claude-sonnet-4-20250514") + expect(String(model?.providerID)).toBe("anthropic") + expect(String(model?.id)).toBe("claude-sonnet-4-20250514") }, }) }) @@ -985,7 +988,7 @@ test("multiple providers can be configured simultaneously", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { options: { timeout: 30000 }, @@ -1020,7 +1023,7 @@ test("provider with custom npm package", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "local-llm": { name: "Local LLM", @@ -1062,7 +1065,7 @@ test("model alias name defaults 
to alias key when id differs", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -1095,7 +1098,7 @@ test("provider with multiple env var options only includes apiKey when single en await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "multi-env": { name: "Multi Env Provider", @@ -1137,7 +1140,7 @@ test("provider with single env var includes apiKey automatically", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "single-env": { name: "Single Env Provider", @@ -1179,7 +1182,7 @@ test("model cost overrides existing cost values", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -1216,7 +1219,7 @@ test("completely new provider not in database can be configured", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "brand-new-provider": { name: "Brand New", @@ -1266,7 +1269,7 @@ test("disabled_providers and enabled_providers interaction", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", // enabled_providers takes precedence - only these are considered enabled_providers: ["anthropic", "openai"], // Then disabled_providers filters from the enabled set @@ -1300,7 +1303,7 @@ test("model with tool_call false", async () => { await Bun.write( path.join(dir, 
"opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "no-tools": { name: "No Tools Provider", @@ -1335,7 +1338,7 @@ test("model defaults tool_call to true when not specified", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "default-tools": { name: "Default Tools Provider", @@ -1370,7 +1373,7 @@ test("model headers are preserved", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "headers-provider": { name: "Headers Provider", @@ -1413,7 +1416,7 @@ test("provider env fallback - second env var used if first missing", async () => await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "fallback-env": { name: "Fallback Env Provider", @@ -1453,7 +1456,7 @@ test("getModel returns consistent results", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -1464,8 +1467,8 @@ test("getModel returns consistent results", async () => { Env.set("ANTHROPIC_API_KEY", "test-api-key") }, fn: async () => { - const model1 = await Provider.getModel("anthropic", "claude-sonnet-4-20250514") - const model2 = await Provider.getModel("anthropic", "claude-sonnet-4-20250514") + const model1 = await Provider.getModel(ProviderID.anthropic, ModelID.make("claude-sonnet-4-20250514")) + const model2 = await Provider.getModel(ProviderID.anthropic, ModelID.make("claude-sonnet-4-20250514")) expect(model1.providerID).toEqual(model2.providerID) expect(model1.id).toEqual(model2.id) 
expect(model1).toEqual(model2) @@ -1479,7 +1482,7 @@ test("provider name defaults to id when not in database", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "my-custom-id": { // no name specified @@ -1514,7 +1517,7 @@ test("ModelNotFoundError includes suggestions for typos", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -1526,7 +1529,7 @@ test("ModelNotFoundError includes suggestions for typos", async () => { }, fn: async () => { try { - await Provider.getModel("anthropic", "claude-sonet-4") // typo: sonet instead of sonnet + await Provider.getModel(ProviderID.anthropic, ModelID.make("claude-sonet-4")) // typo: sonet instead of sonnet expect(true).toBe(false) // Should not reach here } catch (e: any) { expect(e.data.suggestions).toBeDefined() @@ -1542,7 +1545,7 @@ test("ModelNotFoundError for provider includes suggestions", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -1554,7 +1557,7 @@ test("ModelNotFoundError for provider includes suggestions", async () => { }, fn: async () => { try { - await Provider.getModel("antropic", "claude-sonnet-4") // typo: antropic + await Provider.getModel(ProviderID.make("antropic"), ModelID.make("claude-sonnet-4")) // typo: antropic expect(true).toBe(false) // Should not reach here } catch (e: any) { expect(e.data.suggestions).toBeDefined() @@ -1570,7 +1573,7 @@ test("getProvider returns undefined for nonexistent provider", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -1578,7 +1581,7 @@ 
test("getProvider returns undefined for nonexistent provider", async () => { await Instance.provide({ directory: tmp.path, fn: async () => { - const provider = await Provider.getProvider("nonexistent") + const provider = await Provider.getProvider(ProviderID.make("nonexistent")) expect(provider).toBeUndefined() }, }) @@ -1590,7 +1593,7 @@ test("getProvider returns provider info", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -1601,9 +1604,9 @@ test("getProvider returns provider info", async () => { Env.set("ANTHROPIC_API_KEY", "test-api-key") }, fn: async () => { - const provider = await Provider.getProvider("anthropic") + const provider = await Provider.getProvider(ProviderID.anthropic) expect(provider).toBeDefined() - expect(provider?.id).toBe("anthropic") + expect(String(provider?.id)).toBe("anthropic") }, }) }) @@ -1614,7 +1617,7 @@ test("closest returns undefined when no partial match found", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -1625,7 +1628,7 @@ test("closest returns undefined when no partial match found", async () => { Env.set("ANTHROPIC_API_KEY", "test-api-key") }, fn: async () => { - const result = await Provider.closest("anthropic", ["nonexistent-xyz-model"]) + const result = await Provider.closest(ProviderID.anthropic, ["nonexistent-xyz-model"]) expect(result).toBeUndefined() }, }) @@ -1637,7 +1640,7 @@ test("closest checks multiple query terms in order", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -1649,7 +1652,7 @@ test("closest checks multiple query terms in order", async () => { }, fn: async () => { // First term won't match, 
second will - const result = await Provider.closest("anthropic", ["nonexistent", "haiku"]) + const result = await Provider.closest(ProviderID.anthropic, ["nonexistent", "haiku"]) expect(result).toBeDefined() expect(result?.modelID).toContain("haiku") }, @@ -1662,7 +1665,7 @@ test("model limit defaults to zero when not specified", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "no-limit": { name: "No Limit Provider", @@ -1699,7 +1702,7 @@ test("provider options are deeply merged", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { options: { @@ -1736,7 +1739,7 @@ test("custom model inherits npm package from models.dev provider config", async await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { openai: { models: { @@ -1772,7 +1775,7 @@ test("custom model inherits api.url from models.dev provider", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { openrouter: { models: { @@ -1816,7 +1819,7 @@ test("model variants are generated for reasoning models", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -1843,7 +1846,7 @@ test("model variants can be disabled via config", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -1881,7 +1884,7 @@ test("model 
variants can be customized via config", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -1922,7 +1925,7 @@ test("disabled key is stripped from variant config", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -1962,7 +1965,7 @@ test("all variants can be disabled via config", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -1999,7 +2002,7 @@ test("variant config merges with generated variants", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { anthropic: { models: { @@ -2039,7 +2042,7 @@ test("variants filtered in second pass for database models", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { openai: { models: { @@ -2077,7 +2080,7 @@ test("custom model with variants enabled and disabled", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "custom-reasoning": { name: "Custom Reasoning Provider", @@ -2134,7 +2137,7 @@ test("Google Vertex: retains baseURL for custom proxy", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "vertex-proxy": { name: "Vertex 
Proxy", @@ -2178,7 +2181,7 @@ test("Google Vertex: supports OpenAI compatible models", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "vertex-openai": { name: "Vertex OpenAI", @@ -2225,7 +2228,7 @@ test("cloudflare-ai-gateway loads with env variables", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", }), ) }, @@ -2250,7 +2253,7 @@ test("cloudflare-ai-gateway forwards config metadata options", async () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", provider: { "cloudflare-ai-gateway": { options: { diff --git a/packages/opencode/test/provider/transform.test.ts b/packages/opencode/test/provider/transform.test.ts index 2329846351..43d2ef294f 100644 --- a/packages/opencode/test/provider/transform.test.ts +++ b/packages/opencode/test/provider/transform.test.ts @@ -1,5 +1,6 @@ import { describe, expect, test } from "bun:test" import { ProviderTransform } from "../../src/provider/transform" +import { ModelID, ProviderID } from "../../src/provider/schema" const OUTPUT_TOKEN_MAX = 32000 @@ -740,8 +741,8 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => { const result = ProviderTransform.message( msgs, { - id: "deepseek/deepseek-chat", - providerID: "deepseek", + id: ModelID.make("deepseek/deepseek-chat"), + providerID: ProviderID.make("deepseek"), api: { id: "deepseek-chat", url: "https://api.deepseek.com", @@ -802,8 +803,8 @@ describe("ProviderTransform.message - DeepSeek reasoning content", () => { const result = ProviderTransform.message( msgs, { - id: "openai/gpt-4", - providerID: "openai", + id: ModelID.make("openai/gpt-4"), + providerID: ProviderID.make("openai"), api: { 
id: "gpt-4", url: "https://api.openai.com", @@ -1095,6 +1096,38 @@ describe("ProviderTransform.message - anthropic empty content filtering", () => expect(result[0].content[1]).toEqual({ type: "text", text: "Result" }) }) + test("filters empty content for bedrock provider", () => { + const bedrockModel = { + ...anthropicModel, + id: "amazon-bedrock/anthropic.claude-opus-4-6", + providerID: "amazon-bedrock", + api: { + id: "anthropic.claude-opus-4-6", + url: "https://bedrock-runtime.us-east-1.amazonaws.com", + npm: "@ai-sdk/amazon-bedrock", + }, + } + + const msgs = [ + { role: "user", content: "Hello" }, + { role: "assistant", content: "" }, + { + role: "assistant", + content: [ + { type: "text", text: "" }, + { type: "text", text: "Answer" }, + ], + }, + ] as any[] + + const result = ProviderTransform.message(msgs, bedrockModel, {}) + + expect(result).toHaveLength(2) + expect(result[0].content).toBe("Hello") + expect(result[1].content).toHaveLength(1) + expect(result[1].content[0]).toEqual({ type: "text", text: "Answer" }) + }) + test("does not filter for non-anthropic providers", () => { const openaiModel = { ...anthropicModel, @@ -1310,7 +1343,7 @@ describe("ProviderTransform.message - strip openai metadata when store=false", ( providerID: "opencode", api: { id: "opencode-test", - url: "https://api.opencode.ai", + url: "https://api.altimate.ai", npm: "@ai-sdk/openai-compatible", }, } @@ -1344,7 +1377,7 @@ describe("ProviderTransform.message - strip openai metadata when store=false", ( providerID: "opencode", api: { id: "opencode-test", - url: "https://api.opencode.ai", + url: "https://api.altimate.ai", npm: "@ai-sdk/openai-compatible", }, } @@ -2002,6 +2035,35 @@ describe("ProviderTransform.variants", () => { const result = ProviderTransform.variants(model) expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh"]) }) + + test("gpt-5.3-codex includes xhigh", () => { + const model = createMockModel({ + id: "gpt-5.3-codex", + providerID: 
"github-copilot", + api: { + id: "gpt-5.3-codex", + url: "https://api.githubcopilot.com", + npm: "@ai-sdk/github-copilot", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh"]) + }) + + test("gpt-5.4 includes xhigh", () => { + const model = createMockModel({ + id: "gpt-5.4", + release_date: "2026-03-05", + providerID: "github-copilot", + api: { + id: "gpt-5.4", + url: "https://api.githubcopilot.com", + npm: "@ai-sdk/github-copilot", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "xhigh"]) + }) }) describe("@ai-sdk/cerebras", () => { @@ -2450,4 +2512,144 @@ describe("ProviderTransform.variants", () => { expect(result).toEqual({}) }) }) + + describe("@jerome-benoit/sap-ai-provider-v2", () => { + test("anthropic models return thinking variants", () => { + const model = createMockModel({ + id: "sap-ai-core/anthropic--claude-sonnet-4", + providerID: "sap-ai-core", + api: { + id: "anthropic--claude-sonnet-4", + url: "https://api.ai.sap", + npm: "@jerome-benoit/sap-ai-provider-v2", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["high", "max"]) + expect(result.high).toEqual({ + thinking: { + type: "enabled", + budgetTokens: 16000, + }, + }) + expect(result.max).toEqual({ + thinking: { + type: "enabled", + budgetTokens: 31999, + }, + }) + }) + + test("anthropic 4.6 models return adaptive thinking variants", () => { + const model = createMockModel({ + id: "sap-ai-core/anthropic--claude-sonnet-4-6", + providerID: "sap-ai-core", + api: { + id: "anthropic--claude-sonnet-4-6", + url: "https://api.ai.sap", + npm: "@jerome-benoit/sap-ai-provider-v2", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high", "max"]) + expect(result.low).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "low", + }) + 
expect(result.max).toEqual({ + thinking: { + type: "adaptive", + }, + effort: "max", + }) + }) + + test("gemini 2.5 models return thinkingConfig variants", () => { + const model = createMockModel({ + id: "sap-ai-core/gcp--gemini-2.5-pro", + providerID: "sap-ai-core", + api: { + id: "gcp--gemini-2.5-pro", + url: "https://api.ai.sap", + npm: "@jerome-benoit/sap-ai-provider-v2", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["high", "max"]) + expect(result.high).toEqual({ + thinkingConfig: { + includeThoughts: true, + thinkingBudget: 16000, + }, + }) + expect(result.max).toEqual({ + thinkingConfig: { + includeThoughts: true, + thinkingBudget: 24576, + }, + }) + }) + + test("gpt models return reasoningEffort variants", () => { + const model = createMockModel({ + id: "sap-ai-core/azure-openai--gpt-4o", + providerID: "sap-ai-core", + api: { + id: "azure-openai--gpt-4o", + url: "https://api.ai.sap", + npm: "@jerome-benoit/sap-ai-provider-v2", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high"]) + expect(result.low).toEqual({ reasoningEffort: "low" }) + expect(result.high).toEqual({ reasoningEffort: "high" }) + }) + + test("o-series models return reasoningEffort variants", () => { + const model = createMockModel({ + id: "sap-ai-core/azure-openai--o3-mini", + providerID: "sap-ai-core", + api: { + id: "azure-openai--o3-mini", + url: "https://api.ai.sap", + npm: "@jerome-benoit/sap-ai-provider-v2", + }, + }) + const result = ProviderTransform.variants(model) + expect(Object.keys(result)).toEqual(["low", "medium", "high"]) + expect(result.low).toEqual({ reasoningEffort: "low" }) + expect(result.high).toEqual({ reasoningEffort: "high" }) + }) + + test("sonar models return empty object", () => { + const model = createMockModel({ + id: "sap-ai-core/perplexity--sonar-pro", + providerID: "sap-ai-core", + api: { + id: "perplexity--sonar-pro", + url: 
"https://api.ai.sap", + npm: "@jerome-benoit/sap-ai-provider-v2", + }, + }) + const result = ProviderTransform.variants(model) + expect(result).toEqual({}) + }) + + test("mistral models return empty object", () => { + const model = createMockModel({ + id: "sap-ai-core/mistral--mistral-large", + providerID: "sap-ai-core", + api: { + id: "mistral--mistral-large", + url: "https://api.ai.sap", + npm: "@jerome-benoit/sap-ai-provider-v2", + }, + }) + const result = ProviderTransform.variants(model) + expect(result).toEqual({}) + }) + }) }) diff --git a/packages/opencode/test/pty/pty-session.test.ts b/packages/opencode/test/pty/pty-session.test.ts new file mode 100644 index 0000000000..9063af872d --- /dev/null +++ b/packages/opencode/test/pty/pty-session.test.ts @@ -0,0 +1,88 @@ +import { describe, expect, test } from "bun:test" +import { Bus } from "../../src/bus" +import { Instance } from "../../src/project/instance" +import { Pty } from "../../src/pty" +import type { PtyID } from "../../src/pty/schema" +import { tmpdir } from "../fixture/fixture" +import { setTimeout as sleep } from "node:timers/promises" + +const wait = async (fn: () => boolean, ms = 2000) => { + const end = Date.now() + ms + while (Date.now() < end) { + if (fn()) return + await sleep(25) + } + throw new Error("timeout waiting for pty events") +} + +const pick = (log: Array<{ type: "created" | "exited" | "deleted"; id: PtyID }>, id: PtyID) => { + return log.filter((evt) => evt.id === id).map((evt) => evt.type) +} + +describe("pty", () => { + test("publishes created, exited, deleted in order for /bin/ls + remove", async () => { + if (process.platform === "win32") return + + await using dir = await tmpdir({ git: true }) + + await Instance.provide({ + directory: dir.path, + fn: async () => { + const log: Array<{ type: "created" | "exited" | "deleted"; id: PtyID }> = [] + const off = [ + Bus.subscribe(Pty.Event.Created, (evt) => log.push({ type: "created", id: evt.properties.info.id })), + 
Bus.subscribe(Pty.Event.Exited, (evt) => log.push({ type: "exited", id: evt.properties.id })), + Bus.subscribe(Pty.Event.Deleted, (evt) => log.push({ type: "deleted", id: evt.properties.id })), + ] + + let id: PtyID | undefined + try { + const info = await Pty.create({ command: "/bin/ls", title: "ls" }) + id = info.id + + await wait(() => pick(log, id!).includes("exited")) + + await Pty.remove(id) + await wait(() => pick(log, id!).length >= 3) + expect(pick(log, id!)).toEqual(["created", "exited", "deleted"]) + } finally { + off.forEach((x) => x()) + if (id) await Pty.remove(id) + } + }, + }) + }) + + test("publishes created, exited, deleted in order for /bin/sh + remove", async () => { + if (process.platform === "win32") return + + await using dir = await tmpdir({ git: true }) + + await Instance.provide({ + directory: dir.path, + fn: async () => { + const log: Array<{ type: "created" | "exited" | "deleted"; id: PtyID }> = [] + const off = [ + Bus.subscribe(Pty.Event.Created, (evt) => log.push({ type: "created", id: evt.properties.info.id })), + Bus.subscribe(Pty.Event.Exited, (evt) => log.push({ type: "exited", id: evt.properties.id })), + Bus.subscribe(Pty.Event.Deleted, (evt) => log.push({ type: "deleted", id: evt.properties.id })), + ] + + let id: PtyID | undefined + try { + const info = await Pty.create({ command: "/bin/sh", title: "sh" }) + id = info.id + + await sleep(100) + + await Pty.remove(id) + await wait(() => pick(log, id!).length >= 3) + expect(pick(log, id!)).toEqual(["created", "exited", "deleted"]) + } finally { + off.forEach((x) => x()) + if (id) await Pty.remove(id) + } + }, + }) + }) +}) diff --git a/packages/opencode/test/question/question.test.ts b/packages/opencode/test/question/question.test.ts index cf24faa7d2..f00afb09fd 100644 --- a/packages/opencode/test/question/question.test.ts +++ b/packages/opencode/test/question/question.test.ts @@ -1,7 +1,9 @@ import { test, expect } from "bun:test" import { Question } from "../../src/question" 
import { Instance } from "../../src/project/instance" +import { QuestionID } from "../../src/question/schema" import { tmpdir } from "../fixture/fixture" +import { SessionID } from "../../src/session/schema" test("ask - returns pending promise", async () => { await using tmp = await tmpdir({ git: true }) @@ -9,7 +11,7 @@ test("ask - returns pending promise", async () => { directory: tmp.path, fn: async () => { const promise = Question.ask({ - sessionID: "ses_test", + sessionID: SessionID.make("ses_test"), questions: [ { question: "What would you like to do?", @@ -43,7 +45,7 @@ test("ask - adds to pending list", async () => { ] Question.ask({ - sessionID: "ses_test", + sessionID: SessionID.make("ses_test"), questions, }) @@ -73,7 +75,7 @@ test("reply - resolves the pending ask with answers", async () => { ] const askPromise = Question.ask({ - sessionID: "ses_test", + sessionID: SessionID.make("ses_test"), questions, }) @@ -97,7 +99,7 @@ test("reply - removes from pending list", async () => { directory: tmp.path, fn: async () => { Question.ask({ - sessionID: "ses_test", + sessionID: SessionID.make("ses_test"), questions: [ { question: "What would you like to do?", @@ -130,7 +132,7 @@ test("reply - does nothing for unknown requestID", async () => { directory: tmp.path, fn: async () => { await Question.reply({ - requestID: "que_unknown", + requestID: QuestionID.make("que_unknown"), answers: [["Option 1"]], }) // Should not throw @@ -146,7 +148,7 @@ test("reject - throws RejectedError", async () => { directory: tmp.path, fn: async () => { const askPromise = Question.ask({ - sessionID: "ses_test", + sessionID: SessionID.make("ses_test"), questions: [ { question: "What would you like to do?", @@ -173,7 +175,7 @@ test("reject - removes from pending list", async () => { directory: tmp.path, fn: async () => { const askPromise = Question.ask({ - sessionID: "ses_test", + sessionID: SessionID.make("ses_test"), questions: [ { question: "What would you like to do?", @@ -203,7 
+205,7 @@ test("reject - does nothing for unknown requestID", async () => { await Instance.provide({ directory: tmp.path, fn: async () => { - await Question.reject("que_unknown") + await Question.reject(QuestionID.make("que_unknown")) // Should not throw }, }) @@ -236,7 +238,7 @@ test("ask - handles multiple questions", async () => { ] const askPromise = Question.ask({ - sessionID: "ses_test", + sessionID: SessionID.make("ses_test"), questions, }) @@ -261,7 +263,7 @@ test("list - returns all pending requests", async () => { directory: tmp.path, fn: async () => { Question.ask({ - sessionID: "ses_test1", + sessionID: SessionID.make("ses_test1"), questions: [ { question: "Question 1?", @@ -272,7 +274,7 @@ test("list - returns all pending requests", async () => { }) Question.ask({ - sessionID: "ses_test2", + sessionID: SessionID.make("ses_test2"), questions: [ { question: "Question 2?", diff --git a/packages/opencode/test/server/project-init-git.test.ts b/packages/opencode/test/server/project-init-git.test.ts new file mode 100644 index 0000000000..cc1ac0cbc9 --- /dev/null +++ b/packages/opencode/test/server/project-init-git.test.ts @@ -0,0 +1,119 @@ +import { afterEach, describe, expect, spyOn, test } from "bun:test" +import path from "path" +import { GlobalBus } from "../../src/bus/global" +import { Snapshot } from "../../src/snapshot" +import { InstanceBootstrap } from "../../src/project/bootstrap" +import { Instance } from "../../src/project/instance" +import { Server } from "../../src/server/server" +import { Filesystem } from "../../src/util/filesystem" +import { Log } from "../../src/util/log" +import { resetDatabase } from "../fixture/db" +import { tmpdir } from "../fixture/fixture" + +Log.init({ print: false }) + +afterEach(async () => { + await resetDatabase() +}) + +describe("project.initGit endpoint", () => { + test("initializes git and reloads immediately", async () => { + await using tmp = await tmpdir() + const app = Server.Default() + const seen: { 
directory?: string; payload: { type: string } }[] = [] + const fn = (evt: { directory?: string; payload: { type: string } }) => { + seen.push(evt) + } + const reload = Instance.reload + const reloadSpy = spyOn(Instance, "reload").mockImplementation((input) => reload(input)) + GlobalBus.on("event", fn) + + try { + const init = await app.request("/project/git/init", { + method: "POST", + headers: { + "x-opencode-directory": tmp.path, + }, + }) + const body = await init.json() + expect(init.status).toBe(200) + expect(body).toMatchObject({ + id: "global", + vcs: "git", + worktree: tmp.path, + }) + expect(reloadSpy).toHaveBeenCalledTimes(1) + expect(reloadSpy.mock.calls[0]?.[0]?.init).toBe(InstanceBootstrap) + expect(seen.some((evt) => evt.directory === tmp.path && evt.payload.type === "server.instance.disposed")).toBe( + true, + ) + expect(await Filesystem.exists(path.join(tmp.path, ".git", "opencode"))).toBe(false) + + const current = await app.request("/project/current", { + headers: { + "x-opencode-directory": tmp.path, + }, + }) + expect(current.status).toBe(200) + expect(await current.json()).toMatchObject({ + id: "global", + vcs: "git", + worktree: tmp.path, + }) + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + expect(await Snapshot.track()).toBeTruthy() + }, + }) + } finally { + reloadSpy.mockRestore() + GlobalBus.off("event", fn) + } + }) + + test("does not reload when the project is already git", async () => { + await using tmp = await tmpdir({ git: true }) + const app = Server.Default() + const seen: { directory?: string; payload: { type: string } }[] = [] + const fn = (evt: { directory?: string; payload: { type: string } }) => { + seen.push(evt) + } + const reload = Instance.reload + const reloadSpy = spyOn(Instance, "reload").mockImplementation((input) => reload(input)) + GlobalBus.on("event", fn) + + try { + const init = await app.request("/project/git/init", { + method: "POST", + headers: { + "x-opencode-directory": tmp.path, + }, 
+ }) + expect(init.status).toBe(200) + expect(await init.json()).toMatchObject({ + vcs: "git", + worktree: tmp.path, + }) + expect( + seen.filter((evt) => evt.directory === tmp.path && evt.payload.type === "server.instance.disposed").length, + ).toBe(0) + expect(reloadSpy).toHaveBeenCalledTimes(0) + + const current = await app.request("/project/current", { + headers: { + "x-opencode-directory": tmp.path, + }, + }) + expect(current.status).toBe(200) + expect(await current.json()).toMatchObject({ + vcs: "git", + worktree: tmp.path, + }) + } finally { + reloadSpy.mockRestore() + GlobalBus.off("event", fn) + } + }) +}) diff --git a/packages/opencode/test/server/session-messages.test.ts b/packages/opencode/test/server/session-messages.test.ts new file mode 100644 index 0000000000..ee4c51646f --- /dev/null +++ b/packages/opencode/test/server/session-messages.test.ts @@ -0,0 +1,119 @@ +import { describe, expect, test } from "bun:test" +import path from "path" +import { Instance } from "../../src/project/instance" +import { Server } from "../../src/server/server" +import { Session } from "../../src/session" +import { MessageV2 } from "../../src/session/message-v2" +import { MessageID, PartID, type SessionID } from "../../src/session/schema" +import { Log } from "../../src/util/log" + +const root = path.join(__dirname, "../..") +Log.init({ print: false }) + +async function fill(sessionID: SessionID, count: number, time = (i: number) => Date.now() + i) { + const ids = [] as MessageID[] + for (let i = 0; i < count; i++) { + const id = MessageID.ascending() + ids.push(id) + await Session.updateMessage({ + id, + sessionID, + role: "user", + time: { created: time(i) }, + agent: "test", + model: { providerID: "test", modelID: "test" }, + tools: {}, + mode: "", + } as unknown as MessageV2.Info) + await Session.updatePart({ + id: PartID.ascending(), + sessionID, + messageID: id, + type: "text", + text: `m${i}`, + }) + } + return ids +} + +describe("session messages endpoint", () => 
{ + test("returns cursor headers for older pages", async () => { + await Instance.provide({ + directory: root, + fn: async () => { + const session = await Session.create({}) + const ids = await fill(session.id, 5) + const app = Server.Default() + + const a = await app.request(`/session/${session.id}/message?limit=2`) + expect(a.status).toBe(200) + const aBody = (await a.json()) as MessageV2.WithParts[] + expect(aBody.map((item) => item.info.id)).toEqual(ids.slice(-2)) + const cursor = a.headers.get("x-next-cursor") + expect(cursor).toBeTruthy() + expect(a.headers.get("link")).toContain('rel="next"') + + const b = await app.request(`/session/${session.id}/message?limit=2&before=${encodeURIComponent(cursor!)}`) + expect(b.status).toBe(200) + const bBody = (await b.json()) as MessageV2.WithParts[] + expect(bBody.map((item) => item.info.id)).toEqual(ids.slice(-4, -2)) + + await Session.remove(session.id) + }, + }) + }) + + test("keeps full-history responses when limit is omitted", async () => { + await Instance.provide({ + directory: root, + fn: async () => { + const session = await Session.create({}) + const ids = await fill(session.id, 3) + const app = Server.Default() + + const res = await app.request(`/session/${session.id}/message`) + expect(res.status).toBe(200) + const body = (await res.json()) as MessageV2.WithParts[] + expect(body.map((item) => item.info.id)).toEqual(ids) + + await Session.remove(session.id) + }, + }) + }) + + test("rejects invalid cursors and missing sessions", async () => { + await Instance.provide({ + directory: root, + fn: async () => { + const session = await Session.create({}) + const app = Server.Default() + + const bad = await app.request(`/session/${session.id}/message?limit=2&before=bad`) + expect(bad.status).toBe(400) + + const miss = await app.request(`/session/ses_missing/message?limit=2`) + expect(miss.status).toBe(404) + + await Session.remove(session.id) + }, + }) + }) + + test("does not truncate large legacy limit requests", 
async () => { + await Instance.provide({ + directory: root, + fn: async () => { + const session = await Session.create({}) + await fill(session.id, 520) + const app = Server.Default() + + const res = await app.request(`/session/${session.id}/message?limit=510`) + expect(res.status).toBe(200) + const body = (await res.json()) as MessageV2.WithParts[] + expect(body).toHaveLength(510) + + await Session.remove(session.id) + }, + }) + }) +}) diff --git a/packages/opencode/test/server/session-select.test.ts b/packages/opencode/test/server/session-select.test.ts index 479be4a17f..a336f8133c 100644 --- a/packages/opencode/test/server/session-select.test.ts +++ b/packages/opencode/test/server/session-select.test.ts @@ -17,7 +17,7 @@ describe("tui.selectSession endpoint", () => { const session = await Session.create({}) // #when - const app = Server.App() + const app = Server.Default() const response = await app.request("/tui/select-session", { method: "POST", headers: { "Content-Type": "application/json" }, @@ -42,7 +42,7 @@ describe("tui.selectSession endpoint", () => { const nonExistentSessionID = "ses_nonexistent123" // #when - const app = Server.App() + const app = Server.Default() const response = await app.request("/tui/select-session", { method: "POST", headers: { "Content-Type": "application/json" }, @@ -63,7 +63,7 @@ describe("tui.selectSession endpoint", () => { const invalidSessionID = "invalid_session_id" // #when - const app = Server.App() + const app = Server.Default() const response = await app.request("/tui/select-session", { method: "POST", headers: { "Content-Type": "application/json" }, diff --git a/packages/opencode/test/session/compaction-loop.test.ts b/packages/opencode/test/session/compaction-loop.test.ts index d4722e6776..e762ca205e 100644 --- a/packages/opencode/test/session/compaction-loop.test.ts +++ b/packages/opencode/test/session/compaction-loop.test.ts @@ -382,7 +382,7 @@ function createModel(opts: { }): Provider.Model { return { id: 
"test-model", - providerID: "test", + providerID: "test" as any, name: "Test", limit: { context: opts.context, @@ -596,7 +596,7 @@ describe("session.compaction.prune with disabled config", () => { directory: tmp.path, fn: async () => { // Should return early without error - await SessionCompaction.prune({ sessionID: "nonexistent" }) + await SessionCompaction.prune({ sessionID: "nonexistent" as any }) }, }) }) diff --git a/packages/opencode/test/session/context-overflow.test.ts b/packages/opencode/test/session/context-overflow.test.ts index 25b3daf8ea..7af3354308 100644 --- a/packages/opencode/test/session/context-overflow.test.ts +++ b/packages/opencode/test/session/context-overflow.test.ts @@ -2,6 +2,9 @@ import { describe, expect, test } from "bun:test" import { APICallError } from "ai" import { MessageV2 } from "../../src/session/message-v2" +// Helper to bypass branded ProviderID type in tests +const pid = (id: string) => ({ providerID: id as any }) + describe("session.context-overflow", () => { // ─── ContextOverflowError.isInstance ──────────────────────────────── @@ -70,21 +73,21 @@ describe("session.context-overflow", () => { describe("fromError stream error detection", () => { test("stream error with context_length_exceeded code", () => { const input = { type: "error", error: { code: "context_length_exceeded" } } - const result = MessageV2.fromError(input, { providerID: "test" }) + const result = MessageV2.fromError(input, pid("test")) expect(result.name).toBe("ContextOverflowError") expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) test("non-overflow error code does not produce ContextOverflowError", () => { const input = { type: "error", error: { code: "insufficient_quota" } } - const result = MessageV2.fromError(input, { providerID: "test" }) + const result = MessageV2.fromError(input, pid("test")) expect(result.name).not.toBe("ContextOverflowError") expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(false) }) 
test("stream error as JSON string is parsed correctly", () => { const input = JSON.stringify({ type: "error", error: { code: "context_length_exceeded" } }) - const result = MessageV2.fromError(input, { providerID: "test" }) + const result = MessageV2.fromError(input, pid("test")) // fromError should handle the JSON string expect(result).toBeDefined() }) @@ -109,7 +112,7 @@ describe("session.context-overflow", () => { test("detects Anthropic overflow: prompt is too long", () => { const result = MessageV2.fromError( makeAPICallError("prompt is too long: 213462 tokens > 200000 maximum"), - { providerID: "anthropic" }, + pid("anthropic"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -118,7 +121,7 @@ describe("session.context-overflow", () => { test("detects OpenAI overflow: exceeds the context window", () => { const result = MessageV2.fromError( makeAPICallError("Your input exceeds the context window of this model"), - { providerID: "openai" }, + pid("openai"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -127,7 +130,7 @@ describe("session.context-overflow", () => { test("detects Gemini overflow: input token count exceeds maximum", () => { const result = MessageV2.fromError( makeAPICallError("The input token count (1196265) exceeds the maximum number of tokens allowed (1048575)"), - { providerID: "google" }, + pid("google"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -136,7 +139,7 @@ describe("session.context-overflow", () => { test("detects Groq overflow: reduce the length", () => { const result = MessageV2.fromError( makeAPICallError("Please reduce the length of the messages or completion"), - { providerID: "groq" }, + pid("groq"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -145,7 +148,7 @@ describe("session.context-overflow", () => { test("detects 400 no body as overflow", () => { const result = MessageV2.fromError( 
makeAPICallError("400 status code (no body)"), - { providerID: "cerebras" }, + pid("cerebras"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -154,7 +157,7 @@ describe("session.context-overflow", () => { test("detects 413 no body as overflow", () => { const result = MessageV2.fromError( makeAPICallError("413 status code (no body)", 413), - { providerID: "mistral" }, + pid("mistral"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -163,7 +166,7 @@ describe("session.context-overflow", () => { test("detects Bedrock overflow: input is too long", () => { const result = MessageV2.fromError( makeAPICallError("input is too long for requested model"), - { providerID: "bedrock" }, + pid("bedrock"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -172,7 +175,7 @@ describe("session.context-overflow", () => { test("detects OpenRouter overflow: maximum context length", () => { const result = MessageV2.fromError( makeAPICallError("maximum context length is 128000 tokens"), - { providerID: "openrouter" }, + pid("openrouter"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -181,7 +184,7 @@ describe("session.context-overflow", () => { test("detects Azure OpenAI overflow: the request was too long", () => { const result = MessageV2.fromError( makeAPICallError("The request was too long"), - { providerID: "openai" }, + pid("openai"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -189,7 +192,7 @@ describe("session.context-overflow", () => { test("detects Azure OpenAI overflow: maximum tokens for requested operation", () => { const result = MessageV2.fromError( makeAPICallError("maximum tokens for requested operation exceeded"), - { providerID: "openai" }, + pid("openai"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) @@ -199,7 +202,7 @@ describe("session.context-overflow", () => { test("does not classify 429 as 
context overflow", () => { const result = MessageV2.fromError( makeAPICallError("429 status code (no body)", 429), - { providerID: "test" }, + pid("test"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(false) }) @@ -207,7 +210,7 @@ describe("session.context-overflow", () => { test("does not classify rate limit error as overflow", () => { const result = MessageV2.fromError( makeAPICallError("Rate limit exceeded. Please retry after 30 seconds.", 429), - { providerID: "test" }, + pid("test"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(false) }) @@ -215,7 +218,7 @@ describe("session.context-overflow", () => { test("does not classify authentication error as overflow", () => { const result = MessageV2.fromError( makeAPICallError("Invalid API key", 401), - { providerID: "test" }, + pid("test"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(false) }) @@ -223,7 +226,7 @@ describe("session.context-overflow", () => { test("does not classify server error as overflow", () => { const result = MessageV2.fromError( makeAPICallError("Internal server error", 500), - { providerID: "test" }, + pid("test"), ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(false) }) @@ -233,43 +236,43 @@ describe("session.context-overflow", () => { describe("fromError edge cases", () => { test("handles null error input gracefully", () => { - const result = MessageV2.fromError(null, { providerID: "test" }) + const result = MessageV2.fromError(null, pid("test")) expect(result).toBeDefined() expect(result.name).toBe("UnknownError") }) test("handles undefined error input", () => { - const result = MessageV2.fromError(undefined, { providerID: "test" }) + const result = MessageV2.fromError(undefined, pid("test")) expect(result).toBeDefined() expect(result.name).toBe("UnknownError") }) test("handles numeric error input", () => { - const result = MessageV2.fromError(123, { providerID: "test" }) + const result = MessageV2.fromError(123, 
pid("test")) expect(result).toBeDefined() expect(result.name).toBe("UnknownError") }) test("handles string error input", () => { - const result = MessageV2.fromError("something broke", { providerID: "test" }) + const result = MessageV2.fromError("something broke", pid("test")) expect(result).toBeDefined() }) test("handles Error object with no stack", () => { const error = new Error("test error") error.stack = undefined - const result = MessageV2.fromError(error, { providerID: "test" }) + const result = MessageV2.fromError(error, pid("test")) expect(result).toBeDefined() }) test("handles error with empty message", () => { - const result = MessageV2.fromError(new Error(""), { providerID: "test" }) + const result = MessageV2.fromError(new Error(""), pid("test")) expect(result).toBeDefined() }) test("handles deeply nested error objects", () => { const error = { type: "error", error: { code: "unknown", nested: { deep: { value: true } } } } - const result = MessageV2.fromError(error, { providerID: "test" }) + const result = MessageV2.fromError(error, pid("test")) expect(result).toBeDefined() }) }) diff --git a/packages/opencode/test/session/llm.test.ts b/packages/opencode/test/session/llm.test.ts index a89a00ebc0..b9542088a1 100644 --- a/packages/opencode/test/session/llm.test.ts +++ b/packages/opencode/test/session/llm.test.ts @@ -7,10 +7,12 @@ import { Instance } from "../../src/project/instance" import { Provider } from "../../src/provider/provider" import { ProviderTransform } from "../../src/provider/transform" import { ModelsDev } from "../../src/provider/models" +import { ProviderID, ModelID } from "../../src/provider/schema" import { Filesystem } from "../../src/util/filesystem" import { tmpdir } from "../fixture/fixture" import type { Agent } from "../../src/agent/agent" import type { MessageV2 } from "../../src/session/message-v2" +import { SessionID, MessageID } from "../../src/session/schema" describe("session.llm.hasToolCalls", () => { test("returns false 
for empty messages array", () => { @@ -246,7 +248,7 @@ describe("session.llm.stream", () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", enabled_providers: [providerID], provider: { [providerID]: { @@ -264,8 +266,8 @@ describe("session.llm.stream", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - const resolved = await Provider.getModel(providerID, model.id) - const sessionID = "session-test-1" + const resolved = await Provider.getModel(ProviderID.make(providerID), ModelID.make(model.id)) + const sessionID = SessionID.make("session-test-1") const agent = { name: "test", mode: "primary", @@ -276,12 +278,12 @@ describe("session.llm.stream", () => { } satisfies Agent.Info const user = { - id: "user-1", + id: MessageID.make("user-1"), sessionID, role: "user", time: { created: Date.now() }, agent: agent.name, - model: { providerID, modelID: resolved.id }, + model: { providerID: ProviderID.make(providerID), modelID: resolved.id }, variant: "high", } satisfies MessageV2.User @@ -369,7 +371,7 @@ describe("session.llm.stream", () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", enabled_providers: ["openai"], provider: { openai: { @@ -394,8 +396,8 @@ describe("session.llm.stream", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - const resolved = await Provider.getModel("openai", model.id) - const sessionID = "session-test-2" + const resolved = await Provider.getModel(ProviderID.openai, ModelID.make(model.id)) + const sessionID = SessionID.make("session-test-2") const agent = { name: "test", mode: "primary", @@ -405,12 +407,12 @@ describe("session.llm.stream", () => { } satisfies Agent.Info const user = { - id: "user-2", + id: MessageID.make("user-2"), sessionID, role: "user", time: { created: 
Date.now() }, agent: agent.name, - model: { providerID: "openai", modelID: resolved.id }, + model: { providerID: ProviderID.make("openai"), modelID: resolved.id }, variant: "high", } satisfies MessageV2.User @@ -498,7 +500,7 @@ describe("session.llm.stream", () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", enabled_providers: [providerID], provider: { [providerID]: { @@ -516,8 +518,8 @@ describe("session.llm.stream", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - const resolved = await Provider.getModel(providerID, model.id) - const sessionID = "session-test-3" + const resolved = await Provider.getModel(ProviderID.make(providerID), ModelID.make(model.id)) + const sessionID = SessionID.make("session-test-3") const agent = { name: "test", mode: "primary", @@ -528,12 +530,12 @@ describe("session.llm.stream", () => { } satisfies Agent.Info const user = { - id: "user-3", + id: MessageID.make("user-3"), sessionID, role: "user", time: { created: Date.now() }, agent: agent.name, - model: { providerID, modelID: resolved.id }, + model: { providerID: ProviderID.make(providerID), modelID: resolved.id }, } satisfies MessageV2.User const stream = await LLM.stream({ @@ -599,7 +601,7 @@ describe("session.llm.stream", () => { await Bun.write( path.join(dir, "opencode.json"), JSON.stringify({ - $schema: "https://opencode.ai/config.json", + $schema: "https://altimate.ai/config.json", enabled_providers: [providerID], provider: { [providerID]: { @@ -617,8 +619,8 @@ describe("session.llm.stream", () => { await Instance.provide({ directory: tmp.path, fn: async () => { - const resolved = await Provider.getModel(providerID, model.id) - const sessionID = "session-test-4" + const resolved = await Provider.getModel(ProviderID.make(providerID), ModelID.make(model.id)) + const sessionID = SessionID.make("session-test-4") const agent = { name: "test", 
mode: "primary", @@ -629,12 +631,12 @@ describe("session.llm.stream", () => { } satisfies Agent.Info const user = { - id: "user-4", + id: MessageID.make("user-4"), sessionID, role: "user", time: { created: Date.now() }, agent: agent.name, - model: { providerID, modelID: resolved.id }, + model: { providerID: ProviderID.make(providerID), modelID: resolved.id }, } satisfies MessageV2.User const stream = await LLM.stream({ diff --git a/packages/opencode/test/session/message-v2.test.ts b/packages/opencode/test/session/message-v2.test.ts index c043754bdb..e9c6cb729b 100644 --- a/packages/opencode/test/session/message-v2.test.ts +++ b/packages/opencode/test/session/message-v2.test.ts @@ -2,11 +2,14 @@ import { describe, expect, test } from "bun:test" import { APICallError } from "ai" import { MessageV2 } from "../../src/session/message-v2" import type { Provider } from "../../src/provider/provider" +import { ModelID, ProviderID } from "../../src/provider/schema" +import { SessionID, MessageID, PartID } from "../../src/session/schema" -const sessionID = "session" +const sessionID = SessionID.make("session") +const providerID = ProviderID.make("test") const model: Provider.Model = { - id: "test-model", - providerID: "test", + id: ModelID.make("test-model"), + providerID, api: { id: "test-model", url: "https://example.com", @@ -60,7 +63,7 @@ function userInfo(id: string): MessageV2.User { role: "user", time: { created: 0 }, agent: "user", - model: { providerID: "test", modelID: "test" }, + model: { providerID, modelID: ModelID.make("test") }, tools: {}, mode: "", } as unknown as MessageV2.User @@ -97,9 +100,9 @@ function assistantInfo( function basePart(messageID: string, id: string) { return { - id, + id: PartID.make(id), sessionID, - messageID, + messageID: MessageID.make(messageID), } } @@ -794,7 +797,7 @@ describe("session.message-v2.fromError", () => { code: "context_length_exceeded", }, } - const result = MessageV2.fromError(input, { providerID: "test" }) + const 
result = MessageV2.fromError(input, { providerID }) expect(result).toStrictEqual({ name: "ContextOverflowError", @@ -829,7 +832,7 @@ describe("session.message-v2.fromError", () => { message: item.code === "invalid_prompt" ? item.message : undefined, }, } - const result = MessageV2.fromError(input, { providerID: "test" }) + const result = MessageV2.fromError(input, { providerID }) expect(result).toStrictEqual({ name: "APIError", @@ -842,35 +845,6 @@ describe("session.message-v2.fromError", () => { }) }) - test("maps github-copilot 403 to reauth guidance", () => { - const error = new APICallError({ - message: "forbidden", - url: "https://api.githubcopilot.com/v1/chat/completions", - requestBodyValues: {}, - statusCode: 403, - responseHeaders: { "content-type": "application/json" }, - responseBody: '{"error":"forbidden"}', - isRetryable: false, - }) - - const result = MessageV2.fromError(error, { providerID: "github-copilot" }) - - expect(result).toStrictEqual({ - name: "APIError", - data: { - message: - "Please reauthenticate with the copilot provider to ensure your credentials work properly with OpenCode.", - statusCode: 403, - isRetryable: false, - responseHeaders: { "content-type": "application/json" }, - responseBody: '{"error":"forbidden"}', - metadata: { - url: "https://api.githubcopilot.com/v1/chat/completions", - }, - }, - }) - }) - test("detects context overflow from APICallError provider messages", () => { const cases = [ "prompt is too long: 213462 tokens > 200000 maximum", @@ -890,7 +864,7 @@ describe("session.message-v2.fromError", () => { responseHeaders: { "content-type": "application/json" }, isRetryable: false, }) - const result = MessageV2.fromError(error, { providerID: "test" }) + const result = MessageV2.fromError(error, { providerID }) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(true) }) }) @@ -905,14 +879,14 @@ describe("session.message-v2.fromError", () => { responseHeaders: { "content-type": "application/json" }, 
isRetryable: false, }), - { providerID: "test" }, + { providerID }, ) expect(MessageV2.ContextOverflowError.isInstance(result)).toBe(false) expect(MessageV2.APIError.isInstance(result)).toBe(true) }) test("serializes unknown inputs", () => { - const result = MessageV2.fromError(123, { providerID: "test" }) + const result = MessageV2.fromError(123, { providerID }) expect(result).toStrictEqual({ name: "UnknownError", diff --git a/packages/opencode/test/session/messages-pagination.test.ts b/packages/opencode/test/session/messages-pagination.test.ts new file mode 100644 index 0000000000..3614b17d08 --- /dev/null +++ b/packages/opencode/test/session/messages-pagination.test.ts @@ -0,0 +1,115 @@ +import { describe, expect, test } from "bun:test" +import path from "path" +import { Instance } from "../../src/project/instance" +import { Session } from "../../src/session" +import { MessageV2 } from "../../src/session/message-v2" +import { MessageID, PartID, type SessionID } from "../../src/session/schema" +import { Log } from "../../src/util/log" + +const root = path.join(__dirname, "../..") +Log.init({ print: false }) + +async function fill(sessionID: SessionID, count: number, time = (i: number) => Date.now() + i) { + const ids = [] as MessageID[] + for (let i = 0; i < count; i++) { + const id = MessageID.ascending() + ids.push(id) + await Session.updateMessage({ + id, + sessionID, + role: "user", + time: { created: time(i) }, + agent: "test", + model: { providerID: "test", modelID: "test" }, + tools: {}, + mode: "", + } as unknown as MessageV2.Info) + await Session.updatePart({ + id: PartID.ascending(), + sessionID, + messageID: id, + type: "text", + text: `m${i}`, + }) + } + return ids +} + +describe("session message pagination", () => { + test("pages backward with opaque cursors", async () => { + await Instance.provide({ + directory: root, + fn: async () => { + const session = await Session.create({}) + const ids = await fill(session.id, 6) + + const a = await 
MessageV2.page({ sessionID: session.id, limit: 2 }) + expect(a.items.map((item) => item.info.id)).toEqual(ids.slice(-2)) + expect(a.items.every((item) => item.parts.length === 1)).toBe(true) + expect(a.more).toBe(true) + expect(a.cursor).toBeTruthy() + + const b = await MessageV2.page({ sessionID: session.id, limit: 2, before: a.cursor! }) + expect(b.items.map((item) => item.info.id)).toEqual(ids.slice(-4, -2)) + expect(b.more).toBe(true) + expect(b.cursor).toBeTruthy() + + const c = await MessageV2.page({ sessionID: session.id, limit: 2, before: b.cursor! }) + expect(c.items.map((item) => item.info.id)).toEqual(ids.slice(0, 2)) + expect(c.more).toBe(false) + expect(c.cursor).toBeUndefined() + + await Session.remove(session.id) + }, + }) + }) + + test("keeps stream order newest first", async () => { + await Instance.provide({ + directory: root, + fn: async () => { + const session = await Session.create({}) + const ids = await fill(session.id, 5) + + const items = await Array.fromAsync(MessageV2.stream(session.id)) + expect(items.map((item) => item.info.id)).toEqual(ids.slice().reverse()) + + await Session.remove(session.id) + }, + }) + }) + + test("accepts cursors generated from fractional timestamps", async () => { + await Instance.provide({ + directory: root, + fn: async () => { + const session = await Session.create({}) + const ids = await fill(session.id, 4, (i) => 1000.5 + i) + + const a = await MessageV2.page({ sessionID: session.id, limit: 2 }) + const b = await MessageV2.page({ sessionID: session.id, limit: 2, before: a.cursor! 
}) + + expect(a.items.map((item) => item.info.id)).toEqual(ids.slice(-2)) + expect(b.items.map((item) => item.info.id)).toEqual(ids.slice(0, 2)) + + await Session.remove(session.id) + }, + }) + }) + + test("scopes get by session id", async () => { + await Instance.provide({ + directory: root, + fn: async () => { + const a = await Session.create({}) + const b = await Session.create({}) + const [id] = await fill(a.id, 1) + + await expect(MessageV2.get({ sessionID: b.id, messageID: id })).rejects.toMatchObject({ name: "NotFoundError" }) + + await Session.remove(a.id) + await Session.remove(b.id) + }, + }) + }) +}) diff --git a/packages/opencode/test/session/prompt.test.ts b/packages/opencode/test/session/prompt.test.ts index e8a8c65b03..3986271dab 100644 --- a/packages/opencode/test/session/prompt.test.ts +++ b/packages/opencode/test/session/prompt.test.ts @@ -2,6 +2,7 @@ import path from "path" import { describe, expect, test } from "bun:test" import { fileURLToPath } from "url" import { Instance } from "../../src/project/instance" +import { ModelID, ProviderID } from "../../src/provider/schema" import { Session } from "../../src/session" import { MessageV2 } from "../../src/session/message-v2" import { SessionPrompt } from "../../src/session/prompt" @@ -173,7 +174,7 @@ describe("session.prompt agent variant", () => { const other = await SessionPrompt.prompt({ sessionID: session.id, agent: "build", - model: { providerID: "opencode", modelID: "kimi-k2.5-free" }, + model: { providerID: ProviderID.make("opencode"), modelID: ModelID.make("kimi-k2.5-free") }, noReply: true, parts: [{ type: "text", text: "hello" }], }) @@ -187,7 +188,7 @@ describe("session.prompt agent variant", () => { parts: [{ type: "text", text: "hello again" }], }) if (match.info.role !== "user") throw new Error("expected user message") - expect(match.info.model).toEqual({ providerID: "openai", modelID: "gpt-5.2" }) + expect(match.info.model).toEqual({ providerID: ProviderID.make("openai"), modelID: 
ModelID.make("gpt-5.2") }) expect(match.info.variant).toBe("xhigh") const override = await SessionPrompt.prompt({ diff --git a/packages/opencode/test/session/regression.test.ts b/packages/opencode/test/session/regression.test.ts new file mode 100644 index 0000000000..078cf8b9b3 --- /dev/null +++ b/packages/opencode/test/session/regression.test.ts @@ -0,0 +1,369 @@ +// altimate_change start — regression tests for telemetry, compaction, and processor fixes +// @ts-nocheck +import { describe, test, expect, beforeEach } from "bun:test" +import { Telemetry } from "../../src/telemetry" +import { SessionCompaction } from "../../src/session/compaction" +import { SessionID } from "../../src/session/schema" +import { Instance } from "../../src/project/instance" +import { Log } from "../../src/util/log" +import { tmpdir } from "../fixture/fixture" + +Log.init({ print: false }) + +// --------------------------------------------------------------------------- +// Telemetry event capture +// --------------------------------------------------------------------------- +let trackedEvents: Telemetry.Event[] = [] +const originalTrack = Telemetry.track.bind(Telemetry) + +function captureTrack(event: Telemetry.Event) { + trackedEvents.push(event) + originalTrack(event) +} + +beforeEach(async () => { + trackedEvents = [] + await Telemetry.shutdown() +}) + +// --------------------------------------------------------------------------- +// 1. 
toolCallCount — must be derived from actual tool parts +// --------------------------------------------------------------------------- +describe("toolCallCount accumulation", () => { + test("tool parts are counted per message step", () => { + // Simulates the prompt.ts logic: count parts of type "tool" and accumulate + let toolCallCount = 0 + + // Step 1: message with 3 tool parts + const step1Parts = [ + { type: "tool", tool: "read" }, + { type: "text", text: "hello" }, + { type: "tool", tool: "edit" }, + { type: "tool", tool: "bash" }, + ] + toolCallCount += step1Parts.filter((p) => p.type === "tool").length + expect(toolCallCount).toBe(3) + + // Step 2: message with 1 tool part + const step2Parts = [ + { type: "text", text: "thinking..." }, + { type: "tool", tool: "grep" }, + ] + toolCallCount += step2Parts.filter((p) => p.type === "tool").length + expect(toolCallCount).toBe(4) + + // Step 3: message with no tool parts + const step3Parts = [{ type: "text", text: "done" }] + toolCallCount += step3Parts.filter((p) => p.type === "tool").length + expect(toolCallCount).toBe(4) + }) + + test("outcome is 'abandoned' when toolCallCount is 0 and cost is 0", () => { + const sessionTotalCost = 0 + const toolCallCount = 0 + const sessionHadError = false + const aborted = false + + const outcome = aborted + ? "aborted" + : sessionHadError + ? "error" + : sessionTotalCost === 0 && toolCallCount === 0 + ? "abandoned" + : "completed" + + expect(outcome).toBe("abandoned") + }) + + test("outcome is 'completed' when toolCallCount > 0", () => { + const sessionTotalCost = 0 + const toolCallCount = 1 + const sessionHadError = false + const aborted = false + + const outcome = aborted + ? "aborted" + : sessionHadError + ? "error" + : sessionTotalCost === 0 && toolCallCount === 0 + ? "abandoned" + : "completed" + + expect(outcome).toBe("completed") + }) +}) + +// --------------------------------------------------------------------------- +// 2. 
text-end must preserve original start time +// --------------------------------------------------------------------------- +describe("text-end time preservation", () => { + test("end handler preserves original start time from text-start", () => { + const startTime = Date.now() - 5000 // 5 seconds ago + + // Simulate text-start: creates time with start + const currentText: any = { + type: "text", + text: "", + time: { start: startTime }, + } + + // Simulate text-end: should preserve start, add end + const endTime = Date.now() + currentText.time = { + start: currentText.time?.start ?? endTime, + end: endTime, + } + + expect(currentText.time.start).toBe(startTime) + expect(currentText.time.end).toBe(endTime) + expect(currentText.time.end).toBeGreaterThanOrEqual(currentText.time.start) + }) + + test("end handler falls back to Date.now() when start is missing", () => { + // Edge case: time object exists but start is undefined + const currentText: any = { + type: "text", + text: "", + time: undefined, + } + + const endTime = Date.now() + currentText.time = { + start: currentText.time?.start ?? endTime, + end: endTime, + } + + expect(currentText.time.start).toBe(endTime) + expect(currentText.time.end).toBe(endTime) + }) + + test("duration is non-negative", () => { + const startTime = Date.now() - 100 + const currentText: any = { + type: "text", + text: "some output", + time: { start: startTime }, + } + + const endTime = Date.now() + currentText.time = { + start: currentText.time?.start ?? endTime, + end: endTime, + } + + const duration = currentText.time.end - currentText.time.start + expect(duration).toBeGreaterThanOrEqual(0) + }) +}) + +// --------------------------------------------------------------------------- +// 3. 
emergencySessionEndFired — prevent duplicate session_end telemetry +// --------------------------------------------------------------------------- +describe("emergency session end deduplication", () => { + test("session_end fires only once during normal completion", () => { + let emergencySessionEndFired = false + const events: string[] = [] + + const emergencySessionEnd = () => { + if (emergencySessionEndFired) return + emergencySessionEndFired = true + events.push("emergency_session_end") + } + + // Simulate normal completion path + if (!emergencySessionEndFired) { + emergencySessionEndFired = true + events.push("normal_session_end") + } + + // Now emergency handler should be a no-op + emergencySessionEnd() + + expect(events).toEqual(["normal_session_end"]) + expect(events).toHaveLength(1) + }) + + test("emergency handler fires if normal completion didn't happen", () => { + let emergencySessionEndFired = false + const events: string[] = [] + + const emergencySessionEnd = () => { + if (emergencySessionEndFired) return + emergencySessionEndFired = true + events.push("emergency_session_end") + } + + // Simulate crash — emergency handler fires + emergencySessionEnd() + + expect(events).toEqual(["emergency_session_end"]) + expect(events).toHaveLength(1) + }) + + test("process.off removes emergency listeners after normal completion", () => { + const listeners: Map<string, Function[]> = new Map() + + // Simulate process.once + const addListener = (event: string, fn: Function) => { + const fns = listeners.get(event) ?? [] + fns.push(fn) + listeners.set(event, fns) + } + + // Simulate process.off + const removeListener = (event: string, fn: Function) => { + const fns = listeners.get(event) ?? 
[] + listeners.set( + event, + fns.filter((f) => f !== fn), + ) + } + + const handler = () => {} + addListener("beforeExit", handler) + addListener("exit", handler) + + expect(listeners.get("beforeExit")).toHaveLength(1) + expect(listeners.get("exit")).toHaveLength(1) + + // Normal completion removes listeners + removeListener("beforeExit", handler) + removeListener("exit", handler) + + expect(listeners.get("beforeExit")).toHaveLength(0) + expect(listeners.get("exit")).toHaveLength(0) + }) +}) + +// --------------------------------------------------------------------------- +// 4. Compaction circuit breaker — stop after 3 attempts +// --------------------------------------------------------------------------- +describe("compaction circuit breaker", () => { + test("circuit breaker activates after 3 attempts", () => { + const compactionAttempts = new Map<string, number>() + const sessionID = "ses_test-circuit-breaker" + const results: boolean[] = [] + + for (let i = 0; i < 5; i++) { + const attempt = (compactionAttempts.get(sessionID) ?? 0) + 1 + compactionAttempts.set(sessionID, attempt) + + if (attempt > 3) { + results.push(false) // circuit breaker tripped + } else { + results.push(true) // compaction proceeds + } + } + + expect(results).toEqual([true, true, true, false, false]) + }) + + test("circuit breaker is per-session", () => { + const compactionAttempts = new Map<string, number>() + const session1 = "ses_session-1" + const session2 = "ses_session-2" + + // Session 1: 4 attempts + for (let i = 0; i < 4; i++) { + const attempt = (compactionAttempts.get(session1) ?? 0) + 1 + compactionAttempts.set(session1, attempt) + } + + // Session 2: 1 attempt + const attempt2 = (compactionAttempts.get(session2) ?? 0) + 1 + compactionAttempts.set(session2, attempt2) + + expect(compactionAttempts.get(session1)).toBe(4) + expect(compactionAttempts.get(session2)).toBe(1) + + // Session 1 is over the limit, session 2 is not + expect(compactionAttempts.get(session1)! 
> 3).toBe(true) + expect(compactionAttempts.get(session2)! > 3).toBe(false) + }) + + test("abort signal clears attempt counter", () => { + const compactionAttempts = new Map<string, number>() + const sessionID = "ses_abort-test" + const controller = new AbortController() + + // Record 2 attempts + compactionAttempts.set(sessionID, 2) + controller.signal.addEventListener( + "abort", + () => { + compactionAttempts.delete(sessionID) + }, + { once: true }, + ) + + expect(compactionAttempts.get(sessionID)).toBe(2) + + // Abort clears the counter + controller.abort() + expect(compactionAttempts.has(sessionID)).toBe(false) + }) +}) + +// --------------------------------------------------------------------------- +// 5. compactionCount wired to agent_outcome telemetry +// --------------------------------------------------------------------------- +describe("compaction count in telemetry", () => { + test("compactionCount increments on each compact result", () => { + let compactionCount = 0 + const results = ["continue", "compact", "continue", "compact", "compact", "stop"] + + for (const result of results) { + if (result === "compact") { + compactionCount++ + } + if (result === "stop") break + } + + expect(compactionCount).toBe(3) + }) + + test("compactionCount is included in agent_outcome event", () => { + const compactionCount = 2 + const event = { + type: "agent_outcome" as const, + timestamp: Date.now(), + session_id: "ses_test", + agent: "build", + tool_calls: 5, + generations: 3, + duration_ms: 10000, + cost: 0.05, + compactions: compactionCount, + outcome: "completed" as const, + } + + expect(event.compactions).toBe(2) + expect(event.compactions).not.toBe(0) // regression: was hardcoded to 0 + }) +}) + +// --------------------------------------------------------------------------- +// 6. 
Telemetry lazy import caching +// --------------------------------------------------------------------------- +describe("telemetry lazy import cache", () => { + test("cached value is returned on subsequent calls", async () => { + let importCount = 0 + let cache: any = undefined + + async function getTelemetry() { + if (cache) return cache + importCount++ + cache = { track: () => {}, init: () => {} } // simulated module + return cache + } + + const first = await getTelemetry() + const second = await getTelemetry() + const third = await getTelemetry() + + expect(importCount).toBe(1) + expect(first).toBe(second) // strict reference equality + expect(second).toBe(third) + }) +}) +// altimate_change end diff --git a/packages/opencode/test/session/retry.test.ts b/packages/opencode/test/session/retry.test.ts index 7a598fb8a3..9621a4a022 100644 --- a/packages/opencode/test/session/retry.test.ts +++ b/packages/opencode/test/session/retry.test.ts @@ -4,6 +4,9 @@ import { APICallError } from "ai" import { setTimeout as sleep } from "node:timers/promises" import { SessionRetry } from "../../src/session/retry" import { MessageV2 } from "../../src/session/message-v2" +import { ProviderID } from "../../src/provider/schema" + +const providerID = ProviderID.make("test") function apiError(headers?: Record<string, string>): MessageV2.APIError { return new MessageV2.APIError({ @@ -173,7 +176,7 @@ describe("session.message-v2.fromError", () => { .then((res) => res.text()) .catch((e) => e) - const result = MessageV2.fromError(error, { providerID: "test" }) + const result = MessageV2.fromError(error, { providerID }) expect(MessageV2.APIError.isInstance(result)).toBe(true) expect((result as MessageV2.APIError).data.isRetryable).toBe(true) @@ -198,7 +201,7 @@ describe("session.message-v2.fromError", () => { test("converts token refresh failure to ProviderAuthError", () => { const error = new Error("Anthropic OAuth token refresh failed (HTTP 401). 
Try re-authenticating: altimate-code auth login anthropic") - const result = MessageV2.fromError(error, { providerID: "anthropic" }) + const result = MessageV2.fromError(error, { providerID: "anthropic" as any }) expect(result.name).toBe("ProviderAuthError") expect((result as any).data.providerID).toBe("anthropic") @@ -207,7 +210,7 @@ describe("session.message-v2.fromError", () => { test("converts codex token refresh failure to ProviderAuthError", () => { const error = new Error("Codex OAuth token refresh failed (HTTP 403). Try re-authenticating: altimate-code auth login openai") - const result = MessageV2.fromError(error, { providerID: "openai" }) + const result = MessageV2.fromError(error, { providerID: "openai" as any }) expect(result.name).toBe("ProviderAuthError") expect((result as any).data.providerID).toBe("openai") @@ -215,7 +218,7 @@ describe("session.message-v2.fromError", () => { test("provides descriptive message for generic Error with no message", () => { const error = new Error() - const result = MessageV2.fromError(error, { providerID: "test" }) + const result = MessageV2.fromError(error, { providerID: "test" as any }) expect(result.name).toBe("UnknownError") // Should not be just "Error" — should include stack or context @@ -225,7 +228,7 @@ describe("session.message-v2.fromError", () => { test("provides descriptive message for TypeError with no message", () => { const error = new TypeError() - const result = MessageV2.fromError(error, { providerID: "test" }) + const result = MessageV2.fromError(error, { providerID: "test" as any }) expect(result.name).toBe("UnknownError") expect((result as any).data.message).toContain("TypeError") @@ -241,7 +244,7 @@ describe("session.message-v2.fromError", () => { responseBody: '{"error":"boom"}', isRetryable: false, }) - const result = MessageV2.fromError(error, { providerID: "openai" }) as MessageV2.APIError + const result = MessageV2.fromError(error, { providerID: ProviderID.make("openai") }) as 
MessageV2.APIError expect(result.data.isRetryable).toBe(true) }) }) diff --git a/packages/opencode/test/session/revert-compact.test.ts b/packages/opencode/test/session/revert-compact.test.ts index de2b14573f..fb37a3a8dc 100644 --- a/packages/opencode/test/session/revert-compact.test.ts +++ b/packages/opencode/test/session/revert-compact.test.ts @@ -1,12 +1,13 @@ import { describe, expect, test, beforeEach, afterEach } from "bun:test" import path from "path" import { Session } from "../../src/session" +import { ModelID, ProviderID } from "../../src/provider/schema" import { SessionRevert } from "../../src/session/revert" import { SessionCompaction } from "../../src/session/compaction" import { MessageV2 } from "../../src/session/message-v2" import { Log } from "../../src/util/log" import { Instance } from "../../src/project/instance" -import { Identifier } from "../../src/id/id" +import { MessageID, PartID } from "../../src/session/schema" import { tmpdir } from "../fixture/fixture" const projectRoot = path.join(__dirname, "../..") @@ -24,13 +25,13 @@ describe("revert + compact workflow", () => { // Create a user message const userMsg1 = await Session.updateMessage({ - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "user", sessionID, agent: "default", model: { - providerID: "openai", - modelID: "gpt-4", + providerID: ProviderID.make("openai"), + modelID: ModelID.make("gpt-4"), }, time: { created: Date.now(), @@ -39,7 +40,7 @@ describe("revert + compact workflow", () => { // Add a text part to the user message await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMsg1.id, sessionID, type: "text", @@ -48,7 +49,7 @@ describe("revert + compact workflow", () => { // Create an assistant response message const assistantMsg1: MessageV2.Assistant = { - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "assistant", sessionID, mode: "default", @@ -64,8 +65,8 @@ describe("revert + 
compact workflow", () => { reasoning: 0, cache: { read: 0, write: 0 }, }, - modelID: "gpt-4", - providerID: "openai", + modelID: ModelID.make("gpt-4"), + providerID: ProviderID.make("openai"), parentID: userMsg1.id, time: { created: Date.now(), @@ -76,7 +77,7 @@ describe("revert + compact workflow", () => { // Add a text part to the assistant message await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: assistantMsg1.id, sessionID, type: "text", @@ -85,13 +86,13 @@ describe("revert + compact workflow", () => { // Create another user message const userMsg2 = await Session.updateMessage({ - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "user", sessionID, agent: "default", model: { - providerID: "openai", - modelID: "gpt-4", + providerID: ProviderID.make("openai"), + modelID: ModelID.make("gpt-4"), }, time: { created: Date.now(), @@ -99,7 +100,7 @@ describe("revert + compact workflow", () => { }) await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMsg2.id, sessionID, type: "text", @@ -108,7 +109,7 @@ describe("revert + compact workflow", () => { // Create another assistant response const assistantMsg2: MessageV2.Assistant = { - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "assistant", sessionID, mode: "default", @@ -124,8 +125,8 @@ describe("revert + compact workflow", () => { reasoning: 0, cache: { read: 0, write: 0 }, }, - modelID: "gpt-4", - providerID: "openai", + modelID: ModelID.make("gpt-4"), + providerID: ProviderID.make("openai"), parentID: userMsg2.id, time: { created: Date.now(), @@ -135,7 +136,7 @@ describe("revert + compact workflow", () => { await Session.updateMessage(assistantMsg2) await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: assistantMsg2.id, sessionID, type: "text", @@ -200,13 +201,13 @@ describe("revert + compact workflow", () => { // Create 
initial messages const userMsg = await Session.updateMessage({ - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "user", sessionID, agent: "default", model: { - providerID: "openai", - modelID: "gpt-4", + providerID: ProviderID.make("openai"), + modelID: ModelID.make("gpt-4"), }, time: { created: Date.now(), @@ -214,7 +215,7 @@ describe("revert + compact workflow", () => { }) await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: userMsg.id, sessionID, type: "text", @@ -222,7 +223,7 @@ describe("revert + compact workflow", () => { }) const assistantMsg: MessageV2.Assistant = { - id: Identifier.ascending("message"), + id: MessageID.ascending(), role: "assistant", sessionID, mode: "default", @@ -238,8 +239,8 @@ describe("revert + compact workflow", () => { reasoning: 0, cache: { read: 0, write: 0 }, }, - modelID: "gpt-4", - providerID: "openai", + modelID: ModelID.make("gpt-4"), + providerID: ProviderID.make("openai"), parentID: userMsg.id, time: { created: Date.now(), @@ -249,7 +250,7 @@ describe("revert + compact workflow", () => { await Session.updateMessage(assistantMsg) await Session.updatePart({ - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID: assistantMsg.id, sessionID, type: "text", diff --git a/packages/opencode/test/session/session.test.ts b/packages/opencode/test/session/session.test.ts index aa9ca05d04..2332586223 100644 --- a/packages/opencode/test/session/session.test.ts +++ b/packages/opencode/test/session/session.test.ts @@ -5,7 +5,7 @@ import { Bus } from "../../src/bus" import { Log } from "../../src/util/log" import { Instance } from "../../src/project/instance" import { MessageV2 } from "../../src/session/message-v2" -import { Identifier } from "../../src/id/id" +import { MessageID, PartID } from "../../src/session/schema" const projectRoot = path.join(__dirname, "../..") Log.init({ print: false }) @@ -81,7 +81,7 @@ describe("step-finish token propagation 
via Bus event", () => { fn: async () => { const session = await Session.create({}) - const messageID = Identifier.ascending("message") + const messageID = MessageID.ascending() await Session.updateMessage({ id: messageID, sessionID: session.id, @@ -107,7 +107,7 @@ describe("step-finish token propagation via Bus event", () => { } const partInput = { - id: Identifier.ascending("part"), + id: PartID.ascending(), messageID, sessionID: session.id, type: "step-finish" as const, diff --git a/packages/opencode/test/session/structured-output.test.ts b/packages/opencode/test/session/structured-output.test.ts index 2be4257dc7..f6131b149b 100644 --- a/packages/opencode/test/session/structured-output.test.ts +++ b/packages/opencode/test/session/structured-output.test.ts @@ -1,6 +1,7 @@ import { describe, expect, test } from "bun:test" import { MessageV2 } from "../../src/session/message-v2" import { SessionPrompt } from "../../src/session/prompt" +import { SessionID, MessageID } from "../../src/session/schema" describe("structured-output.OutputFormat", () => { test("parses text format", () => { @@ -95,8 +96,8 @@ describe("structured-output.StructuredOutputError", () => { describe("structured-output.UserMessage", () => { test("user message accepts outputFormat", () => { const result = MessageV2.User.safeParse({ - id: "test-id", - sessionID: "test-session", + id: MessageID.ascending(), + sessionID: SessionID.descending(), role: "user", time: { created: Date.now() }, agent: "default", @@ -111,8 +112,8 @@ describe("structured-output.UserMessage", () => { test("user message works without outputFormat (optional)", () => { const result = MessageV2.User.safeParse({ - id: "test-id", - sessionID: "test-session", + id: MessageID.ascending(), + sessionID: SessionID.descending(), role: "user", time: { created: Date.now() }, agent: "default", @@ -124,10 +125,10 @@ describe("structured-output.UserMessage", () => { describe("structured-output.AssistantMessage", () => { const 
baseAssistantMessage = { - id: "test-id", - sessionID: "test-session", + id: MessageID.ascending(), + sessionID: SessionID.descending(), role: "assistant" as const, - parentID: "parent-id", + parentID: MessageID.ascending(), modelID: "claude-3", providerID: "anthropic", mode: "default", diff --git a/packages/opencode/test/share/share-next.test.ts b/packages/opencode/test/share/share-next.test.ts new file mode 100644 index 0000000000..5be5d02450 --- /dev/null +++ b/packages/opencode/test/share/share-next.test.ts @@ -0,0 +1,76 @@ +import { test, expect, mock } from "bun:test" +import { ShareNext } from "../../src/share/share-next" +import { AccessToken, Account, AccountID, OrgID } from "../../src/account" +import { Config } from "../../src/config/config" + +test("ShareNext.request uses legacy share API without active org account", async () => { + const originalActive = Account.active + const originalConfigGet = Config.get + + Account.active = mock(() => undefined) + Config.get = mock(async () => ({ enterprise: { url: "https://legacy-share.example.com" } })) + + try { + const req = await ShareNext.request() + + expect(req.api.create).toBe("/api/share") + expect(req.api.sync("shr_123")).toBe("/api/share/shr_123/sync") + expect(req.api.remove("shr_123")).toBe("/api/share/shr_123") + expect(req.api.data("shr_123")).toBe("/api/share/shr_123/data") + expect(req.baseUrl).toBe("https://legacy-share.example.com") + expect(req.headers).toEqual({}) + } finally { + Account.active = originalActive + Config.get = originalConfigGet + } +}) + +test("ShareNext.request uses org share API with auth headers when account is active", async () => { + const originalActive = Account.active + const originalToken = Account.token + + Account.active = mock(() => ({ + id: AccountID.make("account-1"), + email: "user@example.com", + url: "https://control.example.com", + active_org_id: OrgID.make("org-1"), + })) + Account.token = mock(async () => AccessToken.make("st_test_token")) + + try { + const 
req = await ShareNext.request() + + expect(req.api.create).toBe("/api/shares") + expect(req.api.sync("shr_123")).toBe("/api/shares/shr_123/sync") + expect(req.api.remove("shr_123")).toBe("/api/shares/shr_123") + expect(req.api.data("shr_123")).toBe("/api/shares/shr_123/data") + expect(req.baseUrl).toBe("https://control.example.com") + expect(req.headers).toEqual({ + authorization: "Bearer st_test_token", + "x-org-id": "org-1", + }) + } finally { + Account.active = originalActive + Account.token = originalToken + } +}) + +test("ShareNext.request fails when org account has no token", async () => { + const originalActive = Account.active + const originalToken = Account.token + + Account.active = mock(() => ({ + id: AccountID.make("account-1"), + email: "user@example.com", + url: "https://control.example.com", + active_org_id: OrgID.make("org-1"), + })) + Account.token = mock(async () => undefined) + + try { + await expect(ShareNext.request()).rejects.toThrow("No active account token available for sharing") + } finally { + Account.active = originalActive + Account.token = originalToken + } +}) diff --git a/packages/opencode/test/skill/skill.test.ts b/packages/opencode/test/skill/skill.test.ts index 2264723a09..06ee4a4a09 100644 --- a/packages/opencode/test/skill/skill.test.ts +++ b/packages/opencode/test/skill/skill.test.ts @@ -362,7 +362,7 @@ name: opencode-skill description: A skill in the .opencode/skill directory. --- -# OpenCode Skill +# Altimate Code Skill `, ) await Bun.write( @@ -372,7 +372,7 @@ name: opencode-skill description: A skill in the .opencode/skills directory. 
--- -# OpenCode Skill +# Altimate Code Skill `, ) }, diff --git a/packages/opencode/test/storage/db.test.ts b/packages/opencode/test/storage/db.test.ts new file mode 100644 index 0000000000..601289e58e --- /dev/null +++ b/packages/opencode/test/storage/db.test.ts @@ -0,0 +1,14 @@ +import { describe, expect, test } from "bun:test" +import path from "path" +import { Installation } from "../../src/installation" +import { Database } from "../../src/storage/db" + +describe("Database.Path", () => { + test("returns database path for the current channel", () => { + const file = path.basename(Database.Path) + const expected = ["latest", "beta"].includes(Installation.CHANNEL) + ? "opencode.db" + : `opencode-${Installation.CHANNEL.replace(/[^a-zA-Z0-9._-]/g, "-")}.db` + expect(file).toBe(expected) + }) +}) diff --git a/packages/opencode/test/storage/json-migration.test.ts b/packages/opencode/test/storage/json-migration.test.ts index b70c9e1ebe..a714f11473 100644 --- a/packages/opencode/test/storage/json-migration.test.ts +++ b/packages/opencode/test/storage/json-migration.test.ts @@ -8,8 +8,10 @@ import { readFileSync, readdirSync } from "fs" import { JsonMigration } from "../../src/storage/json-migration" import { Global } from "../../src/global" import { ProjectTable } from "../../src/project/project.sql" +import { ProjectID } from "../../src/project/schema" import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../../src/session/session.sql" import { SessionShareTable } from "../../src/share/share.sql" +import { SessionID, MessageID, PartID } from "../../src/session/schema" // Test fixtures const fixtures = { @@ -84,6 +86,7 @@ function createTestDb() { .map((entry) => ({ sql: readFileSync(path.join(dir, entry.name, "migration.sql"), "utf-8"), timestamp: Number(entry.name.split("_")[0]), + name: entry.name, })) .sort((a, b) => a.timestamp - b.timestamp) migrate(drizzle({ client: sqlite }), migrations) @@ -122,7 +125,7 @@ describe("JSON to SQLite 
migration", () => { const db = drizzle({ client: sqlite }) const projects = db.select().from(ProjectTable).all() expect(projects.length).toBe(1) - expect(projects[0].id).toBe("proj_test123abc") + expect(projects[0].id).toBe(ProjectID.make("proj_test123abc")) expect(projects[0].worktree).toBe("/test/path") expect(projects[0].name).toBe("Test Project") expect(projects[0].sandboxes).toEqual(["/test/sandbox"]) @@ -147,7 +150,7 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const projects = db.select().from(ProjectTable).all() expect(projects.length).toBe(1) - expect(projects[0].id).toBe("proj_filename") // Uses filename, not JSON id + expect(projects[0].id).toBe(ProjectID.make("proj_filename")) // Uses filename, not JSON id }) test("migrates project with commands", async () => { @@ -168,7 +171,7 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const projects = db.select().from(ProjectTable).all() expect(projects.length).toBe(1) - expect(projects[0].id).toBe("proj_with_commands") + expect(projects[0].id).toBe(ProjectID.make("proj_with_commands")) expect(projects[0].commands).toEqual({ start: "npm run dev" }) }) @@ -189,7 +192,7 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const projects = db.select().from(ProjectTable).all() expect(projects.length).toBe(1) - expect(projects[0].id).toBe("proj_no_commands") + expect(projects[0].id).toBe(ProjectID.make("proj_no_commands")) expect(projects[0].commands).toBeNull() }) @@ -218,8 +221,8 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const sessions = db.select().from(SessionTable).all() expect(sessions.length).toBe(1) - expect(sessions[0].id).toBe("ses_test456def") - expect(sessions[0].project_id).toBe("proj_test123abc") + expect(sessions[0].id).toBe(SessionID.make("ses_test456def")) + expect(sessions[0].project_id).toBe(ProjectID.make("proj_test123abc")) 
expect(sessions[0].slug).toBe("test-session") expect(sessions[0].title).toBe("Test Session Title") expect(sessions[0].summary_additions).toBe(10) @@ -252,11 +255,11 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const messages = db.select().from(MessageTable).all() expect(messages.length).toBe(1) - expect(messages[0].id).toBe("msg_test789ghi") + expect(messages[0].id).toBe(MessageID.make("msg_test789ghi")) const parts = db.select().from(PartTable).all() expect(parts.length).toBe(1) - expect(parts[0].id).toBe("prt_testabc123") + expect(parts[0].id).toBe(PartID.make("prt_testabc123")) }) test("migrates legacy parts without ids in body", async () => { @@ -292,16 +295,16 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const messages = db.select().from(MessageTable).all() expect(messages.length).toBe(1) - expect(messages[0].id).toBe("msg_test789ghi") - expect(messages[0].session_id).toBe("ses_test456def") + expect(messages[0].id).toBe(MessageID.make("msg_test789ghi")) + expect(messages[0].session_id).toBe(SessionID.make("ses_test456def")) expect(messages[0].data).not.toHaveProperty("id") expect(messages[0].data).not.toHaveProperty("sessionID") const parts = db.select().from(PartTable).all() expect(parts.length).toBe(1) - expect(parts[0].id).toBe("prt_testabc123") - expect(parts[0].message_id).toBe("msg_test789ghi") - expect(parts[0].session_id).toBe("ses_test456def") + expect(parts[0].id).toBe(PartID.make("prt_testabc123")) + expect(parts[0].message_id).toBe(MessageID.make("msg_test789ghi")) + expect(parts[0].session_id).toBe(SessionID.make("ses_test456def")) expect(parts[0].data).not.toHaveProperty("id") expect(parts[0].data).not.toHaveProperty("messageID") expect(parts[0].data).not.toHaveProperty("sessionID") @@ -333,8 +336,8 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const messages = db.select().from(MessageTable).all() 
expect(messages.length).toBe(1) - expect(messages[0].id).toBe("msg_from_filename") // Uses filename, not JSON id - expect(messages[0].session_id).toBe("ses_test456def") + expect(messages[0].id).toBe(MessageID.make("msg_from_filename")) // Uses filename, not JSON id + expect(messages[0].session_id).toBe(SessionID.make("ses_test456def")) }) test("uses paths for part id and messageID when JSON has different values", async () => { @@ -371,8 +374,8 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const parts = db.select().from(PartTable).all() expect(parts.length).toBe(1) - expect(parts[0].id).toBe("prt_from_filename") // Uses filename, not JSON id - expect(parts[0].message_id).toBe("msg_realmsgid") // Uses parent dir, not JSON messageID + expect(parts[0].id).toBe(PartID.make("prt_from_filename")) // Uses filename, not JSON id + expect(parts[0].message_id).toBe(MessageID.make("msg_realmsgid")) // Uses parent dir, not JSON messageID }) test("skips orphaned sessions (no parent project)", async () => { @@ -424,8 +427,8 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const sessions = db.select().from(SessionTable).all() expect(sessions.length).toBe(1) - expect(sessions[0].id).toBe("ses_migrated") - expect(sessions[0].project_id).toBe(gitBasedProjectID) // Uses directory, not stale JSON + expect(sessions[0].id).toBe(SessionID.make("ses_migrated")) + expect(sessions[0].project_id).toBe(ProjectID.make(gitBasedProjectID)) // Uses directory, not stale JSON }) test("uses filename for session id when JSON has different value", async () => { @@ -456,8 +459,8 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const sessions = db.select().from(SessionTable).all() expect(sessions.length).toBe(1) - expect(sessions[0].id).toBe("ses_from_filename") // Uses filename, not JSON id - expect(sessions[0].project_id).toBe("proj_test123abc") + 
expect(sessions[0].id).toBe(SessionID.make("ses_from_filename")) // Uses filename, not JSON id + expect(sessions[0].project_id).toBe(ProjectID.make("proj_test123abc")) }) test("is idempotent (running twice doesn't duplicate)", async () => { @@ -642,7 +645,7 @@ describe("JSON to SQLite migration", () => { const db = drizzle({ client: sqlite }) const projects = db.select().from(ProjectTable).all() expect(projects.length).toBe(1) - expect(projects[0].id).toBe("proj_test123abc") + expect(projects[0].id).toBe(ProjectID.make("proj_test123abc")) }) test("skips invalid todo entries while preserving source positions", async () => { diff --git a/packages/opencode/test/tool/apply_patch.test.ts b/packages/opencode/test/tool/apply_patch.test.ts index f81723fee0..4e276517f1 100644 --- a/packages/opencode/test/tool/apply_patch.test.ts +++ b/packages/opencode/test/tool/apply_patch.test.ts @@ -4,10 +4,11 @@ import * as fs from "fs/promises" import { ApplyPatchTool } from "../../src/tool/apply_patch" import { Instance } from "../../src/project/instance" import { tmpdir } from "../fixture/fixture" +import { SessionID, MessageID } from "../../src/session/schema" const baseCtx = { - sessionID: "test", - messageID: "", + sessionID: SessionID.make("ses_test"), + messageID: MessageID.make(""), callID: "", agent: "build", abort: AbortSignal.any([]), diff --git a/packages/opencode/test/tool/bash.test.ts b/packages/opencode/test/tool/bash.test.ts index ac93016927..f947398b37 100644 --- a/packages/opencode/test/tool/bash.test.ts +++ b/packages/opencode/test/tool/bash.test.ts @@ -7,10 +7,11 @@ import { Filesystem } from "../../src/util/filesystem" import { tmpdir } from "../fixture/fixture" import type { PermissionNext } from "../../src/permission/next" import { Truncate } from "../../src/tool/truncation" +import { SessionID, MessageID } from "../../src/session/schema" const ctx = { - sessionID: "test", - messageID: "", + sessionID: SessionID.make("ses_test"), + messageID: MessageID.make(""), 
callID: "", agent: "build", abort: AbortSignal.any([]), diff --git a/packages/opencode/test/tool/edit.test.ts b/packages/opencode/test/tool/edit.test.ts index c3cf0404b9..b0ee95ff6f 100644 --- a/packages/opencode/test/tool/edit.test.ts +++ b/packages/opencode/test/tool/edit.test.ts @@ -5,10 +5,11 @@ import { EditTool } from "../../src/tool/edit" import { Instance } from "../../src/project/instance" import { tmpdir } from "../fixture/fixture" import { FileTime } from "../../src/file/time" +import { SessionID, MessageID } from "../../src/session/schema" const ctx = { - sessionID: "test-edit-session", - messageID: "", + sessionID: SessionID.make("ses_test-edit-session"), + messageID: MessageID.make(""), callID: "", agent: "build", abort: AbortSignal.any([]), @@ -451,6 +452,189 @@ describe("tool.edit", () => { }) }) + describe("line endings", () => { + const old = "alpha\nbeta\ngamma" + const next = "alpha\nbeta-updated\ngamma" + const alt = "alpha\nbeta\nomega" + + const normalize = (text: string, ending: "\n" | "\r\n") => { + const normalized = text.replaceAll("\r\n", "\n") + if (ending === "\n") return normalized + return normalized.replaceAll("\n", "\r\n") + } + + const count = (content: string) => { + const crlf = content.match(/\r\n/g)?.length ?? 0 + const lf = content.match(/\n/g)?.length ?? 
0 + return { + crlf, + lf: lf - crlf, + } + } + + const expectLf = (content: string) => { + const counts = count(content) + expect(counts.crlf).toBe(0) + expect(counts.lf).toBeGreaterThan(0) + } + + const expectCrlf = (content: string) => { + const counts = count(content) + expect(counts.lf).toBe(0) + expect(counts.crlf).toBeGreaterThan(0) + } + + type Input = { + content: string + oldString: string + newString: string + replaceAll?: boolean + } + + const apply = async (input: Input) => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write(path.join(dir, "test.txt"), input.content) + }, + }) + + return await Instance.provide({ + directory: tmp.path, + fn: async () => { + const edit = await EditTool.init() + const filePath = path.join(tmp.path, "test.txt") + FileTime.read(ctx.sessionID, filePath) + await edit.execute( + { + filePath, + oldString: input.oldString, + newString: input.newString, + replaceAll: input.replaceAll, + }, + ctx, + ) + return await Bun.file(filePath).text() + }, + }) + } + + test("preserves LF with LF multi-line strings", async () => { + const content = normalize(old + "\n", "\n") + const output = await apply({ + content, + oldString: normalize(old, "\n"), + newString: normalize(next, "\n"), + }) + expect(output).toBe(normalize(next + "\n", "\n")) + expectLf(output) + }) + + test("preserves CRLF with CRLF multi-line strings", async () => { + const content = normalize(old + "\n", "\r\n") + const output = await apply({ + content, + oldString: normalize(old, "\r\n"), + newString: normalize(next, "\r\n"), + }) + expect(output).toBe(normalize(next + "\n", "\r\n")) + expectCrlf(output) + }) + + test("preserves LF when old/new use CRLF", async () => { + const content = normalize(old + "\n", "\n") + const output = await apply({ + content, + oldString: normalize(old, "\r\n"), + newString: normalize(next, "\r\n"), + }) + expect(output).toBe(normalize(next + "\n", "\n")) + expectLf(output) + }) + + test("preserves CRLF when 
old/new use LF", async () => { + const content = normalize(old + "\n", "\r\n") + const output = await apply({ + content, + oldString: normalize(old, "\n"), + newString: normalize(next, "\n"), + }) + expect(output).toBe(normalize(next + "\n", "\r\n")) + expectCrlf(output) + }) + + test("preserves LF when newString uses CRLF", async () => { + const content = normalize(old + "\n", "\n") + const output = await apply({ + content, + oldString: normalize(old, "\n"), + newString: normalize(next, "\r\n"), + }) + expect(output).toBe(normalize(next + "\n", "\n")) + expectLf(output) + }) + + test("preserves CRLF when newString uses LF", async () => { + const content = normalize(old + "\n", "\r\n") + const output = await apply({ + content, + oldString: normalize(old, "\r\n"), + newString: normalize(next, "\n"), + }) + expect(output).toBe(normalize(next + "\n", "\r\n")) + expectCrlf(output) + }) + + test("preserves LF with mixed old/new line endings", async () => { + const content = normalize(old + "\n", "\n") + const output = await apply({ + content, + oldString: "alpha\nbeta\r\ngamma", + newString: "alpha\r\nbeta\nomega", + }) + expect(output).toBe(normalize(alt + "\n", "\n")) + expectLf(output) + }) + + test("preserves CRLF with mixed old/new line endings", async () => { + const content = normalize(old + "\n", "\r\n") + const output = await apply({ + content, + oldString: "alpha\r\nbeta\ngamma", + newString: "alpha\nbeta\r\nomega", + }) + expect(output).toBe(normalize(alt + "\n", "\r\n")) + expectCrlf(output) + }) + + test("replaceAll preserves LF for multi-line blocks", async () => { + const blockOld = "alpha\nbeta" + const blockNew = "alpha\nbeta-updated" + const content = normalize(blockOld + "\n" + blockOld + "\n", "\n") + const output = await apply({ + content, + oldString: normalize(blockOld, "\n"), + newString: normalize(blockNew, "\n"), + replaceAll: true, + }) + expect(output).toBe(normalize(blockNew + "\n" + blockNew + "\n", "\n")) + expectLf(output) + }) + + 
test("replaceAll preserves CRLF for multi-line blocks", async () => { + const blockOld = "alpha\nbeta" + const blockNew = "alpha\nbeta-updated" + const content = normalize(blockOld + "\n" + blockOld + "\n", "\r\n") + const output = await apply({ + content, + oldString: normalize(blockOld, "\r\n"), + newString: normalize(blockNew, "\r\n"), + replaceAll: true, + }) + expect(output).toBe(normalize(blockNew + "\n" + blockNew + "\n", "\r\n")) + expectCrlf(output) + }) + }) + describe("concurrent editing", () => { test("serializes concurrent edits to same file", async () => { await using tmp = await tmpdir() diff --git a/packages/opencode/test/tool/external-directory.test.ts b/packages/opencode/test/tool/external-directory.test.ts index a75f767b3b..58e53e5839 100644 --- a/packages/opencode/test/tool/external-directory.test.ts +++ b/packages/opencode/test/tool/external-directory.test.ts @@ -4,10 +4,11 @@ import type { Tool } from "../../src/tool/tool" import { Instance } from "../../src/project/instance" import { assertExternalDirectory } from "../../src/tool/external-directory" import type { PermissionNext } from "../../src/permission/next" +import { SessionID, MessageID } from "../../src/session/schema" const baseCtx: Omit<Tool.Context, "ask"> = { - sessionID: "test", - messageID: "", + sessionID: SessionID.make("ses_test"), + messageID: MessageID.make(""), callID: "", agent: "build", abort: AbortSignal.any([]), diff --git a/packages/opencode/test/tool/feedback-submit.test.ts b/packages/opencode/test/tool/feedback-submit.test.ts index d2b2e7de14..e4a7d4654d 100644 --- a/packages/opencode/test/tool/feedback-submit.test.ts +++ b/packages/opencode/test/tool/feedback-submit.test.ts @@ -92,7 +92,7 @@ const ctx = { messages: [], metadata: () => {}, ask: async () => {}, -} +} as any // --------------------------------------------------------------------------- // Tests diff --git a/packages/opencode/test/tool/fixtures/models-api.json 
b/packages/opencode/test/tool/fixtures/models-api.json index 391e783699..98c1bcdd65 100644 --- a/packages/opencode/test/tool/fixtures/models-api.json +++ b/packages/opencode/test/tool/fixtures/models-api.json @@ -20977,9 +20977,9 @@ "id": "opencode", "env": ["OPENCODE_API_KEY"], "npm": "@ai-sdk/openai-compatible", - "api": "https://opencode.ai/zen/v1", - "name": "OpenCode Zen", - "doc": "https://opencode.ai/docs/zen", + "api": "https://altimate.ai/zen/v1", + "name": "Altimate Code Zen", + "doc": "https://altimate.ai/docs/zen", "models": { "glm-4.7": { "id": "glm-4.7", diff --git a/packages/opencode/test/tool/grep.test.ts b/packages/opencode/test/tool/grep.test.ts index e774580df6..e03b1752ec 100644 --- a/packages/opencode/test/tool/grep.test.ts +++ b/packages/opencode/test/tool/grep.test.ts @@ -3,10 +3,11 @@ import path from "path" import { GrepTool } from "../../src/tool/grep" import { Instance } from "../../src/project/instance" import { tmpdir } from "../fixture/fixture" +import { SessionID, MessageID } from "../../src/session/schema" const ctx = { - sessionID: "test", - messageID: "", + sessionID: SessionID.make("ses_test"), + messageID: MessageID.make(""), callID: "", agent: "build", abort: AbortSignal.any([]), diff --git a/packages/opencode/test/tool/question.test.ts b/packages/opencode/test/tool/question.test.ts index 4a436186db..9157aaa9a4 100644 --- a/packages/opencode/test/tool/question.test.ts +++ b/packages/opencode/test/tool/question.test.ts @@ -2,10 +2,11 @@ import { describe, expect, test, spyOn, beforeEach, afterEach } from "bun:test" import { z } from "zod" import { QuestionTool } from "../../src/tool/question" import * as QuestionModule from "../../src/question" +import { SessionID, MessageID } from "../../src/session/schema" const ctx = { - sessionID: "test-session", - messageID: "test-message", + sessionID: SessionID.make("ses_test-session"), + messageID: MessageID.make("test-message"), callID: "test-call", agent: "test-agent", abort: 
AbortSignal.any([]), diff --git a/packages/opencode/test/tool/read.test.ts b/packages/opencode/test/tool/read.test.ts index 8deb725d73..af5174e49c 100644 --- a/packages/opencode/test/tool/read.test.ts +++ b/packages/opencode/test/tool/read.test.ts @@ -6,12 +6,13 @@ import { Filesystem } from "../../src/util/filesystem" import { tmpdir } from "../fixture/fixture" import { PermissionNext } from "../../src/permission/next" import { Agent } from "../../src/agent/agent" +import { SessionID, MessageID } from "../../src/session/schema" const FIXTURES_DIR = path.join(import.meta.dir, "fixtures") const ctx = { - sessionID: "test", - messageID: "", + sessionID: SessionID.make("ses_test"), + messageID: MessageID.make(""), callID: "", agent: "build", abort: AbortSignal.any([]), diff --git a/packages/opencode/test/tool/skill.test.ts b/packages/opencode/test/tool/skill.test.ts index d5057ba9e7..5bcdb6c2b9 100644 --- a/packages/opencode/test/tool/skill.test.ts +++ b/packages/opencode/test/tool/skill.test.ts @@ -6,10 +6,11 @@ import type { Tool } from "../../src/tool/tool" import { Instance } from "../../src/project/instance" import { SkillTool } from "../../src/tool/skill" import { tmpdir } from "../fixture/fixture" +import { SessionID, MessageID } from "../../src/session/schema" const baseCtx: Omit<Tool.Context, "ask"> = { - sessionID: "test", - messageID: "", + sessionID: SessionID.make("ses_test"), + messageID: MessageID.make(""), callID: "", agent: "build", abort: AbortSignal.any([]), @@ -45,7 +46,7 @@ description: Skill for tool tests. 
fn: async () => { const tool = await SkillTool.init() const skillPath = path.join(tmp.path, ".opencode", "skill", "tool-skill", "SKILL.md") - expect(tool.description).toContain(`<location>${pathToFileURL(skillPath).href}</location>`) + expect(tool.description).toContain(`**tool-skill**: Skill for tool tests.`) }, }) } finally { diff --git a/packages/opencode/test/tool/webfetch.test.ts b/packages/opencode/test/tool/webfetch.test.ts index 0214700fed..088f3dd16d 100644 --- a/packages/opencode/test/tool/webfetch.test.ts +++ b/packages/opencode/test/tool/webfetch.test.ts @@ -2,12 +2,13 @@ import { describe, expect, test } from "bun:test" import path from "path" import { Instance } from "../../src/project/instance" import { WebFetchTool } from "../../src/tool/webfetch" +import { SessionID, MessageID } from "../../src/session/schema" const projectRoot = path.join(import.meta.dir, "../..") const ctx = { - sessionID: "test", - messageID: "message", + sessionID: SessionID.make("ses_test"), + messageID: MessageID.make("message"), callID: "", agent: "build", abort: AbortSignal.any([]), diff --git a/packages/opencode/test/tool/write.test.ts b/packages/opencode/test/tool/write.test.ts index 695d48ccbb..b93ab4e853 100644 --- a/packages/opencode/test/tool/write.test.ts +++ b/packages/opencode/test/tool/write.test.ts @@ -4,10 +4,11 @@ import fs from "fs/promises" import { WriteTool } from "../../src/tool/write" import { Instance } from "../../src/project/instance" import { tmpdir } from "../fixture/fixture" +import { SessionID, MessageID } from "../../src/session/schema" const ctx = { - sessionID: "test-write-session", - messageID: "", + sessionID: SessionID.make("ses_test-write-session"), + messageID: MessageID.make(""), callID: "", agent: "build", abort: AbortSignal.any([]), diff --git a/packages/opencode/test/util/data-url.test.ts b/packages/opencode/test/util/data-url.test.ts new file mode 100644 index 0000000000..b8148285c7 --- /dev/null +++ 
b/packages/opencode/test/util/data-url.test.ts @@ -0,0 +1,14 @@ +import { describe, expect, test } from "bun:test" +import { decodeDataUrl } from "../../src/util/data-url" + +describe("decodeDataUrl", () => { + test("decodes base64 data URLs", () => { + const body = '{\n "ok": true\n}\n' + const url = `data:text/plain;base64,${Buffer.from(body).toString("base64")}` + expect(decodeDataUrl(url)).toBe(body) + }) + + test("decodes plain data URLs", () => { + expect(decodeDataUrl("data:text/plain,hello%20world")).toBe("hello world") + }) +}) diff --git a/packages/opencode/test/util/effect-zod.test.ts b/packages/opencode/test/util/effect-zod.test.ts new file mode 100644 index 0000000000..4004ca2d23 --- /dev/null +++ b/packages/opencode/test/util/effect-zod.test.ts @@ -0,0 +1,61 @@ +import { describe, expect, test } from "bun:test" +import { Schema } from "effect" + +import { zod } from "../../src/util/effect-zod" + +describe("util.effect-zod", () => { + test("converts class schemas for route dto shapes", () => { + class Method extends Schema.Class<Method>("ProviderAuthMethod")({ + type: Schema.Union([Schema.Literal("oauth"), Schema.Literal("api")]), + label: Schema.String, + }) {} + + const out = zod(Method) + + expect(out.meta()?.ref).toBe("ProviderAuthMethod") + expect( + out.parse({ + type: "oauth", + label: "OAuth", + }), + ).toEqual({ + type: "oauth", + label: "OAuth", + }) + }) + + test("converts structs with optional fields, arrays, and records", () => { + const out = zod( + Schema.Struct({ + foo: Schema.optional(Schema.String), + bar: Schema.Array(Schema.Number), + baz: Schema.Record(Schema.String, Schema.Boolean), + }), + ) + + expect( + out.parse({ + bar: [1, 2], + baz: { ok: true }, + }), + ).toEqual({ + bar: [1, 2], + baz: { ok: true }, + }) + expect( + out.parse({ + foo: "hi", + bar: [1], + baz: { ok: false }, + }), + ).toEqual({ + foo: "hi", + bar: [1], + baz: { ok: false }, + }) + }) + + test("throws for unsupported tuple schemas", () => { + expect(() => 
zod(Schema.Tuple([Schema.String, Schema.Number]))).toThrow("unsupported effect schema") + }) +}) diff --git a/packages/opencode/test/util/filesystem.test.ts b/packages/opencode/test/util/filesystem.test.ts index a6255db88f..aea0b1db87 100644 --- a/packages/opencode/test/util/filesystem.test.ts +++ b/packages/opencode/test/util/filesystem.test.ts @@ -440,4 +440,119 @@ describe("filesystem", () => { expect(await fs.readFile(filepath, "utf-8")).toBe(content) }) }) + + describe("resolve()", () => { + test("resolves slash-prefixed drive paths on Windows", async () => { + if (process.platform !== "win32") return + await using tmp = await tmpdir() + const forward = tmp.path.replaceAll("\\", "/") + expect(Filesystem.resolve(`/${forward}`)).toBe(Filesystem.normalizePath(tmp.path)) + }) + + test("resolves slash-prefixed drive roots on Windows", async () => { + if (process.platform !== "win32") return + await using tmp = await tmpdir() + const drive = tmp.path[0].toUpperCase() + expect(Filesystem.resolve(`/${drive}:`)).toBe(Filesystem.resolve(`${drive}:/`)) + }) + + test("resolves Git Bash and MSYS2 paths on Windows", async () => { + // Git Bash and MSYS2 both use /<drive>/... paths on Windows. + if (process.platform !== "win32") return + await using tmp = await tmpdir() + const drive = tmp.path[0].toLowerCase() + const rest = tmp.path.slice(2).replaceAll("\\", "/") + expect(Filesystem.resolve(`/${drive}${rest}`)).toBe(Filesystem.normalizePath(tmp.path)) + }) + + test("resolves Git Bash and MSYS2 drive roots on Windows", async () => { + // Git Bash and MSYS2 both use /<drive> paths on Windows. 
+ if (process.platform !== "win32") return + await using tmp = await tmpdir() + const drive = tmp.path[0].toLowerCase() + expect(Filesystem.resolve(`/${drive}`)).toBe(Filesystem.resolve(`${drive.toUpperCase()}:/`)) + }) + + test("resolves Cygwin paths on Windows", async () => { + if (process.platform !== "win32") return + await using tmp = await tmpdir() + const drive = tmp.path[0].toLowerCase() + const rest = tmp.path.slice(2).replaceAll("\\", "/") + expect(Filesystem.resolve(`/cygdrive/${drive}${rest}`)).toBe(Filesystem.normalizePath(tmp.path)) + }) + + test("resolves Cygwin drive roots on Windows", async () => { + if (process.platform !== "win32") return + await using tmp = await tmpdir() + const drive = tmp.path[0].toLowerCase() + expect(Filesystem.resolve(`/cygdrive/${drive}`)).toBe(Filesystem.resolve(`${drive.toUpperCase()}:/`)) + }) + + test("resolves WSL mount paths on Windows", async () => { + if (process.platform !== "win32") return + await using tmp = await tmpdir() + const drive = tmp.path[0].toLowerCase() + const rest = tmp.path.slice(2).replaceAll("\\", "/") + expect(Filesystem.resolve(`/mnt/${drive}${rest}`)).toBe(Filesystem.normalizePath(tmp.path)) + }) + + test("resolves WSL mount roots on Windows", async () => { + if (process.platform !== "win32") return + await using tmp = await tmpdir() + const drive = tmp.path[0].toLowerCase() + expect(Filesystem.resolve(`/mnt/${drive}`)).toBe(Filesystem.resolve(`${drive.toUpperCase()}:/`)) + }) + + test("resolves symlinked directory to canonical path", async () => { + await using tmp = await tmpdir() + const target = path.join(tmp.path, "real") + await fs.mkdir(target) + const link = path.join(tmp.path, "link") + await fs.symlink(target, link) + expect(Filesystem.resolve(link)).toBe(Filesystem.resolve(target)) + }) + + test("returns unresolved path when target does not exist", async () => { + await using tmp = await tmpdir() + const missing = path.join(tmp.path, "does-not-exist-" + Date.now()) + const result = 
Filesystem.resolve(missing) + expect(result).toBe(Filesystem.normalizePath(path.resolve(missing))) + }) + + test("throws ELOOP on symlink cycle", async () => { + await using tmp = await tmpdir() + const a = path.join(tmp.path, "a") + const b = path.join(tmp.path, "b") + await fs.symlink(b, a) + await fs.symlink(a, b) + expect(() => Filesystem.resolve(a)).toThrow() + }) + + // Windows: chmod(0o000) is a no-op, so EACCES cannot be triggered + test("throws EACCES on permission-denied symlink target", async () => { + if (process.platform === "win32") return + if (process.getuid?.() === 0) return // skip when running as root + await using tmp = await tmpdir() + const dir = path.join(tmp.path, "restricted") + await fs.mkdir(dir) + const link = path.join(tmp.path, "link") + await fs.symlink(dir, link) + await fs.chmod(dir, 0o000) + try { + expect(() => Filesystem.resolve(path.join(link, "child"))).toThrow() + } finally { + await fs.chmod(dir, 0o755) + } + }) + + // Windows: traversing through a file throws ENOENT (not ENOTDIR), + // which resolve() catches as a fallback instead of rethrowing + test("rethrows non-ENOENT errors", async () => { + if (process.platform === "win32") return + await using tmp = await tmpdir() + const file = path.join(tmp.path, "not-a-directory") + await fs.writeFile(file, "x") + expect(() => Filesystem.resolve(path.join(file, "child"))).toThrow() + }) + }) }) diff --git a/packages/opencode/test/util/instance-state.test.ts b/packages/opencode/test/util/instance-state.test.ts new file mode 100644 index 0000000000..e5d2129fb0 --- /dev/null +++ b/packages/opencode/test/util/instance-state.test.ts @@ -0,0 +1,139 @@ +import { afterEach, expect, test } from "bun:test" +import { Effect } from "effect" + +import { Instance } from "../../src/project/instance" +import { InstanceState } from "../../src/util/instance-state" +import { tmpdir } from "../fixture/fixture" + +async function access<A, E>(state: InstanceState.State<A, E>, dir: string) { + return 
Instance.provide({ + directory: dir, + fn: () => Effect.runPromise(InstanceState.get(state)), + }) +} + +afterEach(async () => { + await Instance.disposeAll() +}) + +test("InstanceState caches values for the same instance", async () => { + await using tmp = await tmpdir() + let n = 0 + + await Effect.runPromise( + Effect.scoped( + Effect.gen(function* () { + const state = yield* InstanceState.make({ + lookup: () => Effect.sync(() => ({ n: ++n })), + }) + + const a = yield* Effect.promise(() => access(state, tmp.path)) + const b = yield* Effect.promise(() => access(state, tmp.path)) + + expect(a).toBe(b) + expect(n).toBe(1) + }), + ), + ) +}) + +test("InstanceState isolates values by directory", async () => { + await using a = await tmpdir() + await using b = await tmpdir() + let n = 0 + + await Effect.runPromise( + Effect.scoped( + Effect.gen(function* () { + const state = yield* InstanceState.make({ + lookup: (dir) => Effect.sync(() => ({ dir, n: ++n })), + }) + + const x = yield* Effect.promise(() => access(state, a.path)) + const y = yield* Effect.promise(() => access(state, b.path)) + const z = yield* Effect.promise(() => access(state, a.path)) + + expect(x).toBe(z) + expect(x).not.toBe(y) + expect(n).toBe(2) + }), + ), + ) +}) + +test("InstanceState is disposed on instance reload", async () => { + await using tmp = await tmpdir() + const seen: string[] = [] + let n = 0 + + await Effect.runPromise( + Effect.scoped( + Effect.gen(function* () { + const state = yield* InstanceState.make({ + lookup: () => Effect.sync(() => ({ n: ++n })), + release: (value) => + Effect.sync(() => { + seen.push(String(value.n)) + }), + }) + + const a = yield* Effect.promise(() => access(state, tmp.path)) + yield* Effect.promise(() => Instance.reload({ directory: tmp.path })) + const b = yield* Effect.promise(() => access(state, tmp.path)) + + expect(a).not.toBe(b) + expect(seen).toEqual(["1"]) + }), + ), + ) +}) + +test("InstanceState is disposed on disposeAll", async () => { + await 
using a = await tmpdir() + await using b = await tmpdir() + const seen: string[] = [] + + await Effect.runPromise( + Effect.scoped( + Effect.gen(function* () { + const state = yield* InstanceState.make({ + lookup: (dir) => Effect.sync(() => ({ dir })), + release: (value) => + Effect.sync(() => { + seen.push(value.dir) + }), + }) + + yield* Effect.promise(() => access(state, a.path)) + yield* Effect.promise(() => access(state, b.path)) + yield* Effect.promise(() => Instance.disposeAll()) + + expect(seen.sort()).toEqual([a.path, b.path].sort()) + }), + ), + ) +}) + +test("InstanceState dedupes concurrent lookups for the same directory", async () => { + await using tmp = await tmpdir() + let n = 0 + + await Effect.runPromise( + Effect.scoped( + Effect.gen(function* () { + const state = yield* InstanceState.make({ + lookup: () => + Effect.promise(async () => { + n += 1 + await Bun.sleep(10) + return { n } + }), + }) + + const [a, b] = yield* Effect.promise(() => Promise.all([access(state, tmp.path), access(state, tmp.path)])) + expect(a).toBe(b) + expect(n).toBe(1) + }), + ), + ) +}) diff --git a/packages/opencode/test/util/module.test.ts b/packages/opencode/test/util/module.test.ts new file mode 100644 index 0000000000..738b4a785b --- /dev/null +++ b/packages/opencode/test/util/module.test.ts @@ -0,0 +1,59 @@ +import { describe, expect, test } from "bun:test" +import path from "path" +import { Module } from "@opencode-ai/util/module" +import { Filesystem } from "../../src/util/filesystem" +import { tmpdir } from "../fixture/fixture" + +describe("util.module", () => { + test("resolves package subpaths from the provided dir", async () => { + await using tmp = await tmpdir() + const root = path.join(tmp.path, "proj") + const file = path.join(root, "node_modules/typescript/lib/tsserver.js") + await Filesystem.write(file, "export {}\n") + await Filesystem.writeJson(path.join(root, "node_modules/typescript/package.json"), { name: "typescript" }) + + 
expect(Module.resolve("typescript/lib/tsserver.js", root)).toBe(file) + }) + + test("resolves packages through ancestor node_modules", async () => { + await using tmp = await tmpdir() + const root = path.join(tmp.path, "proj") + const cwd = path.join(root, "apps/web") + const file = path.join(root, "node_modules/eslint/lib/api.js") + await Filesystem.write(file, "export {}\n") + await Filesystem.writeJson(path.join(root, "node_modules/eslint/package.json"), { + name: "eslint", + main: "lib/api.js", + }) + await Filesystem.write(path.join(cwd, ".keep"), "") + + expect(Module.resolve("eslint", cwd)).toBe(file) + }) + + test("resolves relative to the provided dir", async () => { + await using tmp = await tmpdir() + const a = path.join(tmp.path, "a") + const b = path.join(tmp.path, "b") + const left = path.join(a, "node_modules/biome/index.js") + const right = path.join(b, "node_modules/biome/index.js") + await Filesystem.write(left, "export {}\n") + await Filesystem.write(right, "export {}\n") + await Filesystem.writeJson(path.join(a, "node_modules/biome/package.json"), { + name: "biome", + main: "index.js", + }) + await Filesystem.writeJson(path.join(b, "node_modules/biome/package.json"), { + name: "biome", + main: "index.js", + }) + + expect(Module.resolve("biome", a)).toBe(left) + expect(Module.resolve("biome", b)).toBe(right) + expect(Module.resolve("biome", a)).not.toBe(Module.resolve("biome", b)) + }) + + test("returns undefined when resolution fails", async () => { + await using tmp = await tmpdir() + expect(Module.resolve("missing-package", tmp.path)).toBeUndefined() + }) +}) diff --git a/packages/opencode/test/util/process.test.ts b/packages/opencode/test/util/process.test.ts index ce599d6d8f..758469fe3e 100644 --- a/packages/opencode/test/util/process.test.ts +++ b/packages/opencode/test/util/process.test.ts @@ -1,5 +1,6 @@ import { describe, expect, test } from "bun:test" import { Process } from "../../src/util/process" +import { tmpdir } from 
"../fixture/fixture" function node(script: string) { return [process.execPath, "-e", script] @@ -56,4 +57,21 @@ describe("util.process", () => { expect(out.code).not.toBe(0) expect(Date.now() - started).toBeLessThan(1000) }, 3000) + + test("uses cwd when spawning commands", async () => { + await using tmp = await tmpdir() + const out = await Process.run(node("process.stdout.write(process.cwd())"), { + cwd: tmp.path, + }) + expect(out.stdout.toString()).toBe(tmp.path) + }) + + test("merges environment overrides", async () => { + const out = await Process.run(node('process.stdout.write(process.env.OPENCODE_TEST ?? "")'), { + env: { + OPENCODE_TEST: "set", + }, + }) + expect(out.stdout.toString()).toBe("set") + }) }) diff --git a/packages/opencode/test/util/which.test.ts b/packages/opencode/test/util/which.test.ts index 323173b181..70c2fb2d9f 100644 --- a/packages/opencode/test/util/which.test.ts +++ b/packages/opencode/test/util/which.test.ts @@ -22,6 +22,13 @@ function env(PATH: string): NodeJS.ProcessEnv { } } +function envPath(Path: string): NodeJS.ProcessEnv { + return { + Path, + PathExt: process.env["PathExt"] ?? 
process.env["PATHEXT"], + } +} + function same(a: string | null, b: string) { if (process.platform === "win32") { expect(a?.toLowerCase()).toBe(b.toLowerCase()) @@ -79,4 +86,15 @@ describe("util.which", () => { expect(which("pathext", { PATH: bin, PATHEXT: ".CMD" })).toBe(file) }) + + test("uses Windows Path casing fallback", async () => { + if (process.platform !== "win32") return + + await using tmp = await tmpdir() + const bin = path.join(tmp.path, "bin") + await fs.mkdir(bin) + const file = await cmd(bin, "mixed") + + same(which("mixed", envPath(bin)), file) + }) }) diff --git a/packages/opencode/tsconfig.json b/packages/opencode/tsconfig.json index 9067d84fd6..ff9886313a 100644 --- a/packages/opencode/tsconfig.json +++ b/packages/opencode/tsconfig.json @@ -11,6 +11,13 @@ "paths": { "@/*": ["./src/*"], "@tui/*": ["./src/cli/cmd/tui/*"] - } + }, + "plugins": [ + { + "name": "@effect/language-service", + "transform": "@effect/language-service/transform", + "namespaceImportPackages": ["effect", "@effect/*"] + } + ] } } diff --git a/packages/plugin/src/example.ts b/packages/plugin/src/example.ts index 94745a37b7..1cf042fe96 100644 --- a/packages/plugin/src/example.ts +++ b/packages/plugin/src/example.ts @@ -1,5 +1,5 @@ -import { Plugin } from "./index" -import { tool } from "./tool" +import { Plugin } from "./index.js" +import { tool } from "./tool.js" export const ExamplePlugin: Plugin = async (ctx) => { return { diff --git a/packages/plugin/src/index.ts b/packages/plugin/src/index.ts index 76370d1d5a..b78bcae177 100644 --- a/packages/plugin/src/index.ts +++ b/packages/plugin/src/index.ts @@ -12,10 +12,10 @@ import type { Config, } from "@opencode-ai/sdk" -import type { BunShell } from "./shell" -import { type ToolDefinition } from "./tool" +import type { BunShell } from "./shell.js" +import { type ToolDefinition } from "./tool.js" -export * from "./tool" +export * from "./tool.js" export type ProviderContext = { source: "env" | "config" | "custom" | "api" diff 
--git a/packages/plugin/tsconfig.json b/packages/plugin/tsconfig.json index 58072c81c9..1173818783 100644 --- a/packages/plugin/tsconfig.json +++ b/packages/plugin/tsconfig.json @@ -3,9 +3,9 @@ "extends": "@tsconfig/node22/tsconfig.json", "compilerOptions": { "outDir": "dist", - "module": "preserve", + "module": "nodenext", "declaration": true, - "moduleResolution": "bundler", + "moduleResolution": "nodenext", "lib": ["es2022", "dom", "dom.iterable"] }, "include": ["src"] diff --git a/packages/script/package.json b/packages/script/package.json index 45de3bcb99..aa0d2f563f 100644 --- a/packages/script/package.json +++ b/packages/script/package.json @@ -2,8 +2,12 @@ "$schema": "https://json.schemastore.org/package", "name": "@opencode-ai/script", "license": "MIT", + "dependencies": { + "semver": "^7.6.3" + }, "devDependencies": { - "@types/bun": "catalog:" + "@types/bun": "catalog:", + "@types/semver": "^7.5.8" }, "exports": { ".": "./src/index.ts" diff --git a/packages/script/src/index.ts b/packages/script/src/index.ts index ee4bc1e465..d148ce0d2b 100644 --- a/packages/script/src/index.ts +++ b/packages/script/src/index.ts @@ -1,4 +1,5 @@ -import { $, semver } from "bun" +import { $ } from "bun" +import semver from "semver" import path from "path" const rootPkgPath = path.resolve(import.meta.dir, "../../../package.json") diff --git a/packages/sdk/js/src/gen/types.gen.ts b/packages/sdk/js/src/gen/types.gen.ts index 8eefe5bfe9..1e62358956 100644 --- a/packages/sdk/js/src/gen/types.gen.ts +++ b/packages/sdk/js/src/gen/types.gen.ts @@ -1209,7 +1209,7 @@ export type Config = { diff_style?: "auto" | "stacked" } /** - * Command configuration, see https://opencode.ai/docs/commands + * Command configuration, see https://altimate.ai/docs/commands */ command?: { [key: string]: { @@ -1266,7 +1266,7 @@ export type Config = { [key: string]: AgentConfig | undefined } /** - * Agent configuration, see https://opencode.ai/docs/agent + * Agent configuration, see 
https://altimate.ai/docs/agent */ agent?: { plan?: AgentConfig diff --git a/packages/sdk/js/src/v2/client.ts b/packages/sdk/js/src/v2/client.ts index 8685be52d6..ad956dd4b3 100644 --- a/packages/sdk/js/src/v2/client.ts +++ b/packages/sdk/js/src/v2/client.ts @@ -5,7 +5,7 @@ import { type Config } from "./gen/client/types.gen.js" import { OpencodeClient } from "./gen/sdk.gen.js" export { type Config as OpencodeClientConfig, OpencodeClient } -export function createOpencodeClient(config?: Config & { directory?: string }) { +export function createOpencodeClient(config?: Config & { directory?: string; experimental_workspaceID?: string }) { if (!config?.fetch) { const customFetch: any = (req: any) => { // @ts-ignore @@ -27,6 +27,13 @@ export function createOpencodeClient(config?: Config & { directory?: string }) { } } + if (config?.experimental_workspaceID) { + config.headers = { + ...config.headers, + "x-opencode-workspace": config.experimental_workspaceID, + } + } + const client = createClient(config) return new OpencodeClient({ client }) } diff --git a/packages/sdk/js/src/v2/gen/sdk.gen.ts b/packages/sdk/js/src/v2/gen/sdk.gen.ts index 1c1b31e46f..e06cfb25d4 100644 --- a/packages/sdk/js/src/v2/gen/sdk.gen.ts +++ b/packages/sdk/js/src/v2/gen/sdk.gen.ts @@ -77,6 +77,7 @@ import type { PermissionRespondResponses, PermissionRuleset, ProjectCurrentResponses, + ProjectInitGitResponses, ProjectListResponses, ProjectUpdateErrors, ProjectUpdateResponses, @@ -231,7 +232,7 @@ export class Config extends HeyApiClient { /** * Get global configuration * - * Retrieve the current global OpenCode configuration settings and preferences. + * Retrieve the current global Altimate Code configuration settings and preferences. */ public get<ThrowOnError extends boolean = false>(options?: Options<never, ThrowOnError>) { return (options?.client ?? 
this.client).get<GlobalConfigGetResponses, unknown, ThrowOnError>({ @@ -243,7 +244,7 @@ export class Config extends HeyApiClient { /** * Update global configuration * - * Update global OpenCode configuration settings and preferences. + * Update global Altimate Code configuration settings and preferences. */ public update<ThrowOnError extends boolean = false>( parameters?: { @@ -269,7 +270,7 @@ export class Global extends HeyApiClient { /** * Get health * - * Get health information about the OpenCode server. + * Get health information about the Altimate Code server. */ public health<ThrowOnError extends boolean = false>(options?: Options<never, ThrowOnError>) { return (options?.client ?? this.client).get<GlobalHealthResponses, unknown, ThrowOnError>({ @@ -281,7 +282,7 @@ export class Global extends HeyApiClient { /** * Get global events * - * Subscribe to global events from the OpenCode system using server-sent events. + * Subscribe to global events from the Altimate Code system using server-sent events. */ public event<ThrowOnError extends boolean = false>(options?: Options<never, ThrowOnError>) { return (options?.client ?? this.client).sse.get<GlobalEventResponses, unknown, ThrowOnError>({ @@ -293,7 +294,7 @@ export class Global extends HeyApiClient { /** * Dispose instance * - * Clean up and dispose all OpenCode instances, releasing all resources. + * Clean up and dispose all Altimate Code instances, releasing all resources. */ public dispose<ThrowOnError extends boolean = false>(options?: Options<never, ThrowOnError>) { return (options?.client ?? this.client).post<GlobalDisposeResponses, unknown, ThrowOnError>({ @@ -368,7 +369,7 @@ export class Project extends HeyApiClient { /** * List all projects * - * Get a list of projects that have been opened with OpenCode. + * Get a list of projects that have been opened with Altimate Code. 
*/ public list<ThrowOnError extends boolean = false>( parameters?: { @@ -398,7 +399,7 @@ export class Project extends HeyApiClient { /** * Get current project * - * Retrieve the currently active project that OpenCode is working with. + * Retrieve the currently active project that Altimate Code is working with. */ public current<ThrowOnError extends boolean = false>( parameters?: { @@ -425,6 +426,36 @@ export class Project extends HeyApiClient { }) } + /** + * Initialize git repository + * + * Create a git repository for the current project and return the refreshed project info. + */ + public initGit<ThrowOnError extends boolean = false>( + parameters?: { + directory?: string + workspace?: string + }, + options?: Options<never, ThrowOnError>, + ) { + const params = buildClientParams( + [parameters], + [ + { + args: [ + { in: "query", key: "directory" }, + { in: "query", key: "workspace" }, + ], + }, + ], + ) + return (options?.client ?? this.client).post<ProjectInitGitResponses, unknown, ThrowOnError>({ + url: "/project/git/init", + ...options, + ...params, + }) + } + /** * Update project * @@ -482,7 +513,7 @@ export class Pty extends HeyApiClient { /** * List PTY sessions * - * Get a list of all active pseudo-terminal (PTY) sessions managed by OpenCode. + * Get a list of all active pseudo-terminal (PTY) sessions managed by Altimate Code. */ public list<ThrowOnError extends boolean = false>( parameters?: { @@ -701,7 +732,7 @@ export class Config2 extends HeyApiClient { /** * Get configuration * - * Retrieve the current OpenCode configuration settings and preferences. + * Retrieve the current Altimate Code configuration settings and preferences. */ public get<ThrowOnError extends boolean = false>( parameters?: { @@ -731,7 +762,7 @@ export class Config2 extends HeyApiClient { /** * Update configuration * - * Update OpenCode configuration settings and preferences. + * Update Altimate Code configuration settings and preferences. 
*/ public update<ThrowOnError extends boolean = false>( parameters?: { @@ -981,7 +1012,7 @@ export class Session extends HeyApiClient { /** * List sessions * - * Get a list of all OpenCode sessions across projects, sorted by most recently updated. Archived sessions are excluded by default. + * Get a list of all Altimate Code sessions across projects, sorted by most recently updated. Archived sessions are excluded by default. */ public list<ThrowOnError extends boolean = false>( parameters?: { @@ -1217,7 +1248,7 @@ export class Session2 extends HeyApiClient { /** * List sessions * - * Get a list of all OpenCode sessions, sorted by most recently updated. + * Get a list of all Altimate Code sessions, sorted by most recently updated. */ public list<ThrowOnError extends boolean = false>( parameters?: { @@ -1255,7 +1286,7 @@ export class Session2 extends HeyApiClient { /** * Create session * - * Create a new OpenCode session for interacting with AI assistants and managing conversations. + * Create a new Altimate Code session for interacting with AI assistants and managing conversations. */ public create<ThrowOnError extends boolean = false>( parameters?: { @@ -1264,6 +1295,7 @@ export class Session2 extends HeyApiClient { parentID?: string title?: string permission?: PermissionRuleset + workspaceID?: string }, options?: Options<never, ThrowOnError>, ) { @@ -1277,6 +1309,7 @@ export class Session2 extends HeyApiClient { { in: "body", key: "parentID" }, { in: "body", key: "title" }, { in: "body", key: "permission" }, + { in: "body", key: "workspaceID" }, ], }, ], @@ -1358,7 +1391,7 @@ export class Session2 extends HeyApiClient { /** * Get session * - * Retrieve detailed information about a specific OpenCode session. + * Retrieve detailed information about a specific Altimate Code session. 
*/ public get<ThrowOnError extends boolean = false>( parameters: { @@ -1760,6 +1793,7 @@ export class Session2 extends HeyApiClient { directory?: string workspace?: string limit?: number + before?: string }, options?: Options<never, ThrowOnError>, ) { @@ -1772,6 +1806,7 @@ export class Session2 extends HeyApiClient { { in: "query", key: "directory" }, { in: "query", key: "workspace" }, { in: "query", key: "limit" }, + { in: "query", key: "before" }, ], }, ], @@ -3532,7 +3567,7 @@ export class Instance extends HeyApiClient { /** * Dispose instance * - * Clean up and dispose the current OpenCode instance, releasing all resources. + * Clean up and dispose the current Altimate Code instance, releasing all resources. */ public dispose<ThrowOnError extends boolean = false>( parameters?: { @@ -3564,7 +3599,7 @@ export class Path extends HeyApiClient { /** * Get paths * - * Retrieve the current working directory and related path information for the OpenCode instance. + * Retrieve the current working directory and related path information for the Altimate Code instance. */ public get<ThrowOnError extends boolean = false>( parameters?: { @@ -3628,7 +3663,7 @@ export class Command extends HeyApiClient { /** * List commands * - * Get a list of all available commands in the OpenCode system. + * Get a list of all available commands in the Altimate Code system. */ public list<ThrowOnError extends boolean = false>( parameters?: { @@ -3705,7 +3740,7 @@ export class App extends HeyApiClient { /** * List agents * - * Get a list of all available AI agents in the OpenCode system. + * Get a list of all available AI agents in the Altimate Code system. */ public agents<ThrowOnError extends boolean = false>( parameters?: { @@ -3735,7 +3770,7 @@ export class App extends HeyApiClient { /** * List skills * - * Get a list of all available skills in the OpenCode system. + * Get a list of all available skills in the Altimate Code system. 
*/ public skills<ThrowOnError extends boolean = false>( parameters?: { diff --git a/packages/sdk/js/src/v2/gen/types.gen.ts b/packages/sdk/js/src/v2/gen/types.gen.ts index afb2224a75..0c7d344040 100644 --- a/packages/sdk/js/src/v2/gen/types.gen.ts +++ b/packages/sdk/js/src/v2/gen/types.gen.ts @@ -1225,7 +1225,11 @@ export type ProviderConfig = { * Timeout in milliseconds for requests to this provider. Default is 300000 (5 minutes). Set to false to disable timeout. */ timeout?: number | false - [key: string]: unknown | string | boolean | number | false | undefined + /** + * Timeout in milliseconds between streamed SSE chunks for this provider. If no chunk arrives within this window, the request is aborted. + */ + chunkTimeout?: number + [key: string]: unknown | string | boolean | number | false | number | undefined } } @@ -1311,7 +1315,7 @@ export type Config = { logLevel?: LogLevel server?: ServerConfig /** - * Command configuration, see https://opencode.ai/docs/commands + * Command configuration, see https://altimate.ai/docs/commands */ command?: { [key: string]: { @@ -1385,7 +1389,7 @@ export type Config = { [key: string]: AgentConfig | undefined } /** - * Agent configuration, see https://opencode.ai/docs/agents + * Agent configuration, see https://altimate.ai/docs/agents */ agent?: { plan?: AgentConfig @@ -2087,6 +2091,25 @@ export type ProjectCurrentResponses = { export type ProjectCurrentResponse = ProjectCurrentResponses[keyof ProjectCurrentResponses] +export type ProjectInitGitData = { + body?: never + path?: never + query?: { + directory?: string + workspace?: string + } + url: "/project/git/init" +} + +export type ProjectInitGitResponses = { + /** + * Project information after git initialization + */ + 200: Project +} + +export type ProjectInitGitResponse = ProjectInitGitResponses[keyof ProjectInitGitResponses] + export type ProjectUpdateData = { body?: { name?: string @@ -2745,6 +2768,7 @@ export type SessionCreateData = { parentID?: string title?: string 
permission?: PermissionRuleset + workspaceID?: string } path?: never query?: { @@ -2946,9 +2970,6 @@ export type SessionChildrenResponse = SessionChildrenResponses[keyof SessionChil export type SessionTodoData = { body?: never path: { - /** - * Session ID - */ sessionID: string } query?: { @@ -2987,9 +3008,6 @@ export type SessionInitData = { messageID: string } path: { - /** - * Session ID - */ sessionID: string } query?: { @@ -3175,9 +3193,6 @@ export type SessionSummarizeData = { auto?: boolean } path: { - /** - * Session ID - */ sessionID: string } query?: { @@ -3212,15 +3227,16 @@ export type SessionSummarizeResponse = SessionSummarizeResponses[keyof SessionSu export type SessionMessagesData = { body?: never path: { - /** - * Session ID - */ sessionID: string } query?: { directory?: string workspace?: string + /** + * Maximum number of messages to return + */ limit?: number + before?: string } url: "/session/{sessionID}/message" } @@ -3271,9 +3287,6 @@ export type SessionPromptData = { parts: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput> } path: { - /** - * Session ID - */ sessionID: string } query?: { @@ -3311,13 +3324,7 @@ export type SessionPromptResponse = SessionPromptResponses[keyof SessionPromptRe export type SessionDeleteMessageData = { body?: never path: { - /** - * Session ID - */ sessionID: string - /** - * Message ID - */ messageID: string } query?: { @@ -3352,13 +3359,7 @@ export type SessionDeleteMessageResponse = SessionDeleteMessageResponses[keyof S export type SessionMessageData = { body?: never path: { - /** - * Session ID - */ sessionID: string - /** - * Message ID - */ messageID: string } query?: { @@ -3396,17 +3397,8 @@ export type SessionMessageResponse = SessionMessageResponses[keyof SessionMessag export type PartDeleteData = { body?: never path: { - /** - * Session ID - */ sessionID: string - /** - * Message ID - */ messageID: string - /** - * Part ID - */ partID: string } query?: { @@ -3441,17 +3433,8 @@ 
export type PartDeleteResponse = PartDeleteResponses[keyof PartDeleteResponses] export type PartUpdateData = { body?: Part path: { - /** - * Session ID - */ sessionID: string - /** - * Message ID - */ messageID: string - /** - * Part ID - */ partID: string } query?: { @@ -3504,9 +3487,6 @@ export type SessionPromptAsyncData = { parts: Array<TextPartInput | FilePartInput | AgentPartInput | SubtaskPartInput> } path: { - /** - * Session ID - */ sessionID: string } query?: { @@ -3556,9 +3536,6 @@ export type SessionCommandData = { }> } path: { - /** - * Session ID - */ sessionID: string } query?: { @@ -3603,9 +3580,6 @@ export type SessionShellData = { command: string } path: { - /** - * Session ID - */ sessionID: string } query?: { diff --git a/packages/sdk/openapi.json b/packages/sdk/openapi.json index 7db79bcbe2..0a4bad1b95 100644 --- a/packages/sdk/openapi.json +++ b/packages/sdk/openapi.json @@ -10,7 +10,7 @@ "get": { "operationId": "global.health", "summary": "Get health", - "description": "Get health information about the OpenCode server.", + "description": "Get health information about the Altimate Code server.", "responses": { "200": { "description": "Health information", @@ -45,7 +45,7 @@ "get": { "operationId": "global.event", "summary": "Get global events", - "description": "Subscribe to global events from the OpenCode system using server-sent events.", + "description": "Subscribe to global events from the Altimate Code system using server-sent events.", "responses": { "200": { "description": "Event stream", @@ -70,7 +70,7 @@ "get": { "operationId": "global.config.get", "summary": "Get global configuration", - "description": "Retrieve the current global OpenCode configuration settings and preferences.", + "description": "Retrieve the current global Altimate Code configuration settings and preferences.", "responses": { "200": { "description": "Get global config info", @@ -93,7 +93,7 @@ "patch": { "operationId": "global.config.update", "summary": "Update 
global configuration", - "description": "Update global OpenCode configuration settings and preferences.", + "description": "Update global Altimate Code configuration settings and preferences.", "responses": { "200": { "description": "Successfully updated global config", @@ -137,7 +137,7 @@ "post": { "operationId": "global.dispose", "summary": "Dispose instance", - "description": "Clean up and dispose all OpenCode instances, releasing all resources.", + "description": "Clean up and dispose all Altimate Code instances, releasing all resources.", "responses": { "200": { "description": "Global disposed", @@ -275,7 +275,7 @@ } ], "summary": "List all projects", - "description": "Get a list of projects that have been opened with OpenCode.", + "description": "Get a list of projects that have been opened with Altimate Code.", "responses": { "200": { "description": "List of projects", @@ -319,7 +319,7 @@ } ], "summary": "Get current project", - "description": "Retrieve the currently active project that OpenCode is working with.", + "description": "Retrieve the currently active project that Altimate Code is working with.", "responses": { "200": { "description": "Current project information", @@ -340,6 +340,47 @@ ] } }, + "/project/git/init": { + "post": { + "operationId": "project.initGit", + "parameters": [ + { + "in": "query", + "name": "directory", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "workspace", + "schema": { + "type": "string" + } + } + ], + "summary": "Initialize git repository", + "description": "Create a git repository for the current project and return the refreshed project info.", + "responses": { + "200": { + "description": "Project information after git initialization", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Project" + } + } + } + } + }, + "x-codeSamples": [ + { + "lang": "js", + "source": "import { createOpencodeClient } from \"@opencode-ai/sdk\n\nconst client = 
createOpencodeClient()\nawait client.project.initGit({\n ...\n})" + } + ] + } + }, "/project/{projectID}": { "patch": { "operationId": "project.update", @@ -466,7 +507,7 @@ } ], "summary": "List PTY sessions", - "description": "Get a list of all active pseudo-terminal (PTY) sessions managed by OpenCode.", + "description": "Get a list of all active pseudo-terminal (PTY) sessions managed by Altimate Code.", "responses": { "200": { "description": "List of sessions", @@ -596,7 +637,8 @@ "in": "path", "name": "ptyID", "schema": { - "type": "string" + "type": "string", + "pattern": "^pty.*" }, "required": true } @@ -653,7 +695,8 @@ "in": "path", "name": "ptyID", "schema": { - "type": "string" + "type": "string", + "pattern": "^pty.*" }, "required": true } @@ -736,7 +779,8 @@ "in": "path", "name": "ptyID", "schema": { - "type": "string" + "type": "string", + "pattern": "^pty.*" }, "required": true } @@ -795,7 +839,8 @@ "in": "path", "name": "ptyID", "schema": { - "type": "string" + "type": "string", + "pattern": "^pty.*" }, "required": true } @@ -852,7 +897,7 @@ } ], "summary": "Get configuration", - "description": "Retrieve the current OpenCode configuration settings and preferences.", + "description": "Retrieve the current Altimate Code configuration settings and preferences.", "responses": { "200": { "description": "Get config info", @@ -891,7 +936,7 @@ } ], "summary": "Update configuration", - "description": "Update OpenCode configuration settings and preferences.", + "description": "Update Altimate Code configuration settings and preferences.", "responses": { "200": { "description": "Successfully updated config", @@ -1587,7 +1632,7 @@ } ], "summary": "List sessions", - "description": "Get a list of all OpenCode sessions across projects, sorted by most recently updated. Archived sessions are excluded by default.", + "description": "Get a list of all Altimate Code sessions across projects, sorted by most recently updated. 
Archived sessions are excluded by default.", "responses": { "200": { "description": "List of sessions", @@ -1711,7 +1756,7 @@ } ], "summary": "List sessions", - "description": "Get a list of all OpenCode sessions, sorted by most recently updated.", + "description": "Get a list of all Altimate Code sessions, sorted by most recently updated.", "responses": { "200": { "description": "List of sessions", @@ -1753,7 +1798,7 @@ } ], "summary": "Create session", - "description": "Create a new OpenCode session for interacting with AI assistants and managing conversations.", + "description": "Create a new Altimate Code session for interacting with AI assistants and managing conversations.", "responses": { "200": { "description": "Successfully created session", @@ -1791,6 +1836,10 @@ }, "permission": { "$ref": "#/components/schemas/PermissionRuleset" + }, + "workspaceID": { + "type": "string", + "pattern": "^wrk.*" } } } @@ -1891,7 +1940,7 @@ } ], "summary": "Get session", - "description": "Retrieve detailed information about a specific OpenCode session.", + "description": "Retrieve detailed information about a specific Altimate Code session.", "tags": ["Session"], "responses": { "200": { @@ -2021,7 +2070,8 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "required": true } @@ -2186,10 +2236,10 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true } ], "summary": "Get session todos", @@ -2259,10 +2309,10 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true } ], "summary": "Initialize session", @@ -2416,7 +2466,8 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "required": true } @@ -2485,7 
+2536,8 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "required": true } @@ -2683,10 +2735,10 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true } ], "summary": "Summarize session", @@ -2775,16 +2827,26 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true }, { "in": "query", "name": "limit", "schema": { - "type": "number" + "type": "integer", + "minimum": 0, + "maximum": 9007199254740991 + }, + "description": "Maximum number of messages to return" + }, + { + "in": "query", + "name": "before", + "schema": { + "type": "string" } } ], @@ -2865,10 +2927,10 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true } ], "summary": "Send message", @@ -3019,19 +3081,19 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true }, { "in": "path", "name": "messageID", "schema": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, - "required": true, - "description": "Message ID" + "required": true } ], "summary": "Get message", @@ -3108,19 +3170,19 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true }, { "in": "path", "name": "messageID", "schema": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, - "required": true, - "description": "Message ID" + "required": true } ], "summary": "Delete message", @@ -3187,28 +3249,28 @@ "in": "path", "name": 
"sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true }, { "in": "path", "name": "messageID", "schema": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, - "required": true, - "description": "Message ID" + "required": true }, { "in": "path", "name": "partID", "schema": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, - "required": true, - "description": "Part ID" + "required": true } ], "description": "Delete a part from a message", @@ -3272,28 +3334,28 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true }, { "in": "path", "name": "messageID", "schema": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, - "required": true, - "description": "Message ID" + "required": true }, { "in": "path", "name": "partID", "schema": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, - "required": true, - "description": "Part ID" + "required": true } ], "description": "Update a part in a message", @@ -3368,10 +3430,10 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true } ], "summary": "Send async message", @@ -3503,10 +3565,10 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": "Session ID" + "required": true } ], "summary": "Send command", @@ -3588,7 +3650,8 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "type": { "type": "string", @@ -3648,10 +3711,10 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, - "required": true, - "description": 
"Session ID" + "required": true } ], "summary": "Run shell command", @@ -3748,7 +3811,8 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "required": true } @@ -3837,7 +3901,8 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "required": true } @@ -3906,7 +3971,8 @@ "in": "path", "name": "sessionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "required": true }, @@ -3914,7 +3980,8 @@ "in": "path", "name": "permissionID", "schema": { - "type": "string" + "type": "string", + "pattern": "^per.*" }, "required": true } @@ -4000,7 +4067,8 @@ "in": "path", "name": "requestID", "schema": { - "type": "string" + "type": "string", + "pattern": "^per.*" }, "required": true } @@ -4176,7 +4244,8 @@ "in": "path", "name": "requestID", "schema": { - "type": "string" + "type": "string", + "pattern": "^que.*" }, "required": true } @@ -4264,7 +4333,8 @@ "in": "path", "name": "requestID", "schema": { - "type": "string" + "type": "string", + "pattern": "^que.*" }, "required": true } @@ -6261,7 +6331,7 @@ "sessionID": { "description": "Session ID to navigate to", "type": "string", - "pattern": "^ses" + "pattern": "^ses.*" } }, "required": ["sessionID"] @@ -6393,7 +6463,7 @@ } ], "summary": "Dispose instance", - "description": "Clean up and dispose the current OpenCode instance, releasing all resources.", + "description": "Clean up and dispose the current Altimate Code instance, releasing all resources.", "responses": { "200": { "description": "Instance disposed", @@ -6434,7 +6504,7 @@ } ], "summary": "Get paths", - "description": "Retrieve the current working directory and related path information for the OpenCode instance.", + "description": "Retrieve the current working directory and related path information for the Altimate Code instance.", "responses": { "200": { "description": "Path", @@ -6516,7 +6586,7 @@ } ], "summary": 
"List commands", - "description": "Get a list of all available commands in the OpenCode system.", + "description": "Get a list of all available commands in the Altimate Code system.", "responses": { "200": { "description": "List of commands", @@ -6644,7 +6714,7 @@ } ], "summary": "List agents", - "description": "Get a list of all available AI agents in the OpenCode system.", + "description": "Get a list of all available AI agents in the Altimate Code system.", "responses": { "200": { "description": "List of agents", @@ -6688,7 +6758,7 @@ } ], "summary": "List skills", - "description": "Get a list of all available skills in the OpenCode system.", + "description": "Get a list of all available skills in the Altimate Code system.", "responses": { "200": { "description": "List of skills", @@ -7150,10 +7220,12 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "role": { "type": "string", @@ -7390,10 +7462,12 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "role": { "type": "string", @@ -7437,7 +7511,8 @@ ] }, "parentID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "modelID": { "type": "string" @@ -7562,10 +7637,12 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" } }, "required": ["sessionID", "messageID"] @@ -7577,13 +7654,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -7624,13 +7704,16 
@@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -7667,13 +7750,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -7832,13 +7918,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -8025,13 +8114,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -8060,13 +8152,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -8082,13 +8177,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -8140,13 +8238,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": 
"^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -8162,13 +8263,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -8190,13 +8294,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -8231,13 +8338,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -8265,13 +8375,16 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "type": { "type": "string", @@ -8356,13 +8469,16 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "partID": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "field": { "type": "string" @@ -8387,13 +8503,16 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, 
"partID": { - "type": "string" + "type": "string", + "pattern": "^prt.*" } }, "required": ["sessionID", "messageID", "partID"] @@ -8438,7 +8557,8 @@ "type": "object", "properties": { "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "callID": { "type": "string" @@ -8473,10 +8593,12 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "requestID": { - "type": "string" + "type": "string", + "pattern": "^per.*" }, "reply": { "type": "string", @@ -8542,7 +8664,8 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "status": { "$ref": "#/components/schemas/SessionStatus" @@ -8564,7 +8687,8 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" } }, "required": ["sessionID"] @@ -8637,7 +8761,8 @@ "type": "object", "properties": { "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "callID": { "type": "string" @@ -8678,10 +8803,12 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "requestID": { - "type": "string" + "type": "string", + "pattern": "^que.*" }, "answers": { "type": "array", @@ -8706,10 +8833,12 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "requestID": { - "type": "string" + "type": "string", + "pattern": "^que.*" } }, "required": ["sessionID", "requestID"] @@ -8728,7 +8857,8 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" } }, "required": ["sessionID"] @@ -8800,7 +8930,8 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "todos": { "type": "array", @@ -8921,7 +9052,7 @@ "sessionID": { "description": "Session ID to navigate to", "type": "string", - "pattern": 
"^ses" + "pattern": "^ses.*" } }, "required": ["sessionID"] @@ -9039,7 +9170,8 @@ "type": "string" }, "workspaceID": { - "type": "string" + "type": "string", + "pattern": "^wrk.*" }, "directory": { "type": "string" @@ -9109,10 +9241,12 @@ "type": "object", "properties": { "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "partID": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "snapshot": { "type": "string" @@ -9194,7 +9328,8 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "diff": { "type": "array", @@ -9219,7 +9354,8 @@ "type": "object", "properties": { "sessionID": { - "type": "string" + "type": "string", + "pattern": "^ses.*" }, "error": { "anyOf": [ @@ -10067,6 +10203,12 @@ "const": false } ] + }, + "chunkTimeout": { + "description": "Timeout in milliseconds between streamed SSE chunks for this provider. If no chunk arrives within this window, the request is aborted.", + "type": "integer", + "exclusiveMinimum": 0, + "maximum": 9007199254740991 } }, "additionalProperties": {} @@ -10198,7 +10340,7 @@ "$ref": "#/components/schemas/ServerConfig" }, "command": { - "description": "Command configuration, see https://opencode.ai/docs/commands", + "description": "Command configuration, see https://altimate.ai/docs/commands", "type": "object", "propertyNames": { "type": "string" @@ -10332,7 +10474,7 @@ } }, "agent": { - "description": "Agent configuration, see https://opencode.ai/docs/agents", + "description": "Agent configuration, see https://altimate.ai/docs/agents", "type": "object", "properties": { "plan": { @@ -11118,7 +11260,8 @@ "type": "string" }, "workspaceID": { - "type": "string" + "type": "string", + "pattern": "^wrk.*" }, "directory": { "type": "string" @@ -11188,10 +11331,12 @@ "type": "object", "properties": { "messageID": { - "type": "string" + "type": "string", + "pattern": "^msg.*" }, "partID": { - "type": "string" + "type": "string", + 
"pattern": "^prt.*" }, "snapshot": { "type": "string" @@ -11240,7 +11385,8 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "type": { "type": "string", @@ -11281,7 +11427,8 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "type": { "type": "string", @@ -11306,7 +11453,8 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "type": { "type": "string", @@ -11341,7 +11489,8 @@ "type": "object", "properties": { "id": { - "type": "string" + "type": "string", + "pattern": "^prt.*" }, "type": { "type": "string", diff --git a/packages/util/src/module.ts b/packages/util/src/module.ts new file mode 100644 index 0000000000..6ed3b23d7a --- /dev/null +++ b/packages/util/src/module.ts @@ -0,0 +1,10 @@ +import { createRequire } from "node:module" +import path from "node:path" + +export namespace Module { + export function resolve(id: string, dir: string) { + try { + return createRequire(path.join(dir, "package.json")).resolve(id) + } catch {} + } +} diff --git a/script/beta.ts b/script/beta.ts index b0e6c2dcc1..61f9cf8620 100755 --- a/script/beta.ts +++ b/script/beta.ts @@ -79,7 +79,8 @@ async function fix(pr: PR, files: string[]) { async function main() { console.log("Fetching open PRs with beta label...") - const stdout = await $`gh pr list --state open --label beta --json number,title,author,labels --limit 100`.text() + const stdout = + await $`gh pr list --state open --draft=false --label beta --json number,title,author,labels --limit 100`.text() const prs: PR[] = JSON.parse(stdout).sort((a: PR, b: PR) => a.number - b.number) console.log(`Found ${prs.length} open PRs with beta label`) diff --git a/script/sync-zed.ts b/script/sync-zed.ts deleted file mode 100755 index ac9eade578..0000000000 --- a/script/sync-zed.ts +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env bun - -import { $ } from "bun" -import { 
tmpdir } from "os" -import { join } from "path" - -const FORK_REPO = "AltimateAI/zed-extensions" -const UPSTREAM_REPO = "zed-industries/extensions" -const EXTENSION_NAME = "altimate-code" - -async function main() { - const version = process.argv[2] - if (!version) throw new Error("Version argument required, ex: bun script/sync-zed.ts v1.0.52") - - const token = process.env.ZED_EXTENSIONS_PAT - if (!token) throw new Error("ZED_EXTENSIONS_PAT environment variable required") - - const prToken = process.env.ZED_PR_PAT - if (!prToken) throw new Error("ZED_PR_PAT environment variable required") - - const cleanVersion = version.replace(/^v/, "") - console.log(`📦 Syncing Zed extension for version ${cleanVersion}`) - - const commitSha = await $`git rev-parse ${version}`.text() - const sha = commitSha.trim() - console.log(`🔍 Found commit SHA: ${sha}`) - - const extensionToml = await $`git show ${version}:packages/extensions/zed/extension.toml`.text() - const parsed = Bun.TOML.parse(extensionToml) as { version: string } - const extensionVersion = parsed.version - - if (extensionVersion !== cleanVersion) { - throw new Error(`Version mismatch: extension.toml has ${extensionVersion} but tag is ${cleanVersion}`) - } - console.log(`✅ Version ${extensionVersion} matches tag`) - - // Clone the fork to a temp directory - const workDir = join(tmpdir(), `zed-extensions-${Date.now()}`) - console.log(`📁 Working in ${workDir}`) - - await $`git clone https://x-access-token:${token}@github.com/${FORK_REPO}.git ${workDir}` - process.chdir(workDir) - - // Configure git identity - await $`git config user.name "Aiden Cline"` - await $`git config user.email "63023139+rekram1-node@users.noreply.github.com "` - - // Sync fork with upstream (force reset to match exactly) - console.log(`🔄 Syncing fork with upstream...`) - await $`git remote add upstream https://github.com/${UPSTREAM_REPO}.git` - await $`git fetch upstream` - await $`git checkout main` - await $`git reset --hard upstream/main` - 
await $`git push origin main --force` - console.log(`✅ Fork synced (force reset to upstream)`) - - // Create a new branch - const branchName = `update-${EXTENSION_NAME}-${cleanVersion}` - console.log(`🌿 Creating branch ${branchName}`) - await $`git checkout -b ${branchName}` - - const submodulePath = `extensions/${EXTENSION_NAME}` - console.log(`📌 Updating submodule to commit ${sha}`) - await $`git submodule update --init ${submodulePath}` - process.chdir(submodulePath) - await $`git fetch` - await $`git checkout ${sha}` - process.chdir(workDir) - await $`git add ${submodulePath}` - - console.log(`📝 Updating extensions.toml`) - const extensionsTomlPath = "extensions.toml" - const extensionsToml = await Bun.file(extensionsTomlPath).text() - - const versionRegex = new RegExp(`(\\[${EXTENSION_NAME}\\][\\s\\S]*?)version = "[^"]+"`) - const updatedToml = extensionsToml.replace(versionRegex, `$1version = "${cleanVersion}"`) - - if (updatedToml === extensionsToml) { - throw new Error(`Failed to update version in extensions.toml - pattern not found`) - } - - await Bun.write(extensionsTomlPath, updatedToml) - await $`git add extensions.toml` - - const commitMessage = `Update ${EXTENSION_NAME} to v${cleanVersion}` - - await $`git commit -m ${commitMessage}` - console.log(`✅ Changes committed`) - - // Delete any existing branches for opencode updates - console.log(`🔍 Checking for existing branches...`) - const branches = await $`git ls-remote --heads https://x-access-token:${token}@github.com/${FORK_REPO}.git`.text() - const branchPattern = `refs/heads/update-${EXTENSION_NAME}-` - const oldBranches = branches - .split("\n") - .filter((line) => line.includes(branchPattern)) - .map((line) => line.split("refs/heads/")[1]) - .filter(Boolean) - - if (oldBranches.length > 0) { - console.log(`🗑️ Found ${oldBranches.length} old branch(es), deleting...`) - for (const branch of oldBranches) { - await $`git push https://x-access-token:${token}@github.com/${FORK_REPO}.git --delete 
${branch}` - console.log(`✅ Deleted branch ${branch}`) - } - } - - console.log(`🚀 Pushing to fork...`) - await $`git push https://x-access-token:${token}@github.com/${FORK_REPO}.git ${branchName}` - - console.log(`📬 Creating pull request...`) - const prResult = - await $`gh pr create --repo ${UPSTREAM_REPO} --base main --head ${FORK_REPO.split("/")[0]}:${branchName} --title "Update ${EXTENSION_NAME} to v${cleanVersion}" --body "Updating OpenCode extension to v${cleanVersion}"` - .env({ ...process.env, GH_TOKEN: prToken }) - .nothrow() - - if (prResult.exitCode !== 0) { - console.error("stderr:", prResult.stderr.toString()) - throw new Error(`Failed with exit code ${prResult.exitCode}`) - } - - const prUrl = prResult.stdout.toString().trim() - console.log(`✅ Pull request created: ${prUrl}`) - console.log(`🎉 Done!`) -} - -main().catch((err) => { - console.error("❌ Error:", err.message) - process.exit(1) -}) diff --git a/script/upstream/README.md b/script/upstream/README.md index f175737020..87e7598f6e 100644 --- a/script/upstream/README.md +++ b/script/upstream/README.md @@ -124,6 +124,8 @@ The merge script prints a `gh pr create` command at the end. 
Review the diff, th | `bun run script/upstream/analyze.ts --branding` | Audit for branding leaks | | `bun run script/upstream/analyze.ts --branding --json` | Branding audit (CI-friendly) | | `bun run script/upstream/analyze.ts` | Check `altimate_change` marker integrity | +| `bun run script/upstream/analyze.ts --markers --base main` | Check PR for missing markers | +| `bun run script/upstream/analyze.ts --markers --base main --strict` | Same, but fail CI on warnings | | `bun run script/upstream/verify-restructure.ts` | Verify branch restructure | ## Configuration @@ -226,22 +228,33 @@ These help during conflict resolution — you can see exactly what we changed vs script/upstream/ ├── README.md # This runbook ├── merge.ts # Main merge orchestrator -├── analyze.ts # Branding audit & marker analysis +├── analyze.ts # Branding audit, marker analysis, and CI marker guard ├── list-versions.ts # List upstream tags with status ├── verify-restructure.ts # Branch comparison verification -├── merge-config.json # Legacy declarative config ├── package.json # Dependencies (minimatch) ├── tsconfig.json # TypeScript config -├── utils/ -│ ├── config.ts # All branding rules and merge config -│ ├── git.ts # Git command wrappers (sync + async) -│ ├── logger.ts # Colored terminal logging -│ └── report.ts # Merge report types and output -└── transforms/ - ├── keep-ours.ts # Resolve conflicts: keep our version - ├── skip-files.ts # Resolve conflicts: accept upstream - ├── lock-files.ts # Lock file regeneration - └── branding.ts # Apply branding replacements +└── utils/ + ├── config.ts # All branding rules and merge config + ├── git.ts # Git command wrappers (sync + async) + ├── logger.ts # Colored terminal logging and ANSI formatting + └── report.ts # Merge report types and output +``` + +## CI Integration + +The **Marker Guard** CI job (in `.github/workflows/ci.yml`) runs automatically on every PR. 
It checks whether any upstream-shared files (`packages/opencode/src/`) have new code without `altimate_change` markers. This prevents accidental overwrites during future upstream merges. + +```bash +# What CI runs: +bun run script/upstream/analyze.ts --markers --base origin/main --strict +``` + +If the job fails, wrap your custom code with markers: + +```typescript +// altimate_change start — description of your change +... your modifications ... +// altimate_change end ``` ## Troubleshooting diff --git a/script/upstream/analyze.ts b/script/upstream/analyze.ts index 92b5b2a97a..f205a9989e 100644 --- a/script/upstream/analyze.ts +++ b/script/upstream/analyze.ts @@ -23,6 +23,7 @@ import fs from "fs" import path from "path" import * as git from "./utils/git" import * as logger from "./utils/logger" +import { RESET, BOLD, DIM, CYAN, GREEN, RED, YELLOW, MAGENTA, bold, dim, cyan, banner } from "./utils/logger" import { loadConfig, repoRoot, type MergeConfig } from "./utils/config" // --------------------------------------------------------------------------- @@ -33,6 +34,9 @@ const { values: args } = parseArgs({ options: { version: { type: "string", short: "v" }, branding: { type: "boolean", default: false }, + markers: { type: "boolean", default: false }, + strict: { type: "boolean", default: false }, + base: { type: "string" }, verbose: { type: "boolean", default: false }, json: { type: "boolean", default: false }, help: { type: "boolean", short: "h", default: false }, @@ -40,30 +44,6 @@ const { values: args } = parseArgs({ strict: false, }) as any -// --------------------------------------------------------------------------- -// ANSI helpers -// --------------------------------------------------------------------------- - -const RESET = "\x1b[0m" -const BOLD = "\x1b[1m" -const DIM = "\x1b[2m" -const CYAN = "\x1b[36m" -const GREEN = "\x1b[32m" -const RED = "\x1b[31m" -const YELLOW = "\x1b[33m" -const MAGENTA = "\x1b[35m" - -function bold(s: string): string { return 
`${BOLD}${s}${RESET}` } -function dim(s: string): string { return `${DIM}${s}${RESET}` } -function cyan(s: string): string { return `${CYAN}${s}${RESET}` } - -function banner(text: string): void { - const line = "═".repeat(60) - console.log(`\n${CYAN}${line}${RESET}`) - console.log(`${CYAN} ${BOLD}${text}${RESET}`) - console.log(`${CYAN}${line}${RESET}\n`) -} - // --------------------------------------------------------------------------- // Branding leak detection // --------------------------------------------------------------------------- @@ -509,6 +489,9 @@ function printUsage(): void { ${bold("OPTIONS")} --version, -v <tag> Upstream version to analyze --branding Scan codebase for upstream branding leaks + --markers Check changed files for missing altimate_change markers + --base <branch> Base branch for --markers comparison (default: HEAD) + --strict Exit with code 1 on warnings (for CI) --verbose Show all results (not just top 20) --json Output results as JSON --help, -h Show this help message @@ -523,11 +506,236 @@ function printUsage(): void { ${dim("# Full branding audit with all details")} bun run script/upstream/analyze.ts --branding --verbose + ${dim("# Check PR for missing markers (CI)")} + bun run script/upstream/analyze.ts --markers --base main --strict + ${dim("# Machine-readable output for CI")} bun run script/upstream/analyze.ts --branding --json `) } +// --------------------------------------------------------------------------- +// CI marker guard (--markers mode, formerly check-markers.ts) +// --------------------------------------------------------------------------- + +interface MarkerWarning { + file: string + line: number + context: string + reason: string +} + +function getChangedFiles(base?: string): string[] { + const { execSync } = require("child_process") + const root = repoRoot() + // Only check Modified files (M), not Added (A). New files don't exist + // upstream so they can't be overwritten by a merge — no markers needed. 
+ const cmd = base + ? `git diff --name-only --diff-filter=M ${base}...HEAD` + : `git diff --name-only --diff-filter=M HEAD` + try { + return execSync(cmd, { cwd: root, encoding: "utf-8" }) + .trim() + .split("\n") + .filter(Boolean) + } catch { + return [] + } +} + +// Cache for upstream file existence checks (populated on first use) +let _upstreamFilesCache: Set<string> | null = null + +function getUpstreamFiles(config: MergeConfig): Set<string> { + if (_upstreamFilesCache) return _upstreamFilesCache + const { execSync } = require("child_process") + const root = repoRoot() + const refs = [`${config.upstreamRemote}/dev`, `${config.upstreamRemote}/main`] + for (const ref of refs) { + try { + const output = execSync(`git ls-tree -r --name-only ${ref}`, { + cwd: root, + encoding: "utf-8", + maxBuffer: 10 * 1024 * 1024, + }) + _upstreamFilesCache = new Set(output.trim().split("\n").filter(Boolean)) + return _upstreamFilesCache + } catch { + // ref not available, try next + } + } + // Upstream not available — return empty set (all files treated as ours-only) + _upstreamFilesCache = new Set() + return _upstreamFilesCache +} + +// altimate_change start — exclude test files, generated code, and config files from marker checks +const markerExcludePatterns = [ + "**/test/**", + "**/tests/**", + "**/*.test.ts", + "**/*.test.tsx", + "**/*.spec.ts", + "**/tsconfig.json", + "**/package.json", + "packages/sdk/js/src/gen/**", + "packages/sdk/js/src/v2/gen/**", + "packages/sdk/openapi.json", + "packages/script/**", + "script/**", +] +// altimate_change end + +function isUpstreamShared(file: string, config: MergeConfig): boolean { + const { minimatch } = require("minimatch") + if (config.keepOurs.some((p: string) => minimatch(file, p))) return false + if (config.skipFiles.some((p: string) => minimatch(file, p))) return false + // altimate_change start — skip files that don't need marker protection + if (markerExcludePatterns.some((p) => minimatch(file, p))) return false + // 
altimate_change end + const ext = path.extname(file) + if (!config.transformableExtensions.includes(ext)) return false + + // Only flag files that actually exist in upstream. Files we created + // that don't exist upstream can't be overwritten by a merge. + const upstreamFiles = getUpstreamFiles(config) + if (upstreamFiles.size === 0) { + // Fallback: if upstream isn't available, use directory heuristic + return file.startsWith("packages/opencode/src/") + } + return upstreamFiles.has(file) +} + +function checkFileForMarkers(file: string, base?: string): MarkerWarning[] { + const { execSync } = require("child_process") + const root = repoRoot() + const warnings: MarkerWarning[] = [] + + const diffCmd = base + ? `git diff -U5 ${base}...HEAD -- "${file}"` + : `git diff -U5 HEAD -- "${file}"` + + let diffOutput: string + try { + diffOutput = execSync(diffCmd, { cwd: root, encoding: "utf-8" }) + } catch { + return warnings + } + + if (!diffOutput.trim()) return warnings + + const lines = diffOutput.split("\n") + let inMarkerBlock = false + let currentLine = 0 + let hasNewCode = false + let newCodeStart = 0 + let newCodeContext = "" + + for (const line of lines) { + const hunkMatch = line.match(/^@@ -\d+(?:,\d+)? 
\+(\d+)/) + if (hunkMatch) { + currentLine = parseInt(hunkMatch[1]) - 1 + continue + } + + if (line.startsWith("+") && !line.startsWith("+++")) { + currentLine++ + const content = line.slice(1).trim() + + if (content.includes("altimate_change start")) { inMarkerBlock = true; continue } + if (content.includes("altimate_change end")) { inMarkerBlock = false; continue } + if (content.includes("altimate_change")) continue + + if (!content) continue + if (content.startsWith("//") && !content.includes("TODO")) continue + if (content.startsWith("import ")) continue + if (content.startsWith("export ")) continue + + if (!inMarkerBlock) { + if (!hasNewCode) { + hasNewCode = true + newCodeStart = currentLine + newCodeContext = content + } + } + } else if (line.startsWith("-")) { + // deleted line, don't increment + } else { + currentLine++ + } + } + + if (hasNewCode) { + warnings.push({ + file, + line: newCodeStart, + context: newCodeContext.slice(0, 80), + reason: "New code added to upstream-shared file without altimate_change markers", + }) + } + + return warnings +} + +function runMarkerCheck(config: MergeConfig, base?: string, strict?: boolean): number { + const { execSync } = require("child_process") + const root = repoRoot() + + // Ensure upstream remote exists and is fetched so we can check file existence + try { + execSync(`git remote get-url ${config.upstreamRemote}`, { cwd: root, stdio: "ignore" }) + execSync(`git fetch ${config.upstreamRemote} --quiet`, { cwd: root, stdio: "ignore" }) + } catch { + // If upstream remote doesn't exist, fall back to pattern-only checks + logger.warn(`Could not fetch '${config.upstreamRemote}' remote — falling back to pattern-based detection`) + } + + const changedFiles = getChangedFiles(base) + const sharedFiles = changedFiles.filter((f) => isUpstreamShared(f, config)) + + if (sharedFiles.length === 0) { + logger.success("No upstream-shared files modified — no markers needed") + return 0 + } + + console.log(`Checking 
${sharedFiles.length} upstream-shared file(s) for altimate_change markers...\n`) + + const allWarnings: MarkerWarning[] = [] + for (const file of sharedFiles) { + const warnings = checkFileForMarkers(file, base) + allWarnings.push(...warnings) + } + + if (allWarnings.length === 0) { + logger.success("All custom code in upstream-shared files is properly marked") + return 0 + } + + console.log(`\n${YELLOW}⚠ Found ${allWarnings.length} file(s) with unmarked custom code:${RESET}\n`) + for (const w of allWarnings) { + console.log(` ${w.file}:${w.line}`) + console.log(` ${w.reason}`) + console.log(` Context: ${DIM}${w.context}${RESET}`) + console.log() + } + console.log("Wrap custom code with markers to protect from upstream overwrites:") + console.log(` ${DIM}// altimate_change start — description${RESET}`) + console.log(` ${DIM}... your code ...${RESET}`) + console.log(` ${DIM}// altimate_change end${RESET}`) + console.log() + + if (strict) { + logger.error("--strict mode — failing CI") + return 1 + } + logger.warn("Run with --strict to enforce in CI") + return 0 +} + +// --------------------------------------------------------------------------- +// Main +// --------------------------------------------------------------------------- + async function main(): Promise<void> { if (args.help) { printUsage() @@ -536,17 +744,18 @@ async function main(): Promise<void> { const config = loadConfig() - // Both --version and --branding can run together const hasVersion = Boolean(args.version) const hasBranding = Boolean(args.branding) + const hasMarkers = Boolean(args.markers) - if (!hasVersion && !hasBranding) { + if (!hasVersion && !hasBranding && !hasMarkers) { // Default: run marker analysis printMarkerAnalysis(config) console.log() logger.info("Use --version <tag> to analyze an upstream version") logger.info("Use --branding to audit for branding leaks") + logger.info("Use --markers --base main to check for missing markers") return } @@ -584,6 +793,15 @@ async function 
main(): Promise<void> { process.exit(1) } } + + // ─── Marker guard (CI mode) ─────────────────────────────────────────────── + + if (hasMarkers) { + const exitCode = runMarkerCheck(config, args.base, Boolean(args.strict)) + if (exitCode !== 0) { + process.exit(exitCode) + } + } } main().catch((e) => { diff --git a/script/upstream/merge-config.json b/script/upstream/merge-config.json deleted file mode 100644 index 5c625a9717..0000000000 --- a/script/upstream/merge-config.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "$comment": "Declarative config for upstream merge tooling", - "keepOurs": [ - "src/altimate/**", - "packages/altimate-engine/**", - "script/upstream/**", - "script/revert-renames.sh", - "README.md", - ".github/**", - "docs/**", - "experiments/**", - ".opencode/**", - "CHANGELOG.md", - "CODE_OF_CONDUCT.md", - "RELEASING.md" - ], - "skipFiles": [ - "packages/app/**", - "packages/console/**", - "packages/containers/**", - "packages/desktop/**", - "packages/desktop-electron/**", - "packages/docs/**", - "packages/enterprise/**", - "packages/extensions/**", - "packages/function/**", - "packages/identity/**", - "packages/slack/**", - "packages/storybook/**", - "packages/ui/**", - "packages/web/**", - "nix/**", - "flake.nix", - "flake.lock", - "infra/**", - "sst.config.ts", - "sst-env.d.ts", - "specs/**", - "README.*.md" - ], - "packageMappings": { - "@opencode-ai/opencode": "@altimateai/altimate-code", - "@opencode-ai/util": "@altimateai/altimate-code-util", - "@opencode-ai/plugin": "@altimateai/altimate-code-plugin", - "@opencode-ai/sdk": "@altimateai/altimate-code-sdk" - }, - "changeMarker": "altimate_change", - "upstreamRemote": "upstream", - "upstreamBranch": "dev" -} diff --git a/script/upstream/merge.ts b/script/upstream/merge.ts index c4d64f0f10..dade2dfbf2 100644 --- a/script/upstream/merge.ts +++ b/script/upstream/merge.ts @@ -21,6 +21,7 @@ import fs from "fs" import path from "path" import * as git from "./utils/git" import * as logger from 
"./utils/logger" +import { RESET, BOLD, DIM, CYAN, GREEN, RED, YELLOW, MAGENTA, bold, dim, cyan, green, red, yellow, banner } from "./utils/logger" import { loadConfig, repoRoot, type MergeConfig, type StringReplacement } from "./utils/config" import { createReport, addFileReport, printSummary, writeReport, type MergeReport, type FileReport, type Change } from "./utils/report" @@ -95,29 +96,6 @@ function printUsage(): void { `) } -const RESET = "\x1b[0m" -const BOLD = "\x1b[1m" -const DIM = "\x1b[2m" -const CYAN = "\x1b[36m" -const GREEN = "\x1b[32m" -const RED = "\x1b[31m" -const YELLOW = "\x1b[33m" -const MAGENTA = "\x1b[35m" - -function bold(s: string): string { return `${BOLD}${s}${RESET}` } -function dim(s: string): string { return `${DIM}${s}${RESET}` } -function cyan(s: string): string { return `${CYAN}${s}${RESET}` } -function green(s: string): string { return `${GREEN}${s}${RESET}` } -function red(s: string): string { return `${RED}${s}${RESET}` } -function yellow(s: string): string { return `${YELLOW}${s}${RESET}` } - -function banner(text: string): void { - const line = "═".repeat(60) - console.log(`\n${CYAN}${line}${RESET}`) - console.log(`${CYAN} ${BOLD}${text}${RESET}`) - console.log(`${CYAN}${line}${RESET}\n`) -} - function saveState(state: MergeState): void { const stateFile = path.join(repoRoot(), STATE_FILE) fs.writeFileSync(stateFile, JSON.stringify(state, null, 2)) @@ -301,49 +279,64 @@ async function autoResolveConflicts( const remaining: string[] = [] for (const file of conflicts) { - // Strategy 1: keepOurs — files we own entirely - const isKeepOurs = config.keepOurs.some((p) => minimatch(file, p)) - if (isKeepOurs) { - await $`git checkout --ours -- ${file}`.cwd(root).quiet() - await $`git add ${file}`.cwd(root).quiet() - resolved.push(file) - logger.success(`${file} ${dim("(kept ours)")}`) - continue - } + try { + // Strategy 1: keepOurs — files we own entirely + const isKeepOurs = config.keepOurs.some((p) => minimatch(file, p)) + if 
(isKeepOurs) { + await $`git checkout --ours -- ${file}`.cwd(root).quiet() + await $`git add ${file}`.cwd(root).quiet() + resolved.push(file) + logger.success(`${file} ${dim("(kept ours)")}`) + continue + } - // Strategy 2: skipFiles — upstream files we accept wholesale - const isSkipFile = config.skipFiles.some((p) => minimatch(file, p)) - if (isSkipFile) { - await $`git checkout --theirs -- ${file}`.cwd(root).quiet() - await $`git add ${file}`.cwd(root).quiet() - resolved.push(file) - logger.success(`${file} ${dim("(accepted upstream)")}`) - continue - } + // Strategy 2: skipFiles — upstream files we accept wholesale + const isSkipFile = config.skipFiles.some((p) => minimatch(file, p)) + if (isSkipFile) { + // File may have been deleted on one side; try --theirs first, fall back to removing + try { + await $`git checkout --theirs -- ${file}`.cwd(root).quiet() + } catch { + // File deleted on upstream side — accept the deletion + await $`git rm --force --ignore-unmatch -- ${file}`.cwd(root).quiet() + } + await $`git add ${file}`.cwd(root).quiet() + resolved.push(file) + logger.success(`${file} ${dim("(accepted upstream)")}`) + continue + } - // Strategy 3: Lock files — accept ours, will regenerate later - if (file === "bun.lock" || file.endsWith("/bun.lock") || - file === "package-lock.json" || file.endsWith("/package-lock.json")) { - await $`git checkout --ours -- ${file}`.cwd(root).quiet() - await $`git add ${file}`.cwd(root).quiet() - resolved.push(file) - logger.success(`${file} ${dim("(kept ours, will regenerate)")}`) - continue - } + // Strategy 3: Lock files — accept ours, will regenerate later + if (file === "bun.lock" || file.endsWith("/bun.lock") || + file === "package-lock.json" || file.endsWith("/package-lock.json")) { + await $`git checkout --ours -- ${file}`.cwd(root).quiet() + await $`git add ${file}`.cwd(root).quiet() + resolved.push(file) + logger.success(`${file} ${dim("(kept ours, will regenerate)")}`) + continue + } - // Strategy 4: Binary 
files — accept upstream - const binaryExts = [".png", ".jpg", ".jpeg", ".gif", ".ico", ".woff", ".woff2", - ".ttf", ".eot", ".pyc", ".whl", ".gz", ".zip", ".tar", - ".svg", ".webp", ".avif"] - if (binaryExts.some((ext) => file.endsWith(ext))) { - await $`git checkout --theirs -- ${file}`.cwd(root).quiet() - await $`git add ${file}`.cwd(root).quiet() - resolved.push(file) - logger.success(`${file} ${dim("(binary, accepted upstream)")}`) - continue - } + // Strategy 4: Binary files — accept upstream + const binaryExts = [".png", ".jpg", ".jpeg", ".gif", ".ico", ".woff", ".woff2", + ".ttf", ".eot", ".pyc", ".whl", ".gz", ".zip", ".tar", + ".svg", ".webp", ".avif"] + if (binaryExts.some((ext) => file.endsWith(ext))) { + try { + await $`git checkout --theirs -- ${file}`.cwd(root).quiet() + } catch { + await $`git rm --force --ignore-unmatch -- ${file}`.cwd(root).quiet() + } + await $`git add ${file}`.cwd(root).quiet() + resolved.push(file) + logger.success(`${file} ${dim("(binary, accepted upstream)")}`) + continue + } - remaining.push(file) + remaining.push(file) + } catch (e: any) { + logger.warn(`Could not auto-resolve ${file}: ${e.message || e}`) + remaining.push(file) + } } return { resolved, remaining } @@ -748,10 +741,14 @@ async function continueAfterConflicts(config: MergeConfig): Promise<void> { logger.success("All conflicts resolved") - // Commit the merge + // Commit the merge (skip if already committed) logger.info("Committing merge...") - await $`git commit --no-edit`.cwd(root).quiet() - logger.success("Merge committed") + try { + await $`git commit --no-edit`.cwd(root).quiet() + logger.success("Merge committed") + } catch { + logger.info("Merge already committed, continuing...") + } // Use pre-merge version snapshot from saved state, fall back to current if not available const versionSnapshot = state.versionSnapshot ?? 
snapshotVersions() @@ -759,6 +756,58 @@ async function continueAfterConflicts(config: MergeConfig): Promise<void> { await postMergeTransforms(config, state.version, versionSnapshot) } +// --------------------------------------------------------------------------- +// skipFiles cleanup +// --------------------------------------------------------------------------- + +/** + * Delete files/directories matching skipFiles patterns that exist in the repo. + * These are upstream packages we don't need — the merge may have re-introduced them. + */ +async function cleanupSkipFiles(config: MergeConfig): Promise<void> { + const { minimatch } = await import("minimatch") + const root = repoRoot() + + // Get all tracked files and find those matching skipFiles patterns + const trackedFiles = await git.getTrackedFiles() + const toRemove = trackedFiles.filter((f) => + config.skipFiles.some((p) => minimatch(f, p)), + ) + + if (toRemove.length === 0) { + logger.info("No skipFiles to clean up") + return + } + + logger.info(`Removing ${toRemove.length} file(s) matching skipFiles patterns...`) + + // Remove via git rm in batches + const batchSize = 100 + for (let i = 0; i < toRemove.length; i += batchSize) { + const batch = toRemove.slice(i, i + batchSize) + try { + await $`git rm -rf --ignore-unmatch -- ${batch}`.cwd(root).quiet() + } catch (e: any) { + logger.warn(`Some skipFiles could not be removed: ${e.message || e}`) + } + } + + // Also delete leftover empty directories for skipFiles directory patterns + for (const pattern of config.skipFiles) { + // Only handle directory-level patterns (ending with /**) + if (!pattern.endsWith("/**")) continue + const dirPath = path.join(root, pattern.replace("/**", "")) + if (fs.existsSync(dirPath)) { + try { + fs.rmSync(dirPath, { recursive: true, force: true }) + logger.success(`Removed directory: ${pattern.replace("/**", "")}`) + } catch { + // Best effort + } + } + } +} + // 
--------------------------------------------------------------------------- // Post-merge transforms (steps 7-11) // --------------------------------------------------------------------------- @@ -771,9 +820,13 @@ async function postMergeTransforms( const root = repoRoot() const report = createReport(version) - // ─── Step 7: Apply branding transforms ──────────────────────────────────── + // ─── Step 7: Clean up skipFiles and apply branding transforms ───────────── + + logger.step(7, TOTAL_STEPS, "Cleaning up skipFiles and applying branding transforms") + + // Delete skipFiles directories/files that may have been introduced by the merge + await cleanupSkipFiles(config) - logger.step(7, TOTAL_STEPS, "Applying branding transforms") await applyBrandingTransforms(config, report) // ─── Step 8: Restore package versions ───────────────────────────────────── diff --git a/script/upstream/transforms/branding.ts b/script/upstream/transforms/branding.ts deleted file mode 100644 index 7c726ed372..0000000000 --- a/script/upstream/transforms/branding.ts +++ /dev/null @@ -1,298 +0,0 @@ -/** - * Core branding transform for upstream merge automation. - * - * Applies string-level replacements to convert OpenCode/anomalyco references - * to Altimate Code/AltimateAI. This is the heart of the branding system — - * every merge from upstream runs through these transforms to rebrand the - * codebase in a single pass. - * - * Rule application order matters: - * 1. URL rules (always apply — unambiguous domain swaps) - * 2. GitHub repo rules (always apply) - * 3. Container registry rules (always apply) - * 4. Email rules (always apply) - * 5. App identifier rules (always apply) - * 6. Social rules (always apply) - * 7. npm install rules (always apply — they match specific command patterns) - * 8. CLI binary rules (always apply) - * 9. 
Product name rules — ONLY if the line is NOT preserved - * - * Preservation patterns protect internal code references (imports, env vars, - * directory names) from being mangled by product name transforms. - */ - -import { Glob } from "bun" -import path from "path" -import { minimatch } from "minimatch" -import { - defaultConfig, - loadConfig, - repoRoot, - type MergeConfig, - type StringReplacement, -} from "../utils/config" -import type { Change, FileReport } from "../utils/report" -import { noChanges } from "../utils/report" - -// --------------------------------------------------------------------------- -// Directories and paths to skip when walking the repo tree. -// These never contain transformable source code. -// --------------------------------------------------------------------------- - -const SKIP_DIRS = new Set([ - "node_modules", - ".git", - ".venv", - "dist", - ".next", - ".turbo", - ".cache", -]) - -// --------------------------------------------------------------------------- -// Public API -// --------------------------------------------------------------------------- - -/** - * Check if a line should be preserved (not have product name transforms applied). - * Returns true if the line contains any preservation pattern. - * - * Preservation patterns are substrings like `@opencode-ai/`, `OPENCODE_`, etc. - * that indicate internal code references. URL and GitHub transforms are - * unambiguous and still apply to preserved lines — only the generic product - * name swap ("OpenCode" -> "Altimate Code") is skipped. - */ -export function shouldPreserveLine( - line: string, - preservePatterns: string[], -): boolean { - return preservePatterns.some((pattern) => line.includes(pattern)) -} - -/** - * Apply all branding replacements to a single line of text. - * - * Respects preservation patterns for product name transforms. 
- * URL/GitHub/registry/email/app-id/social/npm-install/cli-binary transforms - * always apply because they are unambiguous string swaps. Product name - * transforms ("OpenCode" -> "Altimate Code") only apply when the line is - * NOT preserved. - * - * @param line - The original line of text. - * @param config - The merge configuration containing all rule sets. - * @returns An object with the (possibly transformed) line and a list of changes. - */ -export function transformLine( - line: string, - config: MergeConfig, -): { line: string; changes: Change[] } { - const changes: Change[] = [] - let current = line - - // Helper: apply a set of rules and track changes. - function applyRules(rules: StringReplacement[]) { - for (const rule of rules) { - // Reset lastIndex for sticky/global regexes - rule.pattern.lastIndex = 0 - if (rule.pattern.test(current)) { - rule.pattern.lastIndex = 0 - const before = current - current = current.replace(rule.pattern, rule.replacement) - if (current !== before) { - changes.push({ - description: rule.description, - rule: rule.description, - // Line number is filled in by the caller (applyBranding) - line: 0, - before, - after: current, - }) - } - } - } - } - - // 1-8: These rule categories always apply — they are unambiguous. - // The brandingRules array contains ALL rules in specificity order, - // but we need to separate product name rules for preservation logic. - // Use the dedicated rule arrays from config to apply non-product-name - // rules first, then conditionally apply product name rules. - // - // The brandingRules array is the union of all categories. Rather than - // rely on that flat list, we apply each category's rules directly from - // the config so we can gate product name rules on preservation. - - // URL, GitHub, registry, email, app-id, social, npm-install, and - // cli-binary rules are all embedded in brandingRules but NOT in - // productNameRules. 
We apply everything in brandingRules EXCEPT - // productNameRules first (those are the "always apply" set). - const alwaysApplyRules = config.brandingRules.filter( - (rule) => !config.productNameRules.includes(rule), - ) - applyRules(alwaysApplyRules) - - // 9: Product name rules — only apply if the line is NOT preserved. - const preserved = shouldPreserveLine(current, config.preservePatterns) - if (!preserved) { - applyRules(config.productNameRules) - } - - return { line: current, changes } -} - -/** - * Apply all branding transforms to file content. - * - * Splits the content line-by-line, transforms each line, and reassembles. - * Returns the modified content and a flat list of all changes with correct - * line numbers. - * - * @param content - The full file content as a string. - * @param filePath - Relative path (used only for diagnostics, not I/O). - * @param config - Optional override config; defaults to defaultConfig. - * @returns Modified content and list of changes. - */ -export function applyBranding( - content: string, - filePath: string, - config: MergeConfig = defaultConfig, -): { content: string; changes: Change[] } { - const lines = content.split("\n") - const allChanges: Change[] = [] - const transformed: string[] = [] - - for (let i = 0; i < lines.length; i++) { - const result = transformLine(lines[i], config) - - // Stamp the correct 1-based line number on each change - for (const change of result.changes) { - change.line = i + 1 - allChanges.push(change) - } - - transformed.push(result.line) - } - - return { - content: transformed.join("\n"), - changes: allChanges, - } -} - -/** - * Apply branding transforms to a single file on disk. - * - * Reads the file, applies all branding rules via `applyBranding`, and - * optionally writes the result back. Returns a FileReport describing - * what was changed. - * - * @param filePath - Absolute or repo-relative path to the file. - * @param options - `dryRun: true` skips the write step. 
- * @returns A FileReport with the list of changes. - */ -export async function transformFile( - filePath: string, - options?: { dryRun?: boolean }, -): Promise<FileReport> { - const config = loadConfig() - const root = repoRoot() - const absPath = path.isAbsolute(filePath) - ? filePath - : path.join(root, filePath) - const relPath = path.relative(root, absPath) - - // Only transform files with recognized extensions - const ext = path.extname(absPath).toLowerCase() - if (!config.transformableExtensions.includes(ext)) { - return noChanges(relPath) - } - - // Read file content using Bun's file API - const file = Bun.file(absPath) - if (!(await file.exists())) { - return noChanges(relPath) - } - - const content = await file.text() - const { content: branded, changes } = applyBranding(content, relPath, config) - - if (changes.length === 0) { - return noChanges(relPath) - } - - // Write the transformed content back unless this is a dry run - if (!options?.dryRun) { - await Bun.write(absPath, branded) - } - - return { - filePath: relPath, - applied: !options?.dryRun, - changes: changes.map((c) => ({ - description: `${c.rule ?? c.description} (L${c.line})`, - before: c.before?.trim(), - after: c.after?.trim(), - line: c.line, - rule: c.rule, - })), - } -} - -/** - * Apply branding transforms to all transformable files in the repo. - * - * Walks the repository tree using Bun's Glob, respects keepOurs patterns - * (those files are ours and shouldn't be re-branded), and only processes - * files whose extensions are in `transformableExtensions`. - * - * @param options - `dryRun: true` skips writes; `verbose: true` logs each file. - * @returns An array of FileReports for files that had changes. - */ -export async function transformAll( - options?: { dryRun?: boolean; verbose?: boolean }, -): Promise<FileReport[]> { - const config = loadConfig() - const root = repoRoot() - const reports: FileReport[] = [] - - // Build a glob pattern that matches all transformable extensions. 
- // Bun's Glob doesn't support {a,b,c} alternation in all cases, - // so we run one glob per extension. - for (const ext of config.transformableExtensions) { - // ext includes the leading dot, e.g. ".ts" - const pattern = `**/*${ext}` - const glob = new Glob(pattern) - - for await (const relPath of glob.scan({ - cwd: root, - dot: false, - onlyFiles: true, - })) { - // Skip files inside ignored directories - const parts = relPath.split(path.sep) - if (parts.some((part) => SKIP_DIRS.has(part))) { - continue - } - - // Skip files matching keepOurs — those are our custom files and - // should not have upstream branding transforms applied to them. - const isKept = config.keepOurs.some((pattern) => - minimatch(relPath, pattern), - ) - if (isKept) { - continue - } - - if (options?.verbose) { - console.log(` scanning: ${relPath}`) - } - - const report = await transformFile(path.join(root, relPath), options) - if (report.changes.length > 0) { - reports.push(report) - } - } - } - - return reports -} diff --git a/script/upstream/transforms/extensions.ts b/script/upstream/transforms/extensions.ts deleted file mode 100644 index d9f8fa47c7..0000000000 --- a/script/upstream/transforms/extensions.ts +++ /dev/null @@ -1,190 +0,0 @@ -import fs from "fs" -import path from "path" -import { repoRoot } from "../utils/config" -import type { FileReport, Change } from "../utils/report" -import { noChanges } from "../utils/report" - -/** Extension files to transform. */ -const ZED_EXTENSION = "packages/extensions/zed/extension.toml" -const VSCODE_PACKAGE = "sdks/vscode/package.json" - -/** Transform the Zed extension.toml file. 
*/ -async function transformZedExtension( - options?: { dryRun?: boolean }, -): Promise<FileReport> { - const root = repoRoot() - const absPath = path.join(root, ZED_EXTENSION) - - if (!fs.existsSync(absPath)) { - return noChanges(ZED_EXTENSION) - } - - let content = fs.readFileSync(absPath, "utf-8") - const changes: Change[] = [] - - const replacements: Array<{ match: RegExp; replacement: string; description: string }> = [ - { - match: /^id\s*=\s*"opencode"/m, - replacement: 'id = "altimate-code"', - description: "extension id", - }, - { - match: /^name\s*=\s*"OpenCode"/m, - replacement: 'name = "Altimate Code"', - description: "extension name", - }, - { - match: /^authors\s*=\s*\["Anomaly"\]/m, - replacement: 'authors = ["Altimate AI"]', - description: "extension authors", - }, - { - match: /\[agent_servers\.opencode\]/g, - replacement: "[agent_servers.altimate-code]", - description: "agent_servers section name", - }, - { - match: /opencode-aarch64-apple-darwin/g, - replacement: "altimate-code-aarch64-apple-darwin", - description: "binary name (aarch64-apple-darwin)", - }, - { - match: /opencode-x86_64-apple-darwin/g, - replacement: "altimate-code-x86_64-apple-darwin", - description: "binary name (x86_64-apple-darwin)", - }, - { - match: /opencode-aarch64-unknown-linux/g, - replacement: "altimate-code-aarch64-unknown-linux", - description: "binary name (aarch64-unknown-linux)", - }, - { - match: /opencode-x86_64-unknown-linux/g, - replacement: "altimate-code-x86_64-unknown-linux", - description: "binary name (x86_64-unknown-linux)", - }, - { - match: /opencode-x86_64-pc-windows/g, - replacement: "altimate-code-x86_64-pc-windows", - description: "binary name (x86_64-pc-windows)", - }, - { - match: /cmd\s*=\s*"\.\/opencode"/g, - replacement: 'cmd = "./altimate-code"', - description: "agent command", - }, - { - match: /anomalyco\/opencode/g, - replacement: "AltimateAI/altimate-code", - description: "GitHub owner/repo in archive URLs", - }, - ] - - for (const r of 
replacements) { - const before = content - content = content.replace(r.match, r.replacement) - if (content !== before) { - changes.push({ description: r.description }) - } - } - - if (changes.length === 0) { - return noChanges(ZED_EXTENSION) - } - - if (!options?.dryRun) { - fs.writeFileSync(absPath, content, "utf-8") - } - - return { filePath: ZED_EXTENSION, applied: !options?.dryRun, changes } -} - -/** Transform the VSCode extension package.json. */ -async function transformVscodeExtension( - options?: { dryRun?: boolean }, -): Promise<FileReport> { - const root = repoRoot() - const absPath = path.join(root, VSCODE_PACKAGE) - - if (!fs.existsSync(absPath)) { - return noChanges(VSCODE_PACKAGE) - } - - const content = fs.readFileSync(absPath, "utf-8") - let pkg: any - - try { - pkg = JSON.parse(content) - } catch { - return noChanges(VSCODE_PACKAGE) - } - - const changes: Change[] = [] - - // Name - if (pkg.name && pkg.name.includes("opencode")) { - const updated = pkg.name.replace(/opencode/g, "altimate-code") - changes.push({ description: "extension name", before: pkg.name, after: updated }) - pkg.name = updated - } - - // Display name - if (pkg.displayName && pkg.displayName.includes("OpenCode")) { - const updated = pkg.displayName.replace(/OpenCode/g, "Altimate Code") - changes.push({ description: "displayName", before: pkg.displayName, after: updated }) - pkg.displayName = updated - } - - // Description - if (typeof pkg.description === "string" && pkg.description.includes("OpenCode")) { - const updated = pkg.description.replace(/OpenCode/g, "Altimate Code") - changes.push({ description: "description", before: pkg.description, after: updated }) - pkg.description = updated - } - - // Publisher - if (pkg.publisher === "anomalyco") { - changes.push({ description: "publisher", before: pkg.publisher, after: "altimateai" }) - pkg.publisher = "altimateai" - } - - // Repository URL - if (typeof pkg.repository?.url === "string" && 
pkg.repository.url.includes("anomalyco")) { - const updated = pkg.repository.url.replace(/anomalyco\/opencode/g, "AltimateAI/altimate-code") - changes.push({ description: "repository.url", before: pkg.repository.url, after: updated }) - pkg.repository.url = updated - } - - // Command titles in contributes.commands - if (Array.isArray(pkg.contributes?.commands)) { - for (const cmd of pkg.contributes.commands) { - if (typeof cmd.title === "string" && cmd.title.includes("OpenCode")) { - const updated = cmd.title.replace(/OpenCode/g, "Altimate Code") - changes.push({ description: `command title "${cmd.command}"`, before: cmd.title, after: updated }) - cmd.title = updated - } - } - } - - if (changes.length === 0) { - return noChanges(VSCODE_PACKAGE) - } - - if (!options?.dryRun) { - fs.writeFileSync(absPath, JSON.stringify(pkg, null, 2) + "\n", "utf-8") - } - - return { filePath: VSCODE_PACKAGE, applied: !options?.dryRun, changes } -} - -/** Transform all editor extension files. */ -export async function transformExtensions( - options?: { dryRun?: boolean }, -): Promise<FileReport[]> { - const reports: FileReport[] = [] - - reports.push(await transformZedExtension(options)) - reports.push(await transformVscodeExtension(options)) - - return reports -} diff --git a/script/upstream/transforms/i18n.ts b/script/upstream/transforms/i18n.ts deleted file mode 100644 index b36bdf85dc..0000000000 --- a/script/upstream/transforms/i18n.ts +++ /dev/null @@ -1,103 +0,0 @@ -import fs from "fs" -import path from "path" -import { repoRoot } from "../utils/config" -import type { FileReport, Change } from "../utils/report" -import { noChanges } from "../utils/report" - -/** Glob patterns for i18n translation files. */ -const I18N_PATTERNS = [ - "packages/app/src/i18n", - "packages/desktop/src/i18n", - "packages/desktop-electron/src/renderer/i18n", - "packages/console/app/src/i18n", - "packages/ui/src/i18n", -] - -/** String replacements to apply in translation files. 
*/ -const TRANSLATION_REPLACEMENTS: Array<{ - match: RegExp - replacement: string - description: string -}> = [ - { - match: /OpenCode Desktop/g, - replacement: "Altimate Code Desktop", - description: '"OpenCode Desktop" product name', - }, - { - match: /OpenCode/g, - replacement: "Altimate Code", - description: '"OpenCode" product name', - }, - { - match: /opencode\.ai/g, - replacement: "altimate.ai", - description: "URL reference", - }, -] - -/** Find all TypeScript files in an i18n directory. */ -function findI18nFiles(dirPath: string): string[] { - if (!fs.existsSync(dirPath)) return [] - - const files: string[] = [] - for (const entry of fs.readdirSync(dirPath, { withFileTypes: true })) { - if (entry.isFile() && (entry.name.endsWith(".ts") || entry.name.endsWith(".tsx"))) { - files.push(path.join(dirPath, entry.name)) - } - } - return files -} - -/** Transform a single i18n translation file. */ -async function transformI18nFile( - absPath: string, - relPath: string, - options?: { dryRun?: boolean }, -): Promise<FileReport> { - if (!fs.existsSync(absPath)) { - return noChanges(relPath) - } - - let content = fs.readFileSync(absPath, "utf-8") - const changes: Change[] = [] - - // Apply replacements in order (more specific first — "OpenCode Desktop" before "OpenCode") - for (const r of TRANSLATION_REPLACEMENTS) { - const before = content - content = content.replace(r.match, r.replacement) - if (content !== before) { - changes.push({ description: r.description }) - } - } - - if (changes.length === 0) { - return noChanges(relPath) - } - - if (!options?.dryRun) { - fs.writeFileSync(absPath, content, "utf-8") - } - - return { filePath: relPath, applied: !options?.dryRun, changes } -} - -/** Transform all i18n translation files. 
*/ -export async function transformI18n( - options?: { dryRun?: boolean }, -): Promise<FileReport[]> { - const root = repoRoot() - const reports: FileReport[] = [] - - for (const pattern of I18N_PATTERNS) { - const dirPath = path.join(root, pattern) - const files = findI18nFiles(dirPath) - - for (const absPath of files) { - const relPath = path.relative(root, absPath) - reports.push(await transformI18nFile(absPath, relPath, options)) - } - } - - return reports -} diff --git a/script/upstream/transforms/keep-ours.ts b/script/upstream/transforms/keep-ours.ts deleted file mode 100644 index ab2c537081..0000000000 --- a/script/upstream/transforms/keep-ours.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { minimatch } from "minimatch" -import { git, conflictedFiles } from "../utils/git" -import { loadConfig } from "../utils/config" - -/** Check if a file path matches any keepOurs pattern. */ -export function shouldKeepOurs(filePath: string): boolean { - const config = loadConfig() - return config.keepOurs.some((pattern) => minimatch(filePath, pattern)) -} - -/** - * For conflicted files matching keepOurs patterns, resolve by keeping our version. - * Uses `git checkout HEAD -- <file>` to restore our version. - * Returns the list of resolved file paths. - */ -export function resolveKeepOurs(): { resolved: string[] } { - const conflicts = conflictedFiles() - const resolved: string[] = [] - - for (const file of conflicts) { - if (shouldKeepOurs(file)) { - git(`checkout --ours -- "${file}"`) - git(`add "${file}"`) - resolved.push(file) - } - } - - return { resolved } -} - -/** Reset all keepOurs files to our version (git checkout HEAD -- <file>). 
*/ -export async function resetKeepOursFiles(): Promise<string[]> { - const config = loadConfig() - const output = git("diff --name-only HEAD") - const modifiedFiles = output - .split("\n") - .filter((f) => f.length > 0) - - const restored: string[] = [] - - for (const file of modifiedFiles) { - if (shouldKeepOurs(file)) { - git(`checkout HEAD -- "${file}"`) - restored.push(file) - } - } - - return restored -} - -/** Get list of keepOurs glob patterns. */ -export function getKeepOursList(): string[] { - const config = loadConfig() - return [...config.keepOurs] -} diff --git a/script/upstream/transforms/lock-files.ts b/script/upstream/transforms/lock-files.ts deleted file mode 100644 index 1078a8127b..0000000000 --- a/script/upstream/transforms/lock-files.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { execSync } from "child_process" -import { git, conflictedFiles } from "../utils/git" -import { repoRoot } from "../utils/config" - -/** - * For lock files (bun.lock), accept ours during merge, then regenerate. - */ -export function resolveLockFiles(): string[] { - const conflicts = conflictedFiles() - const resolved: string[] = [] - - for (const file of conflicts) { - if (file === "bun.lock" || file.endsWith("/bun.lock")) { - git(`checkout --ours -- "${file}"`) - git(`add "${file}"`) - resolved.push(file) - } - } - - return resolved -} - -/** - * Regenerate the lock file after merge is complete. 
- */ -export function regenerateLockFile(): void { - console.log(" Regenerating bun.lock...") - execSync("bun install", { - cwd: repoRoot(), - stdio: "inherit", - }) - git("add bun.lock") -} diff --git a/script/upstream/transforms/nix.ts b/script/upstream/transforms/nix.ts deleted file mode 100644 index cef0a6a4be..0000000000 --- a/script/upstream/transforms/nix.ts +++ /dev/null @@ -1,119 +0,0 @@ -import fs from "fs" -import path from "path" -import { repoRoot } from "../utils/config" -import type { FileReport, Change } from "../utils/report" -import { noChanges } from "../utils/report" - -/** Nix files to transform. */ -const NIX_FILES = ["nix/opencode.nix", "nix/desktop.nix"] - -/** - * Patterns that MUST be preserved (not transformed). - * These are internal Nix derivation references, env vars, or path segments. - */ -const PRESERVE_PATTERNS = [ - /pname\s*=\s*"opencode"/, // Internal derivation name - /OPENCODE_/, // Environment variables - /packages\/opencode/, // Source path references - /inherit\s+\(opencode\)/, // Nix inherit expressions - /opencode\s*\)/, // Nix function parameter -] - -/** Check if a line contains a pattern that should be preserved. */ -function shouldPreserveLine(line: string): boolean { - return PRESERVE_PATTERNS.some((p) => p.test(line)) -} - -/** Transform a single Nix file. 
*/ -async function transformNixFile( - relPath: string, - options?: { dryRun?: boolean }, -): Promise<FileReport> { - const root = repoRoot() - const absPath = path.join(root, relPath) - - if (!fs.existsSync(absPath)) { - return noChanges(relPath) - } - - const content = fs.readFileSync(absPath, "utf-8") - const lines = content.split("\n") - const changes: Change[] = [] - const updatedLines: string[] = [] - - for (let i = 0; i < lines.length; i++) { - let line = lines[i] - - // Skip lines with patterns we must preserve - if (shouldPreserveLine(line)) { - updatedLines.push(line) - continue - } - - const original = line - - // Install path: $out/bin/opencode -> $out/bin/altimate-code - line = line.replace( - /\$out\/bin\/opencode(?!-desktop)/g, - "$out/bin/altimate-code", - ) - - // Desktop binary: opencode-desktop -> altimate-code-desktop - line = line.replace(/opencode-desktop/g, "altimate-code-desktop") - - // Share path: $out/share/opencode/ -> $out/share/altimate-code/ - line = line.replace(/\$out\/share\/opencode\//g, "$out/share/altimate-code/") - - // Shell completion: --cmd opencode -> --cmd altimate-code - line = line.replace(/--cmd\s+opencode/g, "--cmd altimate-code") - - // mainProgram: mainProgram = "opencode" -> mainProgram = "altimate-code" - line = line.replace( - /mainProgram\s*=\s*"opencode"/g, - 'mainProgram = "altimate-code"', - ) - - // meta.description: ensure branded - line = line.replace( - /description\s*=\s*"[^"]*[Oo]pen[Cc]ode[^"]*"/g, - (match) => match.replace(/OpenCode/g, "Altimate Code").replace(/opencode/g, "altimate-code"), - ) - - // meta.homepage: ensure altimate.ai - line = line.replace(/opencode\.ai/g, "altimate.ai") - - if (line !== original) { - changes.push({ - description: `line ${i + 1}`, - before: original.trim(), - after: line.trim(), - line: i + 1, - }) - } - - updatedLines.push(line) - } - - if (changes.length === 0) { - return noChanges(relPath) - } - - if (!options?.dryRun) { - fs.writeFileSync(absPath, 
updatedLines.join("\n"), "utf-8") - } - - return { filePath: relPath, applied: !options?.dryRun, changes } -} - -/** Transform all Nix package files. */ -export async function transformNix( - options?: { dryRun?: boolean }, -): Promise<FileReport[]> { - const reports: FileReport[] = [] - - for (const file of NIX_FILES) { - reports.push(await transformNixFile(file, options)) - } - - return reports -} diff --git a/script/upstream/transforms/package-json.ts b/script/upstream/transforms/package-json.ts deleted file mode 100644 index 868d88a052..0000000000 --- a/script/upstream/transforms/package-json.ts +++ /dev/null @@ -1,210 +0,0 @@ -import fs from "fs" -import path from "path" -import { repoRoot } from "../utils/config" -import type { FileReport, Change } from "../utils/report" -import { noChanges } from "../utils/report" - -/** The main CLI package.json that gets special bin/name treatment. */ -const MAIN_PACKAGE = "packages/opencode/package.json" - -/** Fields to transform across all package.json files. */ -const REPO_TRANSFORMS: Array<{ - path: string[] - match: string | RegExp - replacement: string - description: string -}> = [ - { - path: ["repository", "url"], - match: /anomalyco\/opencode/g, - replacement: "AltimateAI/altimate-code", - description: "repository.url owner/repo", - }, - { - path: ["homepage"], - match: /opencode\.ai/g, - replacement: "altimate.ai", - description: "homepage URL", - }, - { - path: ["author", "name"], - match: "Anomaly", - replacement: "Altimate AI", - description: "author.name", - }, - { - path: ["publisher"], - match: "anomalyco", - replacement: "altimateai", - description: "publisher", - }, -] - -/** Get a nested value from an object by path. */ -function getByPath(obj: any, pathParts: string[]): any { - let current = obj - for (const key of pathParts) { - if (current == null || typeof current !== "object") return undefined - current = current[key] - } - return current -} - -/** Set a nested value in an object by path. 
*/ -function setByPath(obj: any, pathParts: string[], value: any): void { - let current = obj - for (let i = 0; i < pathParts.length - 1; i++) { - const key = pathParts[i] - if (current[key] == null || typeof current[key] !== "object") return - current = current[key] - } - current[pathParts[pathParts.length - 1]] = value -} - -/** Transform a package.json file with Altimate Code branding. */ -export async function transformPackageJson( - filePath: string, - options?: { dryRun?: boolean }, -): Promise<FileReport> { - const root = repoRoot() - const absPath = path.isAbsolute(filePath) - ? filePath - : path.join(root, filePath) - const relPath = path.relative(root, absPath) - - if (!fs.existsSync(absPath)) { - return noChanges(relPath) - } - - const content = fs.readFileSync(absPath, "utf-8") - let pkg: any - - try { - pkg = JSON.parse(content) - } catch { - return noChanges(relPath) - } - - const changes: Change[] = [] - - // Main package special transforms - if (relPath === MAIN_PACKAGE) { - // Name - if (pkg.name && pkg.name !== "@altimateai/altimate-code") { - changes.push({ - description: "package name", - before: pkg.name, - after: "@altimateai/altimate-code", - }) - pkg.name = "@altimateai/altimate-code" - } - - // Bin entries - const currentBin = pkg.bin || {} - const desiredBin: Record<string, string> = { - "altimate-code": "./bin/cli.mjs", - altimate: "./bin/cli.mjs", - } - - // Remove upstream opencode bin entry - if ("opencode" in currentBin) { - changes.push({ - description: 'remove upstream "opencode" bin entry', - before: `"opencode": "${currentBin.opencode}"`, - after: "(removed)", - }) - delete currentBin.opencode - } - - // Add our bin entries - for (const [name, target] of Object.entries(desiredBin)) { - if (currentBin[name] !== target) { - changes.push({ - description: `bin entry "${name}"`, - before: currentBin[name] ?? 
"(missing)", - after: target, - }) - currentBin[name] = target - } - } - - pkg.bin = desiredBin - } - - // General transforms for all package.json files - for (const transform of REPO_TRANSFORMS) { - const current = getByPath(pkg, transform.path) - if (current === undefined || typeof current !== "string") continue - - const updated = - typeof transform.match === "string" - ? current.replace(transform.match, transform.replacement) - : current.replace(transform.match, transform.replacement) - - if (updated !== current) { - changes.push({ - description: transform.description, - before: current, - after: updated, - }) - setByPath(pkg, transform.path, updated) - } - } - - // Update author.email if it contains opencode - const authorEmail = getByPath(pkg, ["author", "email"]) - if (typeof authorEmail === "string" && authorEmail.includes("opencode")) { - const updated = authorEmail.replace(/opencode/g, "altimate-code") - changes.push({ - description: "author.email", - before: authorEmail, - after: updated, - }) - setByPath(pkg, ["author", "email"], updated) - } - - if (changes.length === 0) { - return noChanges(relPath) - } - - if (!options?.dryRun) { - fs.writeFileSync(absPath, JSON.stringify(pkg, null, 2) + "\n", "utf-8") - } - - return { filePath: relPath, applied: !options?.dryRun, changes } -} - -/** Recursively find all package.json files, skipping node_modules and .venv. */ -function findPackageJsonFiles(dir: string): string[] { - const skipDirs = new Set(["node_modules", ".git", ".venv", "dist"]) - const results: string[] = [] - - for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { - if (skipDirs.has(entry.name)) continue - const fullPath = path.join(dir, entry.name) - - if (entry.isDirectory()) { - results.push(...findPackageJsonFiles(fullPath)) - } else if (entry.name === "package.json") { - results.push(fullPath) - } - } - - return results -} - -/** Transform all package.json files in the repo. 
*/ -export async function transformAllPackageJson( - options?: { dryRun?: boolean }, -): Promise<FileReport[]> { - const root = repoRoot() - const files = findPackageJsonFiles(root) - const reports: FileReport[] = [] - - for (const file of files) { - const report = await transformPackageJson(file, options) - reports.push(report) - } - - return reports -} diff --git a/script/upstream/transforms/preserve-versions.ts b/script/upstream/transforms/preserve-versions.ts deleted file mode 100644 index c4b233182f..0000000000 --- a/script/upstream/transforms/preserve-versions.ts +++ /dev/null @@ -1,89 +0,0 @@ -import fs from "fs" -import path from "path" -import { repoRoot } from "../utils/config" -import type { FileReport, Change } from "../utils/report" - -/** Files whose versions we preserve during upstream merges. */ -const VERSION_FILES = [ - "packages/opencode/package.json", - "packages/desktop/package.json", - "packages/desktop-electron/package.json", - "sdks/vscode/package.json", - "packages/extensions/zed/extension.toml", -] as const - -/** Read the version from a file. Supports package.json and extension.toml. */ -function readVersion(absPath: string): string | null { - if (!fs.existsSync(absPath)) return null - - const content = fs.readFileSync(absPath, "utf-8") - - if (absPath.endsWith(".toml")) { - // Parse version from TOML: version = "x.y.z" - const match = content.match(/^version\s*=\s*"([^"]+)"/m) - return match ? match[1] : null - } - - // JSON package.json - try { - const pkg = JSON.parse(content) - return pkg.version ?? null - } catch { - return null - } -} - -/** Write a version back into a file. 
*/ -function writeVersion(absPath: string, version: string): void { - const content = fs.readFileSync(absPath, "utf-8") - - if (absPath.endsWith(".toml")) { - const updated = content.replace( - /^(version\s*=\s*)"[^"]+"/m, - `$1"${version}"`, - ) - fs.writeFileSync(absPath, updated, "utf-8") - return - } - - // JSON package.json — parse, update, write with 2-space indent - const pkg = JSON.parse(content) - pkg.version = version - fs.writeFileSync(absPath, JSON.stringify(pkg, null, 2) + "\n", "utf-8") -} - -/** Snapshot current versions before merge. */ -export async function snapshotVersions(): Promise<Record<string, string>> { - const root = repoRoot() - const snapshot: Record<string, string> = {} - - for (const relPath of VERSION_FILES) { - const absPath = path.join(root, relPath) - const version = readVersion(absPath) - if (version) { - snapshot[relPath] = version - } - } - - return snapshot -} - -/** Restore our versions after merge. Returns list of files that were restored. */ -export async function restoreVersions( - snapshot: Record<string, string>, -): Promise<string[]> { - const root = repoRoot() - const restored: string[] = [] - - for (const [relPath, ourVersion] of Object.entries(snapshot)) { - const absPath = path.join(root, relPath) - const currentVersion = readVersion(absPath) - - if (currentVersion !== null && currentVersion !== ourVersion) { - writeVersion(absPath, ourVersion) - restored.push(relPath) - } - } - - return restored -} diff --git a/script/upstream/transforms/scripts.ts b/script/upstream/transforms/scripts.ts deleted file mode 100644 index 406305423b..0000000000 --- a/script/upstream/transforms/scripts.ts +++ /dev/null @@ -1,144 +0,0 @@ -import fs from "fs" -import path from "path" -import { repoRoot } from "../utils/config" -import type { FileReport, Change } from "../utils/report" -import { noChanges } from "../utils/report" - -/** Directory containing build/release scripts. 
*/ -const SCRIPTS_DIR = "script" - -/** String replacements for build/release scripts. */ -const SCRIPT_REPLACEMENTS: Array<{ - match: RegExp - replacement: string - description: string -}> = [ - // GitHub API URL patterns - { - match: /api\.github\.com\/repos\/anomalyco\/opencode/g, - replacement: "api.github.com/repos/AltimateAI/altimate-code", - description: "GitHub API URL", - }, - { - match: /github\.com\/anomalyco\/opencode/g, - replacement: "github.com/AltimateAI/altimate-code", - description: "GitHub URL", - }, - // Release artifact naming - { - match: /opencode-v\$\{/g, - replacement: "altimate-code-v${", - description: "release artifact prefix (template literal)", - }, - { - match: /opencode-v\$/g, - replacement: "altimate-code-v$", - description: "release artifact prefix (shell variable)", - }, - { - match: /"opencode-/g, - replacement: '"altimate-code-', - description: "artifact name prefix", - }, - // Bot identity strings - { - match: /opencode-bot/g, - replacement: "altimate-code-bot", - description: "bot identity", - }, - { - match: /opencode\[bot\]/g, - replacement: "altimate-code[bot]", - description: "bot identity (bracket notation)", - }, - // Owner/repo in script strings - { - match: /anomalyco\/opencode/g, - replacement: "AltimateAI/altimate-code", - description: "owner/repo reference", - }, -] - -/** - * Patterns to EXCLUDE from transformation. - * These are internal references that should keep the upstream naming. - */ -const EXCLUDE_PATHS = [ - "script/upstream/", // Our own merge tooling references upstream intentionally -] - -/** Find TypeScript files in the scripts directory (non-recursive into upstream). 
*/ -function findScriptFiles(dir: string): string[] { - if (!fs.existsSync(dir)) return [] - - const results: string[] = [] - - for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { - const fullPath = path.join(dir, entry.name) - if (entry.isDirectory()) { - // Skip upstream directory — it references upstream intentionally - if (entry.name === "upstream") continue - results.push(...findScriptFiles(fullPath)) - } else if (entry.name.endsWith(".ts") || entry.name.endsWith(".mts")) { - results.push(fullPath) - } - } - - return results -} - -/** Transform a single script file. */ -async function transformScriptFile( - absPath: string, - relPath: string, - options?: { dryRun?: boolean }, -): Promise<FileReport> { - // Skip excluded paths - for (const exclude of EXCLUDE_PATHS) { - if (relPath.startsWith(exclude)) { - return noChanges(relPath) - } - } - - if (!fs.existsSync(absPath)) { - return noChanges(relPath) - } - - let content = fs.readFileSync(absPath, "utf-8") - const changes: Change[] = [] - - for (const r of SCRIPT_REPLACEMENTS) { - const before = content - content = content.replace(r.match, r.replacement) - if (content !== before) { - changes.push({ description: r.description }) - } - } - - if (changes.length === 0) { - return noChanges(relPath) - } - - if (!options?.dryRun) { - fs.writeFileSync(absPath, content, "utf-8") - } - - return { filePath: relPath, applied: !options?.dryRun, changes } -} - -/** Transform all build/release scripts. 
*/ -export async function transformScripts( - options?: { dryRun?: boolean }, -): Promise<FileReport[]> { - const root = repoRoot() - const scriptsDir = path.join(root, SCRIPTS_DIR) - const files = findScriptFiles(scriptsDir) - const reports: FileReport[] = [] - - for (const absPath of files) { - const relPath = path.relative(root, absPath) - reports.push(await transformScriptFile(absPath, relPath, options)) - } - - return reports -} diff --git a/script/upstream/transforms/skip-files.ts b/script/upstream/transforms/skip-files.ts deleted file mode 100644 index 7707f3b98a..0000000000 --- a/script/upstream/transforms/skip-files.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { minimatch } from "minimatch" -import { git, conflictedFiles } from "../utils/git" -import { loadConfig } from "../utils/config" - -/** - * For conflicted files matching skipFiles patterns, resolve by accepting upstream's version. - * These are upstream packages we don't modify — we keep them to avoid merge friction. - */ -export function resolveSkipFiles(): { resolved: string[]; skipped: string[] } { - const config = loadConfig() - const conflicts = conflictedFiles() - const resolved: string[] = [] - const skipped: string[] = [] - - for (const file of conflicts) { - const shouldSkip = config.skipFiles.some((pattern) => minimatch(file, pattern)) - if (shouldSkip) { - git(`checkout --theirs -- "${file}"`) - git(`add "${file}"`) - resolved.push(file) - } else { - skipped.push(file) - } - } - - return { resolved, skipped } -} diff --git a/script/upstream/transforms/tauri.ts b/script/upstream/transforms/tauri.ts deleted file mode 100644 index 195c60d672..0000000000 --- a/script/upstream/transforms/tauri.ts +++ /dev/null @@ -1,301 +0,0 @@ -import fs from "fs" -import path from "path" -import { repoRoot } from "../utils/config" -import type { FileReport, Change } from "../utils/report" -import { noChanges } from "../utils/report" - -/** Tauri JSON config files to transform. 
*/ -const TAURI_CONFIGS = [ - "packages/desktop/src-tauri/tauri.conf.json", - "packages/desktop/src-tauri/tauri.prod.conf.json", - "packages/desktop/src-tauri/tauri.beta.conf.json", -] - -/** Electron builder config (TypeScript, handled via string replacement). */ -const ELECTRON_CONFIG = "packages/desktop-electron/electron-builder.config.ts" - -/** JSON field transforms for Tauri configs. */ -interface JsonTransform { - /** Dot-separated path to the field. */ - jsonPath: string - /** Value pattern to match. */ - match: string | RegExp - /** Replacement value. */ - replacement: string - /** Description for reporting. */ - description: string -} - -const TAURI_JSON_TRANSFORMS: JsonTransform[] = [ - { - jsonPath: "productName", - match: /^OpenCode$/, - replacement: "Altimate Code", - description: "productName", - }, - { - jsonPath: "productName", - match: /^OpenCode Dev$/, - replacement: "Altimate Code Dev", - description: "productName (Dev)", - }, - { - jsonPath: "productName", - match: /^OpenCode Beta$/, - replacement: "Altimate Code Beta", - description: "productName (Beta)", - }, - { - jsonPath: "mainBinaryName", - match: /^OpenCode$/, - replacement: "Altimate Code", - description: "mainBinaryName", - }, - { - jsonPath: "mainBinaryName", - match: /^OpenCode Dev$/, - replacement: "Altimate Code Dev", - description: "mainBinaryName (Dev)", - }, - { - jsonPath: "mainBinaryName", - match: /^OpenCode Beta$/, - replacement: "Altimate Code Beta", - description: "mainBinaryName (Beta)", - }, -] - -/** Get a value from a nested JSON object using dot-separated path. */ -function getByDotPath(obj: any, dotPath: string): any { - const parts = dotPath.split(".") - let current = obj - for (const part of parts) { - if (current == null || typeof current !== "object") return undefined - current = current[part] - } - return current -} - -/** Set a value in a nested JSON object using dot-separated path. 
*/ -function setByDotPath(obj: any, dotPath: string, value: any): void { - const parts = dotPath.split(".") - let current = obj - for (let i = 0; i < parts.length - 1; i++) { - if (current[parts[i]] == null || typeof current[parts[i]] !== "object") return - current = current[parts[i]] - } - current[parts[parts.length - 1]] = value -} - -/** Transform a single Tauri JSON config file. */ -export async function transformTauriConfig( - filePath: string, - options?: { dryRun?: boolean }, -): Promise<FileReport> { - const root = repoRoot() - const absPath = path.isAbsolute(filePath) ? filePath : path.join(root, filePath) - const relPath = path.relative(root, absPath) - - if (!fs.existsSync(absPath)) { - return noChanges(relPath) - } - - const content = fs.readFileSync(absPath, "utf-8") - const changes: Change[] = [] - - if (absPath.endsWith(".ts")) { - // Electron builder config — use string replacement - return transformElectronConfig(absPath, relPath, content, options) - } - - // Tauri JSON config - let config: any - try { - config = JSON.parse(content) - } catch { - return noChanges(relPath) - } - - // Apply named field transforms - for (const transform of TAURI_JSON_TRANSFORMS) { - const current = getByDotPath(config, transform.jsonPath) - if (current === undefined || typeof current !== "string") continue - - const matches = - typeof transform.match === "string" - ? current === transform.match - : transform.match.test(current) - - if (matches) { - const updated = - typeof transform.match === "string" - ? 
transform.replacement - : current.replace(transform.match, transform.replacement) - changes.push({ - description: transform.description, - before: current, - after: updated, - }) - setByDotPath(config, transform.jsonPath, updated) - } - } - - // Apply identifier transforms recursively - transformIdentifiersInConfig(config, changes) - - // Transform updater endpoints (GitHub URLs) - transformUpdaterEndpoints(config, changes) - - // Transform protocols - transformProtocols(config, changes) - - if (changes.length === 0) { - return noChanges(relPath) - } - - if (!options?.dryRun) { - fs.writeFileSync(absPath, JSON.stringify(config, null, 2) + "\n", "utf-8") - } - - return { filePath: relPath, applied: !options?.dryRun, changes } -} - -/** Transform identifier strings throughout the config. */ -function transformIdentifiersInConfig(config: any, changes: Change[]): void { - // Walk the entire config looking for identifier patterns - walkAndReplace(config, /ai\.opencode\./g, "ai.altimate.code.", "identifier", changes) -} - -/** Transform updater endpoint URLs. */ -function transformUpdaterEndpoints(config: any, changes: Change[]): void { - const endpoints = getByDotPath(config, "plugins.updater.endpoints") - if (!Array.isArray(endpoints)) return - - for (let i = 0; i < endpoints.length; i++) { - if (typeof endpoints[i] !== "string") continue - const updated = endpoints[i] - .replace(/anomalyco\/opencode/g, "AltimateAI/altimate-code") - .replace(/opencode/g, "altimate-code") - if (updated !== endpoints[i]) { - changes.push({ - description: `updater endpoint [${i}]`, - before: endpoints[i], - after: updated, - }) - endpoints[i] = updated - } - } -} - -/** Transform protocol definitions. */ -function transformProtocols(config: any, changes: Change[]): void { - const protocols = config.app?.security?.protocols ?? 
config.protocols - if (!Array.isArray(protocols)) return - - for (const protocol of protocols) { - if (protocol.name === "OpenCode") { - changes.push({ - description: "protocol name", - before: protocol.name, - after: "Altimate Code", - }) - protocol.name = "Altimate Code" - } - } -} - -/** Walk an object tree and replace string values matching a pattern. */ -function walkAndReplace( - obj: any, - pattern: RegExp, - replacement: string, - label: string, - changes: Change[], - currentPath: string = "", -): void { - if (obj == null || typeof obj !== "object") return - - const entries = Array.isArray(obj) - ? obj.map((v, i) => [String(i), v] as const) - : Object.entries(obj) - - for (const [key, value] of entries) { - const fullPath = currentPath ? `${currentPath}.${key}` : key - - if (typeof value === "string") { - // Reset lastIndex for global regex - pattern.lastIndex = 0 - if (pattern.test(value)) { - pattern.lastIndex = 0 - const updated = value.replace(pattern, replacement) - if (Array.isArray(obj)) { - obj[Number(key)] = updated - } else { - ;(obj as any)[key] = updated - } - changes.push({ - description: `${label} at ${fullPath}`, - before: value, - after: updated, - }) - } - } else if (typeof value === "object" && value !== null) { - walkAndReplace(value, pattern, replacement, label, changes, fullPath) - } - } -} - -/** Transform electron-builder.config.ts via string replacement. 
*/ -function transformElectronConfig( - absPath: string, - relPath: string, - content: string, - options?: { dryRun?: boolean }, -): FileReport { - const changes: Change[] = [] - let updated = content - - const replacements: Array<{ match: RegExp; replacement: string; description: string }> = [ - { match: /appId:\s*"ai\.opencode\./g, replacement: 'appId: "ai.altimate.code.', description: "appId identifier" }, - { match: /productName:\s*"OpenCode"/g, replacement: 'productName: "Altimate Code"', description: "productName" }, - { match: /opencode-electron/g, replacement: "altimate-code-electron", description: "artifact name" }, - { match: /packageName:\s*"opencode"/g, replacement: 'packageName: "altimate-code"', description: "rpm packageName" }, - { match: /packageName:\s*"opencode-dev"/g, replacement: 'packageName: "altimate-code-dev"', description: "rpm packageName (dev)" }, - { match: /packageName:\s*"opencode-beta"/g, replacement: 'packageName: "altimate-code-beta"', description: "rpm packageName (beta)" }, - { match: /owner:\s*"anomalyco"/g, replacement: 'owner: "AltimateAI"', description: "publish.owner" }, - { match: /repo:\s*"opencode"/g, replacement: 'repo: "altimate-code"', description: "publish.repo" }, - ] - - for (const r of replacements) { - const before = updated - updated = updated.replace(r.match, r.replacement) - if (updated !== before) { - changes.push({ description: r.description }) - } - } - - if (changes.length === 0) { - return noChanges(relPath) - } - - if (!options?.dryRun) { - fs.writeFileSync(absPath, updated, "utf-8") - } - - return { filePath: relPath, applied: !options?.dryRun, changes } -} - -/** Transform all Tauri and electron-builder config files. 
*/ -export async function transformAllTauri( - options?: { dryRun?: boolean }, -): Promise<FileReport[]> { - const reports: FileReport[] = [] - - for (const file of TAURI_CONFIGS) { - reports.push(await transformTauriConfig(file, options)) - } - - reports.push(await transformTauriConfig(ELECTRON_CONFIG, options)) - - return reports -} diff --git a/script/upstream/transforms/web-docs.ts b/script/upstream/transforms/web-docs.ts deleted file mode 100644 index 32e1be515d..0000000000 --- a/script/upstream/transforms/web-docs.ts +++ /dev/null @@ -1,117 +0,0 @@ -import fs from "fs" -import path from "path" -import { repoRoot } from "../utils/config" -import type { FileReport, Change } from "../utils/report" -import { noChanges } from "../utils/report" - -/** Directories and file patterns to scan for web documentation. */ -const DOC_TARGETS = [ - { dir: "packages/web/src/content/docs", extensions: [".mdx", ".md"] }, - { dir: "packages/web/src/components", extensions: [".astro"] }, -] - -/** Single-file targets. */ -const SINGLE_FILES = ["packages/web/config.mjs"] - -/** Replacements for web documentation files. 
*/ -const DOC_REPLACEMENTS: Array<{ - match: RegExp - replacement: string - description: string -}> = [ - // CLI commands (binary is renamed) - { match: /opencode serve/g, replacement: "altimate-code serve", description: "CLI command: serve" }, - { match: /opencode auth login/g, replacement: "altimate-code auth login", description: "CLI command: auth login" }, - { match: /opencode auth logout/g, replacement: "altimate-code auth logout", description: "CLI command: auth logout" }, - { match: /opencode auth/g, replacement: "altimate-code auth", description: "CLI command: auth" }, - { match: /opencode config/g, replacement: "altimate-code config", description: "CLI command: config" }, - { match: /opencode init/g, replacement: "altimate-code init", description: "CLI command: init" }, - // Install commands in code blocks - { match: /npm install -g opencode/g, replacement: "npm install -g @altimateai/altimate-code", description: "npm install command" }, - { match: /npx opencode/g, replacement: "npx @altimateai/altimate-code", description: "npx command" }, - { match: /brew install opencode/g, replacement: "brew install altimate-code", description: "brew install command" }, - // Product names (more specific first) - { match: /OpenCode Desktop/g, replacement: "Altimate Code Desktop", description: "product name (Desktop)" }, - { match: /OpenCode/g, replacement: "Altimate Code", description: "product name" }, - // URLs - { match: /opencode\.ai/g, replacement: "altimate.ai", description: "website URL" }, - // GitHub references - { match: /anomalyco\/opencode/g, replacement: "AltimateAI/altimate-code", description: "GitHub owner/repo" }, -] - -/** Recursively find files with given extensions in a directory. 
*/ -function findFiles(dir: string, extensions: string[]): string[] { - if (!fs.existsSync(dir)) return [] - - const results: string[] = [] - - for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { - const fullPath = path.join(dir, entry.name) - if (entry.isDirectory()) { - results.push(...findFiles(fullPath, extensions)) - } else if (extensions.some((ext) => entry.name.endsWith(ext))) { - results.push(fullPath) - } - } - - return results -} - -/** Transform a single documentation file. */ -async function transformDocFile( - absPath: string, - relPath: string, - options?: { dryRun?: boolean }, -): Promise<FileReport> { - if (!fs.existsSync(absPath)) { - return noChanges(relPath) - } - - let content = fs.readFileSync(absPath, "utf-8") - const changes: Change[] = [] - - for (const r of DOC_REPLACEMENTS) { - const before = content - content = content.replace(r.match, r.replacement) - if (content !== before) { - changes.push({ description: r.description }) - } - } - - if (changes.length === 0) { - return noChanges(relPath) - } - - if (!options?.dryRun) { - fs.writeFileSync(absPath, content, "utf-8") - } - - return { filePath: relPath, applied: !options?.dryRun, changes } -} - -/** Transform all web documentation files. 
*/ -export async function transformWebDocs( - options?: { dryRun?: boolean }, -): Promise<FileReport[]> { - const root = repoRoot() - const reports: FileReport[] = [] - - // Process directory targets - for (const target of DOC_TARGETS) { - const dirPath = path.join(root, target.dir) - const files = findFiles(dirPath, target.extensions) - - for (const absPath of files) { - const relPath = path.relative(root, absPath) - reports.push(await transformDocFile(absPath, relPath, options)) - } - } - - // Process single-file targets - for (const relPath of SINGLE_FILES) { - const absPath = path.join(root, relPath) - reports.push(await transformDocFile(absPath, relPath, options)) - } - - return reports -} diff --git a/script/upstream/transforms/workflows.ts b/script/upstream/transforms/workflows.ts deleted file mode 100644 index d8c1e3d3cf..0000000000 --- a/script/upstream/transforms/workflows.ts +++ /dev/null @@ -1,89 +0,0 @@ -import fs from "fs" -import path from "path" -import { repoRoot } from "../utils/config" -import type { FileReport, Change } from "../utils/report" -import { noChanges } from "../utils/report" - -/** Directory containing GitHub workflow files. */ -const WORKFLOWS_DIR = ".github/workflows" - -/** - * Patterns that indicate upstream branding in workflow files. - * This is a diagnostic tool — workflows are in keepOurs, so they - * are not auto-transformed. This audits for any upstream branding - * that might have been introduced when selectively accepting - * upstream workflow changes. 
- */ -const AUDIT_PATTERNS: Array<{ - match: RegExp - description: string -}> = [ - { match: /anomalyco\/opencode/g, description: "GitHub owner/repo reference" }, - { match: /opencode\.ai/g, description: "upstream website URL" }, - { match: /name:\s*OpenCode\b/g, description: "workflow name referencing OpenCode" }, - { match: /npm install.*\bopencode\b/g, description: "npm install referencing opencode" }, - { match: /opencode serve/g, description: "CLI command in workflow" }, -] - -/** Recursively find YAML workflow files. */ -function findWorkflowFiles(dir: string): string[] { - if (!fs.existsSync(dir)) return [] - - const results: string[] = [] - - for (const entry of fs.readdirSync(dir, { withFileTypes: true })) { - const fullPath = path.join(dir, entry.name) - if (entry.isDirectory()) { - results.push(...findWorkflowFiles(fullPath)) - } else if (entry.name.endsWith(".yml") || entry.name.endsWith(".yaml")) { - results.push(fullPath) - } - } - - return results -} - -/** - * Audit workflow files for any upstream branding that should be updated. - * - * This is a diagnostic tool, not an auto-transformer. Since workflows - * are in keepOurs, they are fully controlled by us. This function - * detects any upstream branding that may have been accidentally - * introduced. 
- */ -export async function auditWorkflows( - options?: { dryRun?: boolean }, -): Promise<FileReport[]> { - const root = repoRoot() - const workflowDir = path.join(root, WORKFLOWS_DIR) - const files = findWorkflowFiles(workflowDir) - const reports: FileReport[] = [] - - for (const absPath of files) { - const relPath = path.relative(root, absPath) - const content = fs.readFileSync(absPath, "utf-8") - const changes: Change[] = [] - - for (const pattern of AUDIT_PATTERNS) { - // Reset regex state - pattern.match.lastIndex = 0 - const matches = content.match(pattern.match) - if (matches) { - for (const match of matches) { - changes.push({ - description: `${pattern.description}: "${match}"`, - }) - } - } - } - - if (changes.length > 0) { - // Audit never applies changes — it only reports findings - reports.push({ filePath: relPath, applied: false, changes }) - } else { - reports.push(noChanges(relPath)) - } - } - - return reports -} diff --git a/script/upstream/utils/config.ts b/script/upstream/utils/config.ts index 91a228f986..fc6dfba322 100644 --- a/script/upstream/utils/config.ts +++ b/script/upstream/utils/config.ts @@ -245,7 +245,6 @@ export const defaultConfig: MergeConfig = { keepOurs: [ "README.md", - "README.*.md", "CONTRIBUTING.md", "SECURITY.md", "CODE_OF_CONDUCT.md", @@ -258,7 +257,6 @@ export const defaultConfig: MergeConfig = { "github/README.md", "github/index.ts", "install", - "infra/**", "packages/altimate-engine/**", "packages/opencode/src/altimate/**", "packages/opencode/src/bridge/**", @@ -297,6 +295,28 @@ export const defaultConfig: MergeConfig = { "specs/**", // Translated READMEs "README.*.md", + // Translation glossaries (we don't ship translations) + ".opencode/glossary/**", + ".opencode/agent/translator.md", + // Upstream project-specific dev tools and agents + ".opencode/tool/github-triage.ts", + ".opencode/tool/github-triage.txt", + ".opencode/tool/github-pr-search.txt", + ".opencode/tool/github-pr-search.ts", + 
".opencode/agent/duplicate-pr.md", + ".opencode/agent/triage.md", + ".opencode/agent/docs.md", + ".opencode/themes/mytheme.json", + ".opencode/env.d.ts", + ".opencode/command/rmslop.md", + ".opencode/command/ai-deps.md", + ".opencode/command/spellcheck.md", + // Storybook CI (packages/storybook and packages/ui are deleted) + ".github/workflows/storybook.yml", + // Upstream Zed extension sync (no workflow references it) + "script/sync-zed.ts", + // Upstream AGENTS.md references dev branch, misleading for our fork + "AGENTS.md", ], brandingRules: [ diff --git a/script/upstream/utils/git.ts b/script/upstream/utils/git.ts index c2b0ec2285..3ebfc86c73 100644 --- a/script/upstream/utils/git.ts +++ b/script/upstream/utils/git.ts @@ -89,9 +89,20 @@ export async function getModifiedFiles(base: string): Promise<string[]> { return git(`diff --name-only ${base}...HEAD`).split("\n").filter((f) => f.length > 0) } -/** Stage all changes (tracked and untracked). */ +/** Stage all changes to tracked files (avoids picking up untracked experiment dirs). */ export async function stageAll(): Promise<void> { - git("add -A") + git("add -u") +} + +/** Stage specific files. */ +export async function stageFiles(files: string[]): Promise<void> { + if (files.length === 0) return + // Stage in batches to avoid arg-list-too-long + const batchSize = 100 + for (let i = 0; i < files.length; i += batchSize) { + const batch = files.slice(i, i + batchSize) + git(`add -- ${batch.map((f) => JSON.stringify(f)).join(" ")}`) + } } /** Create a commit with the given message. */ @@ -102,9 +113,9 @@ export async function commit(message: string): Promise<void> { }) } -/** Fetch all refs from a remote. */ +/** Fetch all refs from a remote. Uses --force for tags to handle local/upstream tag conflicts. */ export async function fetchRemote(remote: string): Promise<void> { - git(`fetch ${remote} --tags`) + git(`fetch ${remote} --tags --force`) } /** List all tags from a remote, returned as an array of tag names. 
*/ diff --git a/script/upstream/utils/logger.ts b/script/upstream/utils/logger.ts index da32e5aba2..92c53903be 100644 --- a/script/upstream/utils/logger.ts +++ b/script/upstream/utils/logger.ts @@ -1,17 +1,17 @@ // Colored logging utility using ANSI escape codes. // No external dependencies — works directly with stdout/stderr. -const RESET = "\x1b[0m" -const BOLD = "\x1b[1m" -const DIM = "\x1b[2m" - -const BLUE = "\x1b[34m" -const YELLOW = "\x1b[33m" -const RED = "\x1b[31m" -const GREEN = "\x1b[32m" -const GRAY = "\x1b[90m" -const CYAN = "\x1b[36m" -const MAGENTA = "\x1b[35m" +export const RESET = "\x1b[0m" +export const BOLD = "\x1b[1m" +export const DIM = "\x1b[2m" + +export const BLUE = "\x1b[34m" +export const YELLOW = "\x1b[33m" +export const RED = "\x1b[31m" +export const GREEN = "\x1b[32m" +export const GRAY = "\x1b[90m" +export const CYAN = "\x1b[36m" +export const MAGENTA = "\x1b[35m" const MAX_LINE_WIDTH = 80 @@ -21,6 +21,24 @@ function truncate(str: string, maxLen: number = MAX_LINE_WIDTH): string { return str.slice(0, maxLen - 1) + "\u2026" } +// ── Formatting helpers ────────────────────────────────────────────────────── + +export function bold(s: string): string { return `${BOLD}${s}${RESET}` } +export function dim(s: string): string { return `${DIM}${s}${RESET}` } +export function cyan(s: string): string { return `${CYAN}${s}${RESET}` } +export function green(s: string): string { return `${GREEN}${s}${RESET}` } +export function red(s: string): string { return `${RED}${s}${RESET}` } +export function yellow(s: string): string { return `${YELLOW}${s}${RESET}` } + +export function banner(text: string): void { + const line = "═".repeat(60) + console.log(`\n${CYAN}${line}${RESET}`) + console.log(`${CYAN} ${BOLD}${text}${RESET}`) + console.log(`${CYAN}${line}${RESET}\n`) +} + +// ── Logging functions ─────────────────────────────────────────────────────── + /** Log an informational message. 
*/ export function info(msg: string): void { console.log(`${BLUE}info${RESET} ${msg}`)