diff --git a/.docs/remote-architecture.md b/.docs/remote-architecture.md index 32e35d7cafa..75274095a12 100644 --- a/.docs/remote-architecture.md +++ b/.docs/remote-architecture.md @@ -93,6 +93,8 @@ Examples: A known environment may or may not know the target `environmentId` before first successful connect. +In the hosted web app, known environments are browser-local. A hosted pairing URL can create the saved entry, but it does not give the hosted app a server-side control plane or a copy of the session state. + ### AccessEndpoint An `AccessEndpoint` is one concrete way to reach a known environment. @@ -108,6 +110,67 @@ A single environment may have many endpoints: The environment stays the same. Only the access path changes. +### AdvertisedEndpoint + +An `AdvertisedEndpoint` is a server or desktop-authored candidate endpoint for an environment. It is how the backend tells the client which URLs may be useful for pairing and reconnecting. + +`AdvertisedEndpoint` is deliberately narrower than the full access model: + +- it describes a concrete HTTP and WebSocket base URL pair +- it can mark the endpoint as default, available, or unavailable +- it includes reachability hints such as loopback, LAN, private, public, or tunnel +- it includes compatibility hints such as whether the endpoint can be used from the hosted HTTPS app + +Clients should treat advertised endpoints as hints, not as proof that a route works from the current device. The final connection attempt still decides whether the endpoint is reachable. + +The UI presents one default advertised endpoint in the network-access summary and keeps the rest behind an expandable advanced list. The default controls pairing QR codes and primary copy actions. Users can override it, but that override is a UI preference, not backend configuration. + +Persist the override by stable endpoint kind rather than raw URL whenever possible. 
For example, a LAN endpoint should be stored as the desktop LAN endpoint preference, not as `192.168.x.y`, because the address can change when the user switches networks. Provider endpoints should use provider-specific stable keys such as Tailscale IP or Tailscale MagicDNS HTTPS. Custom endpoints may fall back to their concrete identity. + +When no user default is saved, endpoint selection should prefer: + +1. endpoints compatible with the hosted HTTPS app +2. explicitly default endpoints +3. non-loopback endpoints +4. loopback endpoints only for same-machine clients + +This keeps endpoint discovery centralized without making any one provider, such as Tailscale or a future tunnel service, part of the core environment model. + +### Endpoint providers + +Endpoint providers are add-ons that contribute advertised endpoints for the current environment. + +The provider boundary is intentionally outside the core environment model: + +- core owns `ExecutionEnvironment`, saved environments, pairing, and connection lifecycle +- providers discover or synthesize endpoints +- providers return normalized `AdvertisedEndpoint` records +- the UI and pairing logic select from those records without knowing provider-specific commands + +The first provider is Tailscale. It can discover Tailnet IP and MagicDNS addresses from the local machine and publish them as additional endpoint candidates. Future providers, such as a hosted tunnel service, should plug into the same shape rather than adding a separate remote environment path. + +Provider-specific confidence should remain a hint. A Tailscale endpoint still needs a successful browser or desktop connection before the client treats it as connected. + +### Hosted pairing request + +A hosted pairing request is a bootstrap URL for the static web app, not a transport. 
+ +Example: + +```text +https://app.t3.codes/pair?host=https://backend.example.com:3773#token=PAIRCODE +``` + +The hosted app reads the `host` parameter and pairing token, exchanges the token directly with that backend, then saves the resulting environment record in browser local storage. + +Important constraints: + +- the hosted app does not proxy HTTP or WebSocket traffic +- the backend must still be reachable directly from the browser +- HTTPS pages can only connect to HTTPS/WSS backends +- HTTP LAN endpoints should keep using direct desktop or CLI pairing URLs +- the token belongs in the URL hash so it is not sent to the hosted app origin + ### RepositoryIdentity `RepositoryIdentity` remains a best-effort logical repo grouping mechanism across environments. @@ -151,6 +214,8 @@ Benefits: - no client-specific process management required - best fit for hosted or self-managed remote T3 deployments +Browser security rules are part of this access method. A hosted HTTPS web client can connect to `wss://` backends, but it cannot connect to plain `ws://` or `http://` LAN backends because that would be mixed content. + ### 2. Tunneled WebSocket access Examples: @@ -170,6 +235,8 @@ This is especially useful when: - mobile must reach a desktop-hosted environment - a machine should be reachable without exposing raw LAN or public ports +Tailscale-backed access sits here architecturally even though the current implementation is endpoint discovery rather than a T3-managed tunnel. It contributes private-network endpoints and lets the existing HTTP/WebSocket client path do the actual connection. + ### 3. Desktop-managed SSH access SSH is an access and launch helper, not a separate environment type. @@ -185,6 +252,8 @@ After that, the renderer should still connect using an ordinary WebSocket URL ag This keeps the renderer transport model consistent with every other access method. 
+The desktop main process owns the SSH bridge because it can spawn local SSH processes, manage askpass prompts, write temporary launch scripts, and clean up forwards. The renderer receives a saved environment record and connects through the forwarded URL; it should not need SSH-specific RPC paths for normal environment traffic. + ## Launch methods Launch methods answer a different question: @@ -227,6 +296,15 @@ The recommended T3 flow is: 4. Desktop establishes local port forwarding. 5. Renderer connects to the forwarded WebSocket endpoint as a normal environment. +The saved environment should remember that it was created by desktop SSH launch only for reconnect and lifecycle UX. That metadata should not change the server protocol or the environment identity model. + +Failure handling should be explicit: + +- SSH authentication failure should surface before any environment is saved +- remote launch failure should include remote logs or the launcher command output when available +- forwarded-port failure should leave the saved environment disconnected rather than falling back to an unrelated endpoint +- reconnect should attempt to restore the SSH bridge before reconnecting the normal WebSocket client + ### 3. Client-managed local publish This is the inverse of remote launch: a local T3 server is already running, and the client publishes it through a tunnel. @@ -267,6 +345,8 @@ T3 already supports a WebSocket auth token on the server. That should become a f For publicly reachable environments, authenticated access should be treated as required. +Hosted pairing should be treated as a client-side convenience only. The hosted app must not receive pairing tokens through query parameters, must not store pairing state server-side, and must not imply that an HTTP backend is safe or reachable from an HTTPS browser context. + ## Relationship to Zed Zed is a useful reference implementation for managed remote launch and reconnect behavior. 
diff --git a/.github/VOUCHED.td b/.github/VOUCHED.td index 5535d54a5b4..73376110d9a 100644 --- a/.github/VOUCHED.td +++ b/.github/VOUCHED.td @@ -30,3 +30,6 @@ github:Yash-Singh1 github:eggfriedrice24 github:Ymit24 github:shivamhwp +github:jappyjan +github:justsomelegs +github:UtkarshUsername diff --git a/.gitignore b/.gitignore index 6c48782f9ac..d349da81db0 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,5 @@ apps/web/src/components/__screenshots__ __screenshots__/ .tanstack squashfs-root/ +.vercel +.claude/worktrees/ diff --git a/.plans/19-remote-endpoints-hosted-static.md b/.plans/19-remote-endpoints-hosted-static.md new file mode 100644 index 00000000000..2fa0bc70211 --- /dev/null +++ b/.plans/19-remote-endpoints-hosted-static.md @@ -0,0 +1,350 @@ +# Remote Endpoints and Hosted Static App Plan + +## Purpose + +Make remote access feel first-class while keeping the free DIY path open. + +The immediate product goal is: + +- users can expose a backend through LAN, their own Tailscale, MagicDNS, a manual HTTPS endpoint, or later T3 Tunnel +- users can generate a hosted pairing link for `app.t3.codes` +- the hosted app can pair, persist, reconnect, and operate against saved environments without requiring a backend at the hosted app origin +- all transports reuse the same backend auth, WebSocket runtime, saved environment registry, and pairing UX + +This plan intentionally leaves the paid T3 cloud tunnel fabric out of scope. It defines the OSS foundation that T3 Tunnel should later plug into. + +## Current State + +Already present or in progress: + +- Server auth distinguishes bootstrap credentials from session credentials. +- One-time pairing credentials can be exchanged for browser sessions or bearer sessions. +- Saved remote environments store `httpBaseUrl`, `wsBaseUrl`, and a bearer token. +- Remote environment WebSocket connections use a short-lived WebSocket token. +- Pairing URLs can carry tokens in the URL fragment. 
+- Hosted `/pair?host=...#token=...` can add a saved environment. +- Hosted static startup can avoid assuming the page origin is the backend. + +Main gaps: + +- Reachability is represented ad hoc as `endpointUrl`, manual host input, or saved environment URLs. +- Desktop exposure, hosted pairing, manual remote environments, and future tunnels do not share one endpoint model. +- Tailscale/MagicDNS endpoints are not detected or surfaced. +- Hosted-static empty/offline states are still thin. +- Browser compatibility is not explicitly modeled, especially HTTPS hosted app to HTTP backend mixed-content failure. + +## Core Decision: Add `AdvertisedEndpoint` + +Add a new first-class contract instead of extending the environment descriptor. + +### Why not extend `ExecutionEnvironmentDescriptor` + +`ExecutionEnvironmentDescriptor` answers: "What environment is this?" + +Examples: + +- environment id +- label +- platform +- server version +- capabilities + +`AdvertisedEndpoint` answers: "How can a client reach this environment right now?" + +Examples: + +- loopback URL +- LAN URL +- Tailscale IP URL +- MagicDNS/Serve URL +- manual URL +- future T3 Tunnel URL +- browser compatibility and exposure level + +Those are different lifecycles. One environment can have many endpoints, endpoints can appear/disappear as network interfaces change, and the same descriptor is returned regardless of which endpoint the client used. Extending the descriptor would blur environment identity with transport reachability and make saved environments harder to reason about. 
+ +### Target Contract + +Add a schema in `packages/contracts`, likely `remoteAccess.ts`: + +```ts +type AdvertisedEndpointProvider = + | "loopback" + | "lan" + | "tailscale-ip" + | "tailscale-magicdns" + | "manual" + | "t3-tunnel"; + +type AdvertisedEndpointVisibility = "local" | "private-network" | "tailnet" | "public"; + +type AdvertisedEndpointCompatibility = { + hostedHttpsApp: "compatible" | "mixed-content-blocked" | "untrusted-certificate" | "unknown"; + desktopApp: "compatible" | "unknown"; +}; + +type AdvertisedEndpoint = { + id: string; + provider: AdvertisedEndpointProvider; + label: string; + httpBaseUrl: string; + wsBaseUrl: string; + visibility: AdvertisedEndpointVisibility; + compatibility: AdvertisedEndpointCompatibility; + source: "server" | "desktop" | "user"; + status: "available" | "unavailable" | "unknown"; + isDefault?: boolean; +}; +``` + +Keep the contract schema-only. All classification logic belongs in `packages/shared`, `apps/server`, `apps/desktop`, or `apps/web`. + +## HTTP/WS and HTTPS/WSS Readiness + +The codebase is partially ready, but the UX and compatibility model are not explicit enough. + +What is ready: + +- Remote target parsing already derives `ws://` from `http://` and `wss://` from `https://`. +- Saved environments store both HTTP and WebSocket base URLs. +- Remote auth uses bearer tokens instead of cookies, so cross-origin hosted clients are viable. +- WebSocket connections can use a dynamically issued `wsToken`. +- Server CORS support exists for browser remote auth endpoints. + +What is not solved by code alone: + +- `https://app.t3.codes` cannot reliably call `http://...` or `ws://...` endpoints because browsers block mixed content. +- `wss://100.x.y.z:3773` needs a certificate the browser trusts. A raw Tailscale IP does not solve certificate trust. +- LAN `http://192.168.x.y:3773` is usable from another desktop/native context but not from the hosted HTTPS app. 
- The UI needs to explain why an endpoint is copyable for desktop pairing but not hosted-app compatible. + +Policy: + +- Support both HTTP/WS and HTTPS/WSS at the runtime layer. +- Mark endpoint compatibility at the product layer. +- Generate `app.t3.codes` links only from endpoints that are likely hosted-browser compatible, or show a warning with an explicit fallback. + +## Architecture + +### Endpoint Sources + +Endpoint records can come from several providers: + +1. **Server runtime** + - headless bind host and port + - server-known explicit advertised host config + +2. **Desktop shell** + - loopback backend URL + - LAN exposure state + - network interface discovery + - Tailscale CLI/status discovery + +3. **User configuration** + - manually added hostnames + - preferred endpoint labels + - hidden/disabled endpoints + +4. **Future cloud provider** + - T3 Tunnel endpoint + - billing/account status + - tunnel lifecycle state + +### Endpoint Registry + +Create a central runtime registry: + +- `packages/contracts/src/remoteAccess.ts` +- `packages/shared/src/remoteAccess.ts` for URL normalization and compatibility classification +- `apps/server/src/remoteAccess/*` for server/headless endpoints +- `apps/desktop/src/remoteAccess/*` for desktop-discovered endpoints +- `apps/web/src/environments/endpoints/*` for client-side display and pairing selection + +The web app should consume endpoint records and not care whether they came from LAN, Tailscale, or a future tunnel. + +### Pairing Link Generation + +Move hosted pairing link generation to endpoint-driven input: + +```ts +buildHostedPairingUrl({ + endpoint: AdvertisedEndpoint, + token, +}); +``` + +Generated URL: + +```text +https://app.t3.codes/pair?host=<httpBaseUrl>#token=<token> +``` + +Use fragment tokens by default. Continue accepting `?token=` for compatibility. + +## Phase 1: Endpoint Abstraction + +### Goals + 
+- Replace ad hoc desktop `endpointUrl` pairing logic with endpoint selection. +- Preserve all current remote behavior. + +### Tasks + +1. Add `AdvertisedEndpoint` schemas to `packages/contracts`. +2. Add shared helpers: + - normalize HTTP base URL + - derive WebSocket base URL + - classify loopback/private/LAN/Tailscale/public host + - classify hosted HTTPS compatibility +3. Add server endpoint discovery: + - loopback endpoint + - configured non-loopback endpoint + - explicit advertised host override +4. Add desktop endpoint discovery: + - local loopback + - LAN exposure endpoint + - endpoint status labels +5. Add WebSocket/API method or existing config field for endpoint snapshots. +6. Refactor settings connections UI: + - render endpoint rows + - endpoint picker for pairing link copy + - show compatibility warnings +7. Refactor hosted link builder to accept endpoint records. +8. Add tests for URL normalization and compatibility classification. + +### Acceptance Criteria + +- Existing LAN/network access UI still works. +- Pairing links are generated from endpoint records. +- Loopback endpoints never produce hosted pairing links silently. +- HTTP private-network endpoints are marked incompatible with `app.t3.codes`. +- No remote environment runtime changes are required for existing saved environments. + +## Phase 2: BYO Tailscale/MagicDNS + +### Goals + +- Detect free DIY Tailscale reachability. +- Surface Tailscale endpoints as normal advertised endpoints. +- Keep users in control of their own tailnet. + +### Tasks + +1. Detect Tailscale IPs from network interfaces: + - IPv4 `100.64.0.0/10` + - mark as `provider: "tailscale-ip"` +2. Add optional desktop-side `tailscale status --json` discovery: + - MagicDNS hostname + - Tailscale Serve/Funnel HTTPS endpoint if discoverable + - graceful failure if CLI is missing +3. Add manual Tailscale endpoint override: + - hostname + - label + - preferred/default flag +4. 
Show Tailscale endpoint rows in settings: + - raw IP HTTP endpoint: desktop-compatible, hosted-app likely blocked + - HTTPS MagicDNS/Serve endpoint: hosted-compatible if URL is HTTPS +5. Generate pairing links using selected Tailscale endpoint. +6. Document DIY setup: + - local desktop-to-desktop over Tailscale + - hosted app requirements + - why HTTPS matters + +### Acceptance Criteria + +- A machine on Tailscale shows a Tailscale endpoint without paid features. +- Users can copy a Tailscale-hosted pairing link when the endpoint is HTTPS-compatible. +- Users can still copy token-only/manual values when endpoint compatibility is unknown. +- Tailscale is optional and never required for regular LAN/loopback use. + +## Phase 3: Hosted Static App Completion + +### Goals + +- `app.t3.codes` works as a real client shell. +- It can pair, persist, reconnect, and clearly explain offline/incompatible states. + +### Tasks + +1. Finish hosted-static root behavior: + - no primary backend required + - saved environment hydration before initial routing decisions + - first saved environment selected as active +2. Add hosted empty state: + - no saved environments + - paste pairing URL + - add host + token +3. Add offline saved environment UI: + - last connected + - reconnect + - remove + - copy/add alternate endpoint +4. Audit primary-backend assumptions: + - command palette + - settings pages + - server config atom defaults + - keybindings + - provider/model lists + - update/desktop-only affordances +5. Add route tests for: + - hosted `/pair?host=...#token=...` + - hosted root with no saved environments + - hosted root with saved environment + - primary backend unavailable but saved environment present +6. Add deployment hardening: + - SPA fallback + - strict CSP + - no third-party scripts + - no query token logging + - disable or hide source maps in production if needed +7. 
Add browser error messages: + - mixed content + - unreachable backend + - CORS failure + - certificate failure + +### Acceptance Criteria + +- `app.t3.codes` can pair a reachable HTTPS backend and reconnect after reload. +- A saved environment can be used without any backend at `app.t3.codes`. +- Offline machines show a useful state instead of a generic boot error. +- HTTP endpoints are still supported in desktop/native/local contexts. +- Hosted HTTPS app only promises compatibility for HTTPS/WSS endpoints. + +## Phase 4: Future T3 Tunnel Provider + +Not part of the current implementation, but the endpoint abstraction should make it straightforward. + +Future tunnel provider responsibilities: + +- create endpoint with `provider: "t3-tunnel"` +- surface tunnel status +- provide stable HTTPS URL +- use existing backend pairing/session auth +- never bypass server auth + +The tunnel fabric can later be Pipenet-derived, Tailscale-derived, or another reverse tunnel implementation. The rest of T3 Code should only see an `AdvertisedEndpoint`. + +## Security Checklist + +- Pairing tokens are short-lived and one-time. +- Generated hosted pairing links put tokens in the fragment. +- The backend remains the authorization boundary. +- Endpoint discovery never disables backend auth. +- Hosted app does not silently downgrade to HTTP. +- Tunnel/public endpoints require explicit user action. +- Client sessions remain revocable. +- Endpoint URLs and request logs must avoid recording pairing tokens. +- Future cloud tunnel must authenticate tunnel creation and tunnel data connections separately from backend pairing. 
+ +## Verification + +Each implementation PR should run: + +- `bun fmt` +- `bun lint` +- `bun typecheck` +- focused tests for changed backend/web behavior +- backend tests for any server-side endpoint discovery or auth changes using `bun run test`, never `bun test` + diff --git a/.plans/19-version-control-phase-1-vcs-driver-foundation.md b/.plans/19-version-control-phase-1-vcs-driver-foundation.md new file mode 100644 index 00000000000..e71c22d0ce3 --- /dev/null +++ b/.plans/19-version-control-phase-1-vcs-driver-foundation.md @@ -0,0 +1,216 @@ +# Version Control Phase 1: VCS Driver Foundation + +## Goal + +Introduce a provider-neutral VCS layer and rewrite the local Git implementation as an Effect-native driver. This phase should preserve user-visible behavior while replacing the Git-first service boundary with an abstraction that can support Git, Jujutsu, and later Sapling or another viable VCS. + +The existing `GitCore` implementation is a behavior reference and source of regression tests, not the target architecture. New code should follow the newer package style used by `effect-acp` and `effect-codex-app-server`: typed service tags, schema-backed tagged errors, scoped process usage, explicit decode boundaries, and no Promise-based process helper as the core execution primitive. + +## Scope + +- Add VCS-domain contracts in `packages/contracts/src/vcs.ts`. +- Add shared runtime parsing helpers in `packages/shared/src/vcs/*` only when they are useful to both server and web. +- Add server services under `apps/server/src/vcs`: + - `Services/VcsDriver.ts` + - `Services/VcsRepositoryResolver.ts` + - `Services/VcsProcess.ts` + - `Layers/GitVcsDriver.ts` + - `errors.ts` +- Migrate server callers from Git-specific terms where the operation is actually VCS-generic. +- Update active consumers to the new VCS APIs in the same phase; do not add backwards-compatible export shims. 
+- Leave source-control hosting providers out of this phase except for remote metadata needed to describe repository status. + +## Non-Goals + +- No GitLab, Azure DevOps, or GitHub provider rewrite yet. +- No Jujutsu driver yet, but every interface must be designed so a Jujutsu driver does not have to pretend to be Git. +- No T3 Review implementation yet. +- No broad UI redesign. + +## Driver Model + +Use provider-neutral nouns in new APIs: + +- `VcsDriver`: local repository mechanics. +- `RepositoryIdentity`: detected VCS kind, root path, common metadata path when available, remotes. +- `WorkingCopyStatus`: dirty state, changed files, aggregate insertions/deletions, current branch/bookmark/change name. +- `ChangeSet`: a committed or pending unit of change, not necessarily a Git commit. +- `RefName`: branch, bookmark, tag, or provider-specific ref. + +The initial driver capabilities should be explicit: + +```ts +export interface VcsDriverCapabilities { + readonly kind: "git" | "jj" | "sapling" | "unknown"; + readonly supportsWorktrees: boolean; + readonly supportsBookmarks: boolean; + readonly supportsAtomicSnapshot: boolean; + readonly supportsPushDefaultRemote: boolean; +} +``` + +Do not model Jujutsu as `GitCoreShape extends ...`. The Git driver can expose Git-specific implementation details internally, but the public VCS layer should describe operations by intent: + +- `detectRepository(cwd)` +- `status(cwd, options)` +- `listRefs(cwd, query/pagination)` +- `checkoutRef(cwd, ref)` +- `createRef(cwd, ref, from?)` +- `createWorkspace(cwd, ref, path?)` +- `removeWorkspace(path)` +- `prepareChangeContext(cwd, filePaths?)` +- `createChange(cwd, message, options)` +- `push(cwd, target?)` +- `rangeContext(cwd, base, head)` +- `listWorkspaceFiles(cwd, options)` + +## Effect Process Layer + +Create a small reusable `VcsProcess` service instead of using `runProcess`. 
+ +Requirements: + +- Implement with `ChildProcess` and `ChildProcessSpawner` from `effect/unstable/process`. +- Support scoped acquisition/release for long-running commands and interruption. +- Support bounded stdout/stderr collection with truncation markers. + - Do not eagerly consume full stdout/stderr; return stream APIs and expose helpers for consumers so we don't consume streams into memory unnecessarily. +- Support stdin. +- Support timeout through Effect scheduling/interruption, not ad-hoc timers. +- Stream output lines to progress callbacks as Effects. +- Return a typed `ProcessOutput` value for successful execution. +- Fail with typed errors, not generic thrown exceptions. + +Errors should be schema-backed tagged classes, for example: + +- `VcsProcessSpawnError` +- `VcsProcessExitError` +- `VcsProcessTimeoutError` +- `VcsOutputDecodeError` +- `VcsRepositoryDetectionError` +- `VcsUnsupportedOperationError` + +Every error should carry operation name, command display string, cwd when applicable, exit code when applicable, stderr/stdout tails when useful, and original cause where available. Override `message` for user-readable messages that provide meaning and hints where appropriate. Errors are schema-backed so the full error details will be persisted and serialized properly when stored to the DB or log files. + +## Git Driver Rewrite + +Rewrite Git support against `VcsProcess`. + +Carry forward current behavior from: + +- `apps/server/src/git/Layers/GitCore.ts` +- `apps/server/src/git/Layers/GitCore.test.ts` +- current Git status/branch/worktree contracts + +But split the implementation into smaller modules: + +- command execution and hardening config +- repository detection +- status parsing +- branch/ref parsing +- worktree operations +- commit/range context generation +- push/pull operations + +Keep parsing deterministic. Prefer Git porcelain formats, null-separated output, and schema decoding for JSON-like command output. 
Avoid regex parsing where Git gives a structured format. + +## Freshness and Local Caching + +Define freshness rules in the VCS layer before adding more providers. Local VCS status is cheap enough to refresh often; network-backed status is not. + +Treat these as live/local: + +- repository detection for the active cwd +- working copy dirty state +- staged/unstaged/untracked file summaries +- current branch/bookmark/change name +- local branch/bookmark lists +- local worktree/workspace lists + +These may run on user-visible polling, but should still be debounced and coalesced per repository root. Prefer filesystem-triggered invalidation where available, with a short fallback poll interval. Concurrent requests for the same repository/status shape should share one in-flight Effect. + +Treat these as cached or explicit-refresh only: + +- remote tracking branch refreshes +- ahead/behind counts that require network fetches +- default branch discovery from a remote provider +- remote branch lists beyond locally known refs + +The VCS driver should expose freshness metadata with status results: + +```ts +export interface VcsFreshness { + readonly source: "live-local" | "cached-local" | "cached-remote" | "explicit-remote"; + readonly observedAt: string; + readonly expiresAt?: string; +} +``` + +Remote refreshes should be opt-in per operation, for example `refresh: "local-only" | "allow-cached-remote" | "force-remote"`. The default for background status should be `local-only`. + +Use Effect `Cache` for repository identity and expensive local metadata: + +- key by resolved repository root plus VCS kind +- invalidate on cwd/root changes and workspace mutation operations +- use short TTLs for local status caches when filesystem events are unavailable +- never hide command failures behind stale values unless the caller explicitly accepts stale data + +## Cutover Policy + +Prefer direct migration and deletion over compatibility wrappers. 
+ +Rules: + +- Update consumers to call `VcsDriver`/`VcsRepositoryResolver` directly as soon as the new API exists. +- Delete migrated `GitCore` service methods and tests in the same PR that moves their consumers. +- Do not keep backwards-compatible export shims, barrel aliases, or old service names for convenience. +- Transitional modules are allowed only when a caller group is too complex or risky to migrate in the same PR. +- Every transitional module must have a narrow owner, a removal checklist, and a test proving it delegates to the new implementation. +- No new feature work may depend on transitional modules. + +Expected transitional candidates: + +- The highest-level `GitManager` orchestration can be migrated in slices if doing the full Commit + PR flow in one PR is too risky. +- WebSocket payload compatibility can remain only where changing it would require a coordinated UI/server protocol migration. Internal server code should still use the new VCS contracts. + +## Tests + +Add integration-style tests with real temporary Git repositories for the new Git driver: + +- non-repository detection +- status for clean/dirty/untracked/staged states +- branch/ref list with pagination +- checkout/create branch +- worktree create/remove +- commit context generation with file filters +- commit creation with hook progress events +- push behavior against a local bare remote +- status polling does not perform remote network refresh by default +- concurrent duplicate status requests are coalesced +- bounded output/truncation +- timeout/interruption +- typed error shape for command failure and missing executable + +Move or duplicate only the tests needed to prove behavior, then delete the old service tests in the same migration slice. + +## Migration Steps + +1. Add `vcs` contracts and tagged errors. +2. Add `VcsProcess` and unit tests around process execution semantics. +3. Add `VcsDriver` and `VcsRepositoryResolver` service contracts. +4. 
Implement `GitVcsDriver` with real Git command integration tests. +5. Move `GitStatusBroadcaster` and branch/worktree flows to the VCS service directly. +6. Move commit/range/push callers to the VCS service directly. +7. Delete migrated `GitCore` internals and tests as each caller group moves. +8. Add a transitional adapter only for any remaining `GitManager` path that is explicitly too complex to cut over safely in one PR. +9. Remove every transitional adapter before starting Phase 2 unless the adapter is documented as blocking on the provider cutover. + +## Acceptance Criteria + +- Current Git branch/status/worktree/commit behavior remains intact. +- New Git implementation does not depend on `processRunner.ts`. +- New errors are typed and inspectable by tests. +- VCS interfaces contain no GitHub/GitLab/Azure concepts. +- Active consumers use the new VCS APIs directly; any remaining transitional module has a written removal checklist and no compatibility export shim. +- Background status refresh is local-only by default and cannot hit provider rate limits. +- Jujutsu can be added by implementing a real driver instead of conforming to Git command semantics. +- `bun fmt`, `bun lint`, and `bun typecheck` pass. diff --git a/.plans/20-version-control-phase-2-source-control-provider-foundation.md b/.plans/20-version-control-phase-2-source-control-provider-foundation.md new file mode 100644 index 00000000000..ac1186ba5f9 --- /dev/null +++ b/.plans/20-version-control-phase-2-source-control-provider-foundation.md @@ -0,0 +1,268 @@ +# Version Control Phase 2: Source Control Provider Foundation + +## Goal + +Introduce a pluggable source-control provider layer and rewrite GitHub support as an Effect-native provider. This phase should preserve the existing GitHub Commit + PR flow while making GitLab and Azure DevOps additive drivers rather than branches inside GitHub-oriented code. 
+ +The existing `GitHubCli` service and GitHub-specific `GitManager` paths are behavior references. The new provider layer should use detailed tagged errors, schema decode boundaries, `effect/unstable/process`, capability flags, and provider-neutral change-request types. + +## Scope + +- Add provider-domain contracts in `packages/contracts/src/sourceControl.ts`. +- Add provider URL/reference parsing helpers in `packages/shared/src/sourceControl/*`. +- Add server services under `apps/server/src/sourceControl`: + - `Services/SourceControlProvider.ts` + - `Services/SourceControlProviderRegistry.ts` + - `Services/SourceControlProcess.ts` + - `Layers/GitHubSourceControlProvider.ts` + - `errors.ts` +- Migrate PR creation, PR lookup, default-branch lookup, clone URL lookup, and PR checkout through the provider layer. +- Update active consumers to the provider APIs directly; do not add backwards-compatible `GitHubCli` export shims. +- Keep GitHub as the only production provider at the end of this phase, but make GitLab and Azure implementation paths obvious and bounded. + +## Non-Goals + +- No GitLab implementation in this phase, except fixtures/contracts that prove the abstraction can represent merge requests. +- No Azure DevOps implementation in this phase, except URL/reference parser test cases if cheap. +- No in-app review UI yet. +- No hard dependency on one CLI forever. The first GitHub driver may use `gh`, but the interface should support REST/GraphQL implementations later. + +## Provider Model + +Use provider-neutral names: + +- `SourceControlProvider`: hosted repository and change-request mechanics. +- `ChangeRequest`: GitHub pull request, GitLab merge request, Azure pull request. +- `ChangeRequestThread`: review or discussion thread. +- `ChangeRequestComment`: top-level or inline comment. +- `ProviderRepository`: owner/project/repo identity plus clone URLs. 
+ +Core provider operations: + +- `detectRemote(remoteUrl)` +- `checkAuth(cwd)` +- `getRepository(cwd | remoteUrl)` +- `getDefaultTargetRef(repository)` +- `listChangeRequests(repository, filters)` +- `getChangeRequest(repository, reference)` +- `createChangeRequest(repository, input)` +- `checkoutChangeRequest(cwd, changeRequest, options)` +- `getCloneUrls(repository)` + +Review-facing operations should be designed now, even if unimplemented: + +- `listReviewThreads(changeRequest)` +- `createReviewComment(changeRequest, input)` +- `replyToReviewThread(thread, input)` +- `resolveReviewThread(thread)` +- `submitReview(changeRequest, input)` + +Each operation should be guarded by capabilities: + +```ts +export interface SourceControlProviderCapabilities { + readonly kind: "github" | "gitlab" | "azure-devops" | "unknown"; + readonly supportsCreateChangeRequest: boolean; + readonly supportsCheckoutChangeRequest: boolean; + readonly supportsReviewThreads: boolean; + readonly supportsInlineComments: boolean; + readonly supportsDraftChangeRequests: boolean; +} +``` + +## Provider Registry + +Add a registry that resolves a provider from repository remotes and explicit user input. + +Rules: + +- Detection should be pure where possible and testable without spawning CLIs. +- Remote URL parsing belongs in `packages/shared`, not server-only provider layers. +- Unknown providers should return explicit unsupported-operation errors, not silently fall back to GitHub. +- Provider selection should be stable per operation and logged with enough context to debug bad remote detection. + +The registry should support multiple provider implementations at runtime, not a single dispatcher file with inline provider branches. + +## Rate Limits and Provider Caching + +Design the provider layer around a strict freshness budget. Provider API and CLI calls must not be part of frequent background polling unless the operation is explicitly marked safe and cached. 
+ +Default behavior: + +- Pure URL/remote parsing is always live because it is local. +- Provider detection from local remotes is live-local. +- Authentication checks are cached. +- Repository metadata is cached. +- Default branch metadata is cached. +- Change-request lists are cached and refreshed on explicit user actions or coarse intervals. +- Full review threads, comments, file diffs, and timeline data are fetched only when the user opens the relevant review surface or explicitly refreshes it. +- Create/update operations invalidate affected cache keys immediately after success. + +The provider API should make freshness explicit: + +```ts +export interface SourceControlFreshness { + readonly source: "live-local" | "cached-provider" | "live-provider"; + readonly observedAt: string; + readonly expiresAt?: string; + readonly stale?: boolean; +} + +export type ProviderRefreshPolicy = + | "cache-first" + | "stale-while-revalidate" + | "force-refresh" + | "local-only"; +``` + +Every read operation that can touch a provider should accept a refresh policy. Background UI reads should default to `cache-first` or `stale-while-revalidate`; direct user actions like pressing refresh can use `force-refresh`. 
+ +Use Effect `Cache` for provider data: + +- auth status: key by provider kind, hostname, workspace identity, and account if known; TTL around minutes, not seconds +- repository metadata/default branch: key by provider repository stable ID or normalized remote URL; TTL around tens of minutes +- change-request summary lists: key by provider repository, state/filter, source ref, target ref; short TTL with stale-while-revalidate +- individual change-request summaries: key by provider repository and provider CR ID; short TTL, invalidated after create/update/comment operations +- review threads/comments/diffs: key by provider CR ID and head SHA/version when available; fetch on demand for T3 Review + +Provider drivers should surface rate-limit signals when available: + +- remaining quota +- reset time +- retry-after duration +- whether the limit is primary, secondary/abuse, or unknown + +Rate-limit errors should be typed, retryable when the provider gives a reset/retry time, and visible enough for the UI to avoid repeatedly retrying a blocked operation. + +Avoid rate-limit footguns: + +- no provider calls from render loops or fast status polling +- no listing all PRs/MRs across all repos to infer one branch state +- no silent GitHub fallback for unknown providers +- no unbounded cache cardinality for branch names or free-form search queries +- no per-thread duplicate provider refresh when multiple views observe the same repository + +## GitHub Provider Rewrite + +Rewrite GitHub support as `GitHubSourceControlProvider`. + +Carry forward behavior from: + +- `apps/server/src/git/Layers/GitHubCli.ts` +- `apps/server/src/git/Layers/GitHubCli.test.ts` +- `apps/server/src/git/githubPullRequests.ts` +- GitHub-specific `GitManager` PR paths + +Implementation requirements: + +- Use `SourceControlProcess` built on `effect/unstable/process`, not `runProcess`. +- Decode `gh api` and `gh pr --json` responses with Effect Schema. 
+- Use typed errors for auth failure, missing CLI, command failure, output decode failure, unsupported reference, and provider mismatch. +- Keep stdout/stderr bounded. +- Avoid global mutable auth caches unless they are Effect `Cache` values with explicit keys, TTLs, and invalidation behavior. +- Parse provider rate-limit headers or CLI/API error payloads when available and map them to typed rate-limit errors. +- Keep GitHub nouns inside the GitHub driver; convert to `ChangeRequest` at the provider boundary. + +## GitManager Cutover + +Refactor `GitManager` so it coordinates three independent services: + +- `VcsDriver` for local repository mechanics. +- `SourceControlProviderRegistry` for hosted provider selection. +- `TextGeneration` for message/body generation. + +`GitManager` should stop depending directly on GitHub services. User-visible step labels should be provider-neutral unless the selected provider is known and the label is intentionally provider-specific. + +The Commit + PR flow should become: + +1. Resolve VCS repository and local status. +2. Resolve source-control provider from remotes. +3. Generate commit content through the existing text generation service. +4. Create local change through `VcsDriver`. +5. Push through `VcsDriver` or a narrow provider push helper only if the VCS requires provider-specific target syntax. +6. Generate change-request title/body. +7. Create the change request through `SourceControlProvider`. + +## Cutover Policy + +This phase should aggressively remove old GitHub-specific internals. + +Rules: + +- Move each active consumer directly to `SourceControlProviderRegistry` or a concrete provider test layer. +- Delete migrated `GitHubCli` methods, tests, and GitHub-specific helper exports in the same PR that moves their final consumer. +- Do not add compatibility export shims from `apps/server/src/git` to `apps/server/src/sourceControl`. 
+- Transitional modules are allowed only for a bounded `GitManager` slice that cannot move safely with the rest of the provider cutover. +- Every transitional module must have an owner comment, a removal checklist, and no public exports consumed by new code. +- Provider-neutral web parsing should replace GitHub-only parsing directly; do not keep parallel parser stacks unless a route still requires both during a single PR. + +## GitLab and Azure Readiness + +Use the triaged references as implementation inputs, not merge targets: + +- GitLab PR #592 is useful for `glab mr` command mapping and JSON normalization. +- Azure issue #1138 defines a good first Azure slice: remote/URL detection and change-request thread setup for same-repo URLs. + +The abstraction should let Phase 3 add: + +- `GitLabSourceControlProvider` using `glab`. +- `AzureDevOpsSourceControlProvider` using `az repos pr` or REST APIs. + +No provider should need to edit GitHub code to join the registry. + +## T3 Review Design Constraint + +Do not optimize only for creation/checkout. The provider layer must be able to support a future in-app review surface. + +That means contracts should include stable IDs and enough metadata for: + +- file-level diffs +- inline review threads +- resolved/unresolved state +- top-level discussion comments +- pending review submission +- provider URL back-links + +Provider-specific fields can live in a metadata bag, but core review behavior should not require the UI to know whether the backing service is GitHub, GitLab, or Azure DevOps. + +## Tests + +Add tests at three levels: + +- Pure parser tests for GitHub, GitLab, and Azure remote URLs and change-request references. +- Provider unit tests with fake `SourceControlProcess` output and schema decode failures. +- Integration-style GitHub CLI tests only where they can run hermetically or be skipped without hiding unit coverage. + +Required cases: + +- GitHub PR URL, number, and branch-ish references. 
+- GitLab MR URL/reference parsing. +- Azure DevOps PR URL parsing for same-repo URLs. +- unknown provider returns unsupported-operation errors. +- missing CLI and auth failures produce distinct typed errors. +- invalid CLI JSON fails at decode boundary with useful context. + +## Migration Steps + +1. Add `sourceControl` contracts and provider-neutral schemas. +2. Add shared remote/reference parser helpers and tests. +3. Add `SourceControlProcess` and provider errors. +4. Add provider registry with GitHub-only registration. +5. Implement `GitHubSourceControlProvider` from scratch against the new process layer. +6. Cut GitHub PR operations in `GitManager` over to the provider registry. +7. Replace web PR-reference parsing with provider-neutral parser output while keeping current GitHub UX. +8. Add provider cache metrics and tests for cache hit, stale refresh, invalidation, and rate-limit error mapping. +9. Delete the migrated `GitHubCli` implementation, tests, and GitHub-specific helper exports unless an explicit transitional checklist remains. + +## Acceptance Criteria + +- Existing GitHub Commit + PR and PR checkout flows still work. +- `GitManager` no longer imports or depends on `GitHubCli`. +- Active consumers use source-control provider APIs directly; any remaining transitional module has a written removal checklist and no compatibility export shim. +- Source-control contracts can represent GitHub PRs, GitLab MRs, and Azure DevOps PRs. +- Unknown/unsupported providers fail explicitly and visibly. +- GitHub command execution does not depend on `processRunner.ts`. +- Background provider reads are cached/coalesced and do not consume provider API quota on every status refresh. +- Rate-limit responses become typed errors with retry/reset metadata where available. +- The provider API includes the review operations needed by future T3 Review work, even if they are capability-gated. +- `bun fmt`, `bun lint`, and `bun typecheck` pass. 
diff --git a/.plans/README.md b/.plans/README.md index 7bb69a3b912..379158d4efd 100644 --- a/.plans/README.md +++ b/.plans/README.md @@ -10,3 +10,5 @@ 8. `08-precommit-format-and-lint.md` 9. `09-event-state-test-expansion.md` 10. `10-unify-process-session-abstraction.md` +19. `19-version-control-phase-1-vcs-driver-foundation.md` +20. `20-version-control-phase-2-source-control-provider-foundation.md` diff --git a/REMOTE.md b/REMOTE.md index c0389d235ba..cbe207823f1 100644 --- a/REMOTE.md +++ b/REMOTE.md @@ -28,9 +28,41 @@ If you are already running the desktop app and want to make it reachable from ot 1. Open **Settings** → **Connections**. 2. Under **Manage Local Backend**, toggle **Network access** on. This will restart the app and run the backend on all network interfaces. -3. The settings panel will show the address the server is reachable at (e.g. `http://192.168.x.y:3773`). +3. The settings panel will show the default reachable endpoint, with a `+N` control when more endpoints are available. Expand it to inspect alternatives such as loopback, LAN, private-network, or HTTPS endpoints. 4. Use **Create Link** to generate a pairing link you can share with another device. +The default endpoint controls the QR code and primary copy action for pairing links. You can change it from the expanded endpoint list. The preference is stored by endpoint type, so choosing the local LAN endpoint survives normal IP address changes when you move between networks. + +When no user default is saved, the app uses the built-in LAN endpoint for pairing links when +available. You can set another endpoint as the default from the expanded endpoint list. + +- HTTPS/WSS-compatible endpoints work from `https://app.t3.codes`, but are not made the default + automatically. +- Non-loopback HTTP endpoints are useful for direct LAN pairing. +- Loopback-only endpoints are not useful for another device unless that device is the same machine. 
+ +If the copied link points directly at `http://192.168.x.y:3773`, open it from a client that can reach that LAN address. If it points at `https://app.t3.codes/pair?...`, the hosted web app will save the environment and connect directly to the backend URL in the link. + +### Tailscale Endpoints + +When the desktop app can detect Tailscale, it adds Tailnet endpoints to the reachable endpoint list. + +Depending on your Tailscale setup, this may include: + +- the machine's `100.x.y.z` Tailnet IP +- a MagicDNS name +- an HTTPS MagicDNS endpoint when Tailscale Serve is configured for this backend + +The Tailscale HTTPS endpoint uses the clean MagicDNS URL, such as +`https://machine.tailnet.ts.net/`, and is disabled until the app verifies that the URL reaches this +backend. Use **Setup** on the Tailscale HTTPS row to opt in. The desktop app restarts the backend +with the same server-side behavior as `t3 serve --tailscale-serve`, then the server asks Tailscale +Serve to proxy HTTPS traffic to the local backend. + +The Tailscale support is an endpoint provider add-on. The core remote model still works without Tailscale: LAN HTTP endpoints, custom HTTPS endpoints, future tunnels, and SSH-launched environments all use the same saved environment and pairing flow. + +For `https://app.t3.codes`, prefer an HTTPS Tailnet or other HTTPS endpoint. A plain `http://100.x.y.z:3773` endpoint can still work from a desktop client or another browser page served over HTTP, but it will not work from the hosted HTTPS app because of browser mixed-content rules. + ### Option 2: Headless Server (CLI) Use this when you want to run the server without a GUI, for example on a remote machine over SSH. 
@@ -53,14 +85,42 @@ From there, connect from another device in either of these ways: - scan the QR code on your phone - in the desktop app, enter the full pairing URL - in the desktop app, enter the host and token separately +- in the hosted web app, open a hosted pairing URL when the backend is reachable over HTTPS Use `t3 serve --help` for the full flag reference. It supports the same general startup options as the normal server command, including an optional `cwd` argument. +For hosted web pairing over Tailscale HTTPS, opt in to Tailscale Serve: + +```bash +npx t3 serve --tailscale-serve +``` + +By default this configures Tailscale Serve on HTTPS port 443 and advertises +`https://machine.tailnet.ts.net/`. Advanced users can choose a different HTTPS port: + +```bash +npx t3 serve --tailscale-serve --tailscale-serve-port 8443 +``` + > Note > The GUIs do not currently support adding projects on remote environments. > For now, use `t3 project ...` on the server machine instead. > Full GUI support for remote project management is coming soon. +### Option 3: Desktop-Managed SSH Launch + +Use this when you want the desktop app to start or reuse T3 Code on another machine over SSH. + +1. Open **Settings** → **Connections**. +2. Under **Remote Environments**, choose **Add environment**. +3. Select the SSH launch flow. +4. Enter the SSH target, such as `user@example.com`. +5. Confirm the launch. The desktop app probes the host, starts or reuses a remote T3 server, opens a local port forward, and saves the environment. + +After setup, the renderer connects to a local forwarded HTTP/WebSocket endpoint. The remote host still owns the actual T3 server, projects, files, git state, terminals, and provider sessions. + +SSH launch is a desktop feature because it needs local process and SSH access. Once the environment is paired and saved, it uses the same environment list and connection model as direct LAN, Tailscale, HTTPS, or future tunnel-backed environments. 
+ ## How Pairing Works The remote device does not need a long-lived secret up front. @@ -73,6 +133,20 @@ Instead: After pairing, future access is session-based. You do not need to keep reusing the original token unless you are pairing a new device. +## Hosted Web App Pairing + +The hosted web app at `https://app.t3.codes` can save a remote backend in browser local storage from a URL like: + +```text +https://app.t3.codes/pair?host=https://backend.example.com:3773#token=PAIRCODE +``` + +Use hosted pairing when the backend is reachable from the browser over HTTPS/WSS. This includes a backend behind a trusted HTTPS tunnel or another HTTPS endpoint you operate. + +Do not use hosted pairing for plain HTTP LAN URLs such as `http://192.168.x.y:3773`. Browsers block an HTTPS page from connecting to an insecure HTTP or WS backend. For those endpoints, use the direct pairing URL shown by the desktop app or CLI from a client that can open that HTTP URL directly. + +Hosted pairing does not proxy traffic through T3 Code. The browser still connects directly to the backend URL in the pairing link. + ## Managing Access Later Use `t3 auth` to manage access after the initial pairing flow. @@ -90,4 +164,5 @@ Use `t3 auth --help` and the nested subcommand help pages for the full reference - Treat pairing URLs and pairing tokens like passwords. - Prefer binding `--host` to a trusted private address, such as a Tailnet IP, instead of exposing the server broadly. - Anyone with a valid pairing credential can create a session until that credential expires or is revoked. +- Hosted pairing links keep the credential in the URL hash so it is not sent to the hosted app server, but it can still be exposed through browser history, screenshots, logs, or copy/paste. - Use `t3 auth` to revoke credentials or sessions you no longer trust. 
diff --git a/apps/desktop/package.json b/apps/desktop/package.json index 0be83f96dcf..9e4e5eec28c 100644 --- a/apps/desktop/package.json +++ b/apps/desktop/package.json @@ -15,21 +15,22 @@ "smoke-test": "node scripts/smoke-test.mjs" }, "dependencies": { + "@effect/platform-node": "catalog:", "effect": "catalog:", - "electron": "40.8.5", + "electron": "40.9.3", "electron-updater": "^6.6.2" }, "devDependencies": { + "@t3tools/client-runtime": "workspace:*", "@t3tools/contracts": "workspace:*", "@t3tools/shared": "workspace:*", + "@t3tools/ssh": "workspace:*", + "@t3tools/tailscale": "workspace:*", "@types/node": "catalog:", "effect-acp": "workspace:*", "tsdown": "catalog:", "typescript": "catalog:", "vitest": "catalog:" }, - "productName": "T3 Code (Alpha)", - "trustedDependencies": [ - "electron" - ] + "productName": "T3 Code (Alpha)" } diff --git a/apps/desktop/src/clientPersistence.test.ts b/apps/desktop/src/clientPersistence.test.ts index 192d7ac1064..d4c4768d2c8 100644 --- a/apps/desktop/src/clientPersistence.test.ts +++ b/apps/desktop/src/clientPersistence.test.ts @@ -52,8 +52,10 @@ const clientSettings: ClientSettings = { autoOpenPlanSidebar: false, confirmThreadArchive: true, confirmThreadDelete: false, + diffIgnoreWhitespace: true, diffWordWrap: true, favorites: [], + providerModelPreferences: {}, sidebarProjectGroupingMode: "repository_path", sidebarProjectGroupingOverrides: { "environment-1:/tmp/project-a": "separate", @@ -70,6 +72,12 @@ const savedRegistryRecord: PersistedSavedEnvironmentRecord = { wsBaseUrl: "wss://remote.example.com/", createdAt: "2026-04-09T00:00:00.000Z", lastConnectedAt: "2026-04-09T01:00:00.000Z", + desktopSsh: { + alias: "devbox", + hostname: "devbox.example.com", + username: "julius", + port: 22, + }, }; describe("clientPersistence", () => { diff --git a/apps/desktop/src/clientPersistence.ts b/apps/desktop/src/clientPersistence.ts index ad08a0036f1..09a3494dbc7 100644 --- a/apps/desktop/src/clientPersistence.ts +++ 
b/apps/desktop/src/clientPersistence.ts @@ -57,6 +57,12 @@ function isPersistedSavedEnvironmentStorageRecord( typeof value.wsBaseUrl === "string" && typeof value.createdAt === "string" && (value.lastConnectedAt === null || typeof value.lastConnectedAt === "string") && + (value.desktopSsh === undefined || + (Predicate.isObject(value.desktopSsh) && + typeof value.desktopSsh.alias === "string" && + typeof value.desktopSsh.hostname === "string" && + (value.desktopSsh.username === null || typeof value.desktopSsh.username === "string") && + (value.desktopSsh.port === null || typeof value.desktopSsh.port === "number"))) && (value.encryptedBearerToken === undefined || typeof value.encryptedBearerToken === "string") ); } @@ -77,7 +83,7 @@ function readSavedEnvironmentRegistryDocument(filePath: string): SavedEnvironmen function toPersistedSavedEnvironmentRecord( record: PersistedSavedEnvironmentStorageRecord, ): PersistedSavedEnvironmentRecord { - return { + const nextRecord = { environmentId: record.environmentId, label: record.label, httpBaseUrl: record.httpBaseUrl, @@ -85,6 +91,7 @@ function toPersistedSavedEnvironmentRecord( createdAt: record.createdAt, lastConnectedAt: record.lastConnectedAt, }; + return record.desktopSsh ? { ...nextRecord, desktopSsh: record.desktopSsh } : nextRecord; } export function readClientSettings(settingsPath: string): ClientSettings | null { @@ -134,6 +141,7 @@ export function writeSavedEnvironmentRegistry( wsBaseUrl: record.wsBaseUrl, createdAt: record.createdAt, lastConnectedAt: record.lastConnectedAt, + ...(record.desktopSsh ? 
{ desktopSsh: record.desktopSsh } : {}), encryptedBearerToken, } : record; @@ -189,7 +197,7 @@ export function writeSavedEnvironmentSecret(input: { const encryptedBearerToken = input.secretStorage .encryptString(input.secret) .toString("base64"); - return { + const nextRecord = { environmentId: record.environmentId, label: record.label, httpBaseUrl: record.httpBaseUrl, @@ -197,7 +205,8 @@ export function writeSavedEnvironmentSecret(input: { createdAt: record.createdAt, lastConnectedAt: record.lastConnectedAt, encryptedBearerToken, - } satisfies PersistedSavedEnvironmentStorageRecord; + }; + return record.desktopSsh ? { ...nextRecord, desktopSsh: record.desktopSsh } : nextRecord; }), } satisfies SavedEnvironmentRegistryDocument); return found; diff --git a/apps/desktop/src/desktopSettings.test.ts b/apps/desktop/src/desktopSettings.test.ts index 9b467d22cab..cc58dc810ef 100644 --- a/apps/desktop/src/desktopSettings.test.ts +++ b/apps/desktop/src/desktopSettings.test.ts @@ -9,6 +9,7 @@ import { readDesktopSettings, resolveDefaultDesktopSettings, setDesktopServerExposurePreference, + setDesktopTailscaleServePreference, setDesktopUpdateChannelPreference, writeDesktopSettings, } from "./desktopSettings.ts"; @@ -35,6 +36,8 @@ describe("desktopSettings", () => { it("defaults packaged nightly builds to the nightly update channel", () => { expect(resolveDefaultDesktopSettings("0.0.17-nightly.20260415.1")).toEqual({ serverExposureMode: "local-only", + tailscaleServeEnabled: false, + tailscaleServePort: 443, updateChannel: "nightly", updateChannelConfiguredByUser: false, }); @@ -45,12 +48,16 @@ describe("desktopSettings", () => { writeDesktopSettings(settingsPath, { serverExposureMode: "network-accessible", + tailscaleServeEnabled: true, + tailscaleServePort: 8443, updateChannel: "latest", updateChannelConfiguredByUser: true, }); expect(readDesktopSettings(settingsPath, "0.0.17")).toEqual({ serverExposureMode: "network-accessible", + tailscaleServeEnabled: true, + 
tailscaleServePort: 8443, updateChannel: "latest", updateChannelConfiguredByUser: true, }); @@ -61,6 +68,8 @@ describe("desktopSettings", () => { setDesktopServerExposurePreference( { serverExposureMode: "local-only", + tailscaleServeEnabled: false, + tailscaleServePort: 443, updateChannel: "latest", updateChannelConfiguredByUser: false, }, @@ -68,6 +77,50 @@ describe("desktopSettings", () => { ), ).toEqual({ serverExposureMode: "network-accessible", + tailscaleServeEnabled: false, + tailscaleServePort: 443, + updateChannel: "latest", + updateChannelConfiguredByUser: false, + }); + }); + + it("persists the requested Tailscale Serve preference", () => { + expect( + setDesktopTailscaleServePreference( + { + serverExposureMode: "local-only", + tailscaleServeEnabled: false, + tailscaleServePort: 443, + updateChannel: "latest", + updateChannelConfiguredByUser: false, + }, + { enabled: true, port: 8443 }, + ), + ).toEqual({ + serverExposureMode: "local-only", + tailscaleServeEnabled: true, + tailscaleServePort: 8443, + updateChannel: "latest", + updateChannelConfiguredByUser: false, + }); + }); + + it("preserves the configured Tailscale Serve port when no new port is requested", () => { + expect( + setDesktopTailscaleServePreference( + { + serverExposureMode: "local-only", + tailscaleServeEnabled: false, + tailscaleServePort: 8443, + updateChannel: "latest", + updateChannelConfiguredByUser: false, + }, + { enabled: true }, + ), + ).toEqual({ + serverExposureMode: "local-only", + tailscaleServeEnabled: true, + tailscaleServePort: 8443, updateChannel: "latest", updateChannelConfiguredByUser: false, }); @@ -78,6 +131,8 @@ describe("desktopSettings", () => { setDesktopUpdateChannelPreference( { serverExposureMode: "local-only", + tailscaleServeEnabled: false, + tailscaleServePort: 443, updateChannel: "latest", updateChannelConfiguredByUser: false, }, @@ -85,6 +140,8 @@ describe("desktopSettings", () => { ), ).toEqual({ serverExposureMode: "local-only", + 
tailscaleServeEnabled: false, + tailscaleServePort: 443, updateChannel: "nightly", updateChannelConfiguredByUser: true, }); @@ -103,6 +160,8 @@ describe("desktopSettings", () => { expect(readDesktopSettings(settingsPath, "0.0.17-nightly.20260415.1")).toEqual({ serverExposureMode: "local-only", + tailscaleServeEnabled: false, + tailscaleServePort: 443, updateChannel: "nightly", updateChannelConfiguredByUser: false, }); @@ -121,6 +180,8 @@ describe("desktopSettings", () => { expect(readDesktopSettings(settingsPath, "0.0.17-nightly.20260415.1")).toEqual({ serverExposureMode: "local-only", + tailscaleServeEnabled: false, + tailscaleServePort: 443, updateChannel: "nightly", updateChannelConfiguredByUser: false, }); @@ -140,8 +201,30 @@ describe("desktopSettings", () => { expect(readDesktopSettings(settingsPath, "0.0.17-nightly.20260415.1")).toEqual({ serverExposureMode: "local-only", + tailscaleServeEnabled: false, + tailscaleServePort: 443, updateChannel: "latest", updateChannelConfiguredByUser: true, }); }); + + it("falls back to the default Tailscale Serve port when the persisted port is invalid", () => { + const settingsPath = makeSettingsPath(); + fs.writeFileSync( + settingsPath, + JSON.stringify({ + tailscaleServeEnabled: true, + tailscaleServePort: 0, + }), + "utf8", + ); + + expect(readDesktopSettings(settingsPath, "0.0.17")).toEqual({ + serverExposureMode: "local-only", + tailscaleServeEnabled: true, + tailscaleServePort: 443, + updateChannel: "latest", + updateChannelConfiguredByUser: false, + }); + }); }); diff --git a/apps/desktop/src/desktopSettings.ts b/apps/desktop/src/desktopSettings.ts index 6ece5189cce..5a61faef803 100644 --- a/apps/desktop/src/desktopSettings.ts +++ b/apps/desktop/src/desktopSettings.ts @@ -6,12 +6,18 @@ import { resolveDefaultDesktopUpdateChannel } from "./updateChannels.ts"; export interface DesktopSettings { readonly serverExposureMode: DesktopServerExposureMode; + readonly tailscaleServeEnabled: boolean; + readonly 
tailscaleServePort: number; readonly updateChannel: DesktopUpdateChannel; readonly updateChannelConfiguredByUser: boolean; } +export const DEFAULT_TAILSCALE_SERVE_PORT = 443; + export const DEFAULT_DESKTOP_SETTINGS: DesktopSettings = { serverExposureMode: "local-only", + tailscaleServeEnabled: false, + tailscaleServePort: DEFAULT_TAILSCALE_SERVE_PORT, updateChannel: "latest", updateChannelConfiguredByUser: false, }; @@ -35,6 +41,29 @@ export function setDesktopServerExposurePreference( }; } +export function setDesktopTailscaleServePreference( + settings: DesktopSettings, + input: { readonly enabled: boolean; readonly port?: number }, +): DesktopSettings { + const port = + input.port === undefined + ? settings.tailscaleServePort + : normalizeTailscaleServePort(input.port); + return settings.tailscaleServeEnabled === input.enabled && settings.tailscaleServePort === port + ? settings + : { + ...settings, + tailscaleServeEnabled: input.enabled, + tailscaleServePort: port, + }; +} + +export function normalizeTailscaleServePort(value: unknown): number { + return typeof value === "number" && Number.isInteger(value) && value >= 1 && value <= 65_535 + ? value + : DEFAULT_TAILSCALE_SERVE_PORT; +} + export function setDesktopUpdateChannelPreference( settings: DesktopSettings, requestedChannel: DesktopUpdateChannel, @@ -57,6 +86,8 @@ export function readDesktopSettings(settingsPath: string, appVersion: string): D const raw = FS.readFileSync(settingsPath, "utf8"); const parsed = JSON.parse(raw) as { readonly serverExposureMode?: unknown; + readonly tailscaleServeEnabled?: unknown; + readonly tailscaleServePort?: unknown; readonly updateChannel?: unknown; readonly updateChannelConfiguredByUser?: unknown; }; @@ -72,6 +103,8 @@ export function readDesktopSettings(settingsPath: string, appVersion: string): D return { serverExposureMode: parsed.serverExposureMode === "network-accessible" ? 
"network-accessible" : "local-only", + tailscaleServeEnabled: parsed.tailscaleServeEnabled === true, + tailscaleServePort: normalizeTailscaleServePort(parsed.tailscaleServePort), updateChannel: updateChannelConfiguredByUser && parsedUpdateChannel !== null ? parsedUpdateChannel diff --git a/apps/desktop/src/main.ts b/apps/desktop/src/main.ts index 5cf6a8fd152..78dc62e8885 100644 --- a/apps/desktop/src/main.ts +++ b/apps/desktop/src/main.ts @@ -36,11 +36,14 @@ import { autoUpdater } from "electron-updater"; import type { ContextMenuItem } from "@t3tools/contracts"; import { RotatingFileSink } from "@t3tools/shared/logging"; import { parsePersistedServerObservabilitySettings } from "@t3tools/shared/serverSettings"; +import type { RemoteT3RunnerOptions } from "@t3tools/ssh/tunnel"; import { DEFAULT_DESKTOP_BACKEND_PORT, resolveDesktopBackendPort } from "./backendPort.ts"; import { + type DesktopSettings, DEFAULT_DESKTOP_SETTINGS, readDesktopSettings, setDesktopServerExposurePreference, + setDesktopTailscaleServePreference, setDesktopUpdateChannelPreference, writeDesktopSettings, } from "./desktopSettings.ts"; @@ -55,7 +58,11 @@ import { } from "./clientPersistence.ts"; import { isBackendReadinessAborted, waitForHttpReady } from "./backendReadiness.ts"; import { showDesktopConfirmDialog } from "./confirmDialog.ts"; -import { resolveDesktopServerExposure } from "./serverExposure.ts"; +import { + resolveDesktopCoreAdvertisedEndpoints, + resolveDesktopServerExposure, +} from "./serverExposure.ts"; +import { DesktopSshEnvironmentBridge, resolveRemoteT3CliPackageSpec } from "./sshEnvironment.ts"; import { syncShellEnvironment } from "./syncShellEnvironment.ts"; import { waitForBackendStartupReady } from "./backendStartupReadiness.ts"; import { getAutoUpdateDisabledReason, shouldBroadcastDownloadProgress } from "./updateState.ts"; @@ -76,6 +83,7 @@ import { import { isArm64HostRunningIntelBuild, resolveDesktopRuntimeInfo } from "./runtimeArch.ts"; import { 
resolveDesktopAppBranding } from "./appBranding.ts"; import { bindFirstRevealTrigger, type RevealSubscription } from "./windowReveal.ts"; +import { resolveTailscaleAdvertisedEndpoints } from "./tailscaleEndpointProvider.ts"; syncShellEnvironment(); @@ -107,6 +115,8 @@ const SET_SAVED_ENVIRONMENT_SECRET_CHANNEL = "desktop:set-saved-environment-secr const REMOVE_SAVED_ENVIRONMENT_SECRET_CHANNEL = "desktop:remove-saved-environment-secret"; const GET_SERVER_EXPOSURE_STATE_CHANNEL = "desktop:get-server-exposure-state"; const SET_SERVER_EXPOSURE_MODE_CHANNEL = "desktop:set-server-exposure-mode"; +const SET_TAILSCALE_SERVE_ENABLED_CHANNEL = "desktop:set-tailscale-serve-enabled"; +const GET_ADVERTISED_ENDPOINTS_CHANNEL = "desktop:get-advertised-endpoints"; const BASE_DIR = process.env.T3CODE_HOME?.trim() || Path.join(OS.homedir(), ".t3"); const STATE_DIR = Path.join(BASE_DIR, "userdata"); const DESKTOP_SETTINGS_PATH = Path.join(STATE_DIR, "desktop-settings.json"); @@ -115,6 +125,11 @@ const SAVED_ENVIRONMENT_REGISTRY_PATH = Path.join(STATE_DIR, "saved-environments const DESKTOP_SCHEME = "t3"; const ROOT_DIR = Path.resolve(__dirname, "../../.."); const isDevelopment = Boolean(process.env.VITE_DEV_SERVER_URL); +// Dev-only SSH launcher override. Set this to an absolute path on the SSH host +// for a built server entry, for example: +// "/Users/julius/Development/Work/codething-mvp/apps/server/dist/bin.mjs" +const DEV_REMOTE_T3_SERVER_ENTRY_PATH = + process.env.T3CODE_DEV_REMOTE_T3_SERVER_ENTRY_PATH?.trim() ?? 
""; const desktopAppBranding: DesktopAppBranding = resolveDesktopAppBranding({ isDevelopment, appVersion: app.getVersion(), @@ -302,6 +317,9 @@ function backendChildEnv(): NodeJS.ProcessEnv { delete env.T3CODE_DESKTOP_WS_URL; delete env.T3CODE_DESKTOP_LAN_ACCESS; delete env.T3CODE_DESKTOP_LAN_HOST; + delete env.T3CODE_DESKTOP_HTTPS_ENDPOINTS; + delete env.T3CODE_TAILSCALE_SERVE; + delete env.T3CODE_TAILSCALE_SERVE_PORT; return env; } @@ -310,9 +328,32 @@ function getDesktopServerExposureState(): DesktopServerExposureState { mode: desktopServerExposureMode, endpointUrl: backendEndpointUrl, advertisedHost: backendAdvertisedHost, + tailscaleServeEnabled: desktopSettings.tailscaleServeEnabled, + tailscaleServePort: desktopSettings.tailscaleServePort, }; } +async function getDesktopAdvertisedEndpoints() { + const exposure = resolveDesktopServerExposure({ + mode: desktopServerExposureMode, + port: backendPort, + networkInterfaces: OS.networkInterfaces(), + ...(backendAdvertisedHost ? { advertisedHostOverride: backendAdvertisedHost } : {}), + }); + const coreEndpoints = resolveDesktopCoreAdvertisedEndpoints({ + port: backendPort, + exposure, + customHttpsEndpointUrls: resolveCustomHttpsEndpointUrls(), + }); + const tailscaleEndpoints = await resolveTailscaleAdvertisedEndpoints({ + port: backendPort, + serveEnabled: desktopSettings.tailscaleServeEnabled, + servePort: desktopSettings.tailscaleServePort, + networkInterfaces: OS.networkInterfaces(), + }); + return [...coreEndpoints, ...tailscaleEndpoints]; +} + function getDesktopSecretStorage() { return { isEncryptionAvailable: () => safeStorage.isEncryptionAvailable(), @@ -326,9 +367,19 @@ function resolveAdvertisedHostOverride(): string | undefined { return override && override.length > 0 ? override : undefined; } +function resolveCustomHttpsEndpointUrls(): readonly string[] { + return (process.env.T3CODE_DESKTOP_HTTPS_ENDPOINTS ?? 
"") + .split(",") + .map((entry) => entry.trim()) + .filter((entry) => entry.length > 0); +} + async function applyDesktopServerExposureMode( mode: DesktopServerExposureMode, - options?: { readonly persist?: boolean; readonly rejectIfUnavailable?: boolean }, + options?: { + readonly persist?: boolean; + readonly rejectIfUnavailable?: boolean; + }, ): Promise { const advertisedHostOverride = resolveAdvertisedHostOverride(); const requestedMode = mode; @@ -366,6 +417,17 @@ async function applyDesktopServerExposureMode( return getDesktopServerExposureState(); } +async function applyDesktopTailscaleServeEnabled( + nextSettings: DesktopSettings, +): Promise { + desktopSettings = nextSettings; + writeDesktopSettings(DESKTOP_SETTINGS_PATH, desktopSettings); + relaunchDesktopApp( + desktopSettings.tailscaleServeEnabled ? "tailscale-serve-enabled" : "tailscale-serve-disabled", + ); + return getDesktopServerExposureState(); +} + function relaunchDesktopApp(reason: string): void { writeDesktopLogHeader(`desktop relaunch requested reason=${reason}`); setImmediate(() => { @@ -378,6 +440,7 @@ function relaunchDesktopApp(reason: string): void { `desktop relaunch backend shutdown warning message=${formatErrorMessage(error)}`, ); }) + .then(() => desktopSshEnvironmentBridge.dispose().catch(() => undefined)) .finally(() => { restoreStdIoCapture?.(); if (isDevelopment) { @@ -634,6 +697,22 @@ let updateInstallInFlight = false; let updaterConfigured = false; let updateState: DesktopUpdateState = initialUpdateState(); +const desktopSshEnvironmentBridge = new DesktopSshEnvironmentBridge({ + getMainWindow: () => mainWindow, + resolveCliRunner: (): RemoteT3RunnerOptions => { + if (isDevelopment && DEV_REMOTE_T3_SERVER_ENTRY_PATH.length > 0) { + return { nodeScriptPath: DEV_REMOTE_T3_SERVER_ENTRY_PATH }; + } + return { + packageSpec: resolveRemoteT3CliPackageSpec({ + appVersion: app.getVersion(), + updateChannel: desktopSettings.updateChannel, + isDevelopment, + }), + }; + }, +}); + 
function resolveUpdaterErrorContext(): DesktopUpdateErrorContext { if (updateInstallInFlight) return "install"; if (updateDownloadInFlight) return "download"; @@ -1185,7 +1264,10 @@ async function checkForUpdates(reason: string): Promise { } } -async function downloadAvailableUpdate(): Promise<{ accepted: boolean; completed: boolean }> { +async function downloadAvailableUpdate(): Promise<{ + accepted: boolean; + completed: boolean; +}> { if (!updaterConfigured || updateDownloadInFlight || updateState.status !== "available") { return { accepted: false, completed: false }; } @@ -1207,7 +1289,10 @@ async function downloadAvailableUpdate(): Promise<{ accepted: boolean; completed } } -async function installDownloadedUpdate(): Promise<{ accepted: boolean; completed: boolean }> { +async function installDownloadedUpdate(): Promise<{ + accepted: boolean; + completed: boolean; +}> { if (isQuitting || !updaterConfigured || updateState.status !== "downloaded") { return { accepted: false, completed: false }; } @@ -1393,6 +1478,8 @@ function startBackend(): void { t3Home: BASE_DIR, host: backendBindHost, desktopBootstrapToken: backendBootstrapToken, + tailscaleServeEnabled: desktopSettings.tailscaleServeEnabled, + tailscaleServePort: desktopSettings.tailscaleServePort, ...(backendObservabilitySettings.otlpTracesUrl ? 
{ otlpTracesUrl: backendObservabilitySettings.otlpTracesUrl } : {}), @@ -1638,6 +1725,8 @@ function registerIpcHandlers(): void { }, ); + desktopSshEnvironmentBridge.registerIpcHandlers(ipcMain); + ipcMain.removeHandler(GET_SERVER_EXPOSURE_STATE_CHANNEL); ipcMain.handle(GET_SERVER_EXPOSURE_STATE_CHANNEL, async () => getDesktopServerExposureState()); @@ -1660,6 +1749,31 @@ function registerIpcHandlers(): void { return nextState; }); + ipcMain.removeHandler(SET_TAILSCALE_SERVE_ENABLED_CHANNEL); + ipcMain.handle(SET_TAILSCALE_SERVE_ENABLED_CHANNEL, async (_event, rawInput: unknown) => { + if (typeof rawInput !== "object" || rawInput === null) { + throw new Error("Invalid Tailscale Serve input."); + } + const input = rawInput as { + readonly enabled?: unknown; + readonly port?: unknown; + }; + if (typeof input.enabled !== "boolean") { + throw new Error("Invalid Tailscale Serve input."); + } + const nextSettings = setDesktopTailscaleServePreference(desktopSettings, { + enabled: input.enabled, + ...(typeof input.port === "number" ? { port: input.port } : {}), + }); + if (nextSettings === desktopSettings) { + return getDesktopServerExposureState(); + } + return applyDesktopTailscaleServeEnabled(nextSettings); + }); + + ipcMain.removeHandler(GET_ADVERTISED_ENDPOINTS_CHANNEL); + ipcMain.handle(GET_ADVERTISED_ENDPOINTS_CHANNEL, async () => getDesktopAdvertisedEndpoints()); + ipcMain.removeHandler(PICK_FOLDER_CHANNEL); ipcMain.handle(PICK_FOLDER_CHANNEL, async (_event, rawOptions: unknown) => { const owner = BrowserWindow.getFocusedWindow() ?? 
mainWindow; @@ -1987,7 +2101,10 @@ function createWindow(): BrowserWindow { const externalUrl = getSafeExternalUrl(params.linkURL); if (externalUrl) { menuTemplate.push( - { label: "Copy Link", click: () => clipboard.writeText(params.linkURL) }, + { + label: "Copy Link", + click: () => clipboard.writeText(params.linkURL), + }, { type: "separator" }, ); } @@ -2047,6 +2164,9 @@ function createWindow(): BrowserWindow { } window.on("closed", () => { + desktopSshEnvironmentBridge.cancelPendingPasswordPrompts( + "SSH authentication was cancelled because the app window closed.", + ); if (mainWindow === window) { mainWindow = null; } @@ -2137,6 +2257,7 @@ app.on("before-quit", () => { clearUpdatePollTimer(); cancelBackendReadinessWait(); stopBackend(); + void desktopSshEnvironmentBridge.dispose().catch(() => undefined); restoreStdIoCapture?.(); }); @@ -2186,6 +2307,7 @@ if (process.platform !== "win32") { clearUpdatePollTimer(); cancelBackendReadinessWait(); stopBackend(); + void desktopSshEnvironmentBridge.dispose().catch(() => undefined); restoreStdIoCapture?.(); app.quit(); }); @@ -2196,6 +2318,7 @@ if (process.platform !== "win32") { writeDesktopLogHeader("SIGTERM received"); clearUpdatePollTimer(); stopBackend(); + void desktopSshEnvironmentBridge.dispose().catch(() => undefined); restoreStdIoCapture?.(); app.quit(); }); diff --git a/apps/desktop/src/preload.ts b/apps/desktop/src/preload.ts index 2e345b9e555..91e99ad5294 100644 --- a/apps/desktop/src/preload.ts +++ b/apps/desktop/src/preload.ts @@ -26,8 +26,36 @@ const SET_SAVED_ENVIRONMENT_REGISTRY_CHANNEL = "desktop:set-saved-environment-re const GET_SAVED_ENVIRONMENT_SECRET_CHANNEL = "desktop:get-saved-environment-secret"; const SET_SAVED_ENVIRONMENT_SECRET_CHANNEL = "desktop:set-saved-environment-secret"; const REMOVE_SAVED_ENVIRONMENT_SECRET_CHANNEL = "desktop:remove-saved-environment-secret"; +const DISCOVER_SSH_HOSTS_CHANNEL = "desktop:discover-ssh-hosts"; +const ENSURE_SSH_ENVIRONMENT_CHANNEL = 
"desktop:ensure-ssh-environment"; +const DISCONNECT_SSH_ENVIRONMENT_CHANNEL = "desktop:disconnect-ssh-environment"; +const FETCH_SSH_ENVIRONMENT_DESCRIPTOR_CHANNEL = "desktop:fetch-ssh-environment-descriptor"; +const BOOTSTRAP_SSH_BEARER_SESSION_CHANNEL = "desktop:bootstrap-ssh-bearer-session"; +const FETCH_SSH_SESSION_STATE_CHANNEL = "desktop:fetch-ssh-session-state"; +const ISSUE_SSH_WEBSOCKET_TOKEN_CHANNEL = "desktop:issue-ssh-websocket-token"; +const SSH_PASSWORD_PROMPT_CHANNEL = "desktop:ssh-password-prompt"; +const RESOLVE_SSH_PASSWORD_PROMPT_CHANNEL = "desktop:resolve-ssh-password-prompt"; const GET_SERVER_EXPOSURE_STATE_CHANNEL = "desktop:get-server-exposure-state"; const SET_SERVER_EXPOSURE_MODE_CHANNEL = "desktop:set-server-exposure-mode"; +const SET_TAILSCALE_SERVE_ENABLED_CHANNEL = "desktop:set-tailscale-serve-enabled"; +const GET_ADVERTISED_ENDPOINTS_CHANNEL = "desktop:get-advertised-endpoints"; +const SSH_PASSWORD_PROMPT_CANCELLED_RESULT = "ssh-password-prompt-cancelled"; + +function unwrapEnsureSshEnvironmentResult(result: unknown) { + if ( + typeof result === "object" && + result !== null && + "type" in result && + result.type === SSH_PASSWORD_PROMPT_CANCELLED_RESULT + ) { + const message = + "message" in result && typeof result.message === "string" + ? 
result.message + : "SSH authentication cancelled."; + throw new Error(message); + } + return result as Awaited>; +} contextBridge.exposeInMainWorld("desktopBridge", { getAppBranding: () => { @@ -55,8 +83,39 @@ contextBridge.exposeInMainWorld("desktopBridge", { ipcRenderer.invoke(SET_SAVED_ENVIRONMENT_SECRET_CHANNEL, environmentId, secret), removeSavedEnvironmentSecret: (environmentId) => ipcRenderer.invoke(REMOVE_SAVED_ENVIRONMENT_SECRET_CHANNEL, environmentId), + discoverSshHosts: () => ipcRenderer.invoke(DISCOVER_SSH_HOSTS_CHANNEL), + ensureSshEnvironment: async (target, options) => + unwrapEnsureSshEnvironmentResult( + await ipcRenderer.invoke(ENSURE_SSH_ENVIRONMENT_CHANNEL, target, options), + ), + disconnectSshEnvironment: (target) => + ipcRenderer.invoke(DISCONNECT_SSH_ENVIRONMENT_CHANNEL, target), + fetchSshEnvironmentDescriptor: (httpBaseUrl) => + ipcRenderer.invoke(FETCH_SSH_ENVIRONMENT_DESCRIPTOR_CHANNEL, httpBaseUrl), + bootstrapSshBearerSession: (httpBaseUrl, credential) => + ipcRenderer.invoke(BOOTSTRAP_SSH_BEARER_SESSION_CHANNEL, httpBaseUrl, credential), + fetchSshSessionState: (httpBaseUrl, bearerToken) => + ipcRenderer.invoke(FETCH_SSH_SESSION_STATE_CHANNEL, httpBaseUrl, bearerToken), + issueSshWebSocketToken: (httpBaseUrl, bearerToken) => + ipcRenderer.invoke(ISSUE_SSH_WEBSOCKET_TOKEN_CHANNEL, httpBaseUrl, bearerToken), + onSshPasswordPrompt: (listener) => { + const wrappedListener = (_event: Electron.IpcRendererEvent, request: unknown) => { + if (typeof request !== "object" || request === null) return; + listener(request as Parameters[0]); + }; + + ipcRenderer.on(SSH_PASSWORD_PROMPT_CHANNEL, wrappedListener); + return () => { + ipcRenderer.removeListener(SSH_PASSWORD_PROMPT_CHANNEL, wrappedListener); + }; + }, + resolveSshPasswordPrompt: (requestId, password) => + ipcRenderer.invoke(RESOLVE_SSH_PASSWORD_PROMPT_CHANNEL, requestId, password), getServerExposureState: () => ipcRenderer.invoke(GET_SERVER_EXPOSURE_STATE_CHANNEL), setServerExposureMode: 
(mode) => ipcRenderer.invoke(SET_SERVER_EXPOSURE_MODE_CHANNEL, mode), + setTailscaleServeEnabled: (input) => + ipcRenderer.invoke(SET_TAILSCALE_SERVE_ENABLED_CHANNEL, input), + getAdvertisedEndpoints: () => ipcRenderer.invoke(GET_ADVERTISED_ENDPOINTS_CHANNEL), pickFolder: (options) => ipcRenderer.invoke(PICK_FOLDER_CHANNEL, options), confirm: (message) => ipcRenderer.invoke(CONFIRM_CHANNEL, message), setTheme: (theme) => ipcRenderer.invoke(SET_THEME_CHANNEL, theme), diff --git a/apps/desktop/src/serverExposure.test.ts b/apps/desktop/src/serverExposure.test.ts index c83bbc210e0..4e284ef42bd 100644 --- a/apps/desktop/src/serverExposure.test.ts +++ b/apps/desktop/src/serverExposure.test.ts @@ -1,6 +1,10 @@ import { describe, expect, it } from "vitest"; -import { resolveDesktopServerExposure, resolveLanAdvertisedHost } from "./serverExposure.ts"; +import { + resolveDesktopCoreAdvertisedEndpoints, + resolveDesktopServerExposure, + resolveLanAdvertisedHost, +} from "./serverExposure.ts"; describe("resolveLanAdvertisedHost", () => { it("prefers an explicit host override", () => { @@ -74,6 +78,121 @@ describe("resolveLanAdvertisedHost", () => { }); }); +describe("resolveDesktopCoreAdvertisedEndpoints", () => { + it("advertises loopback and LAN endpoints without provider-specific assumptions", () => { + const exposure = resolveDesktopServerExposure({ + mode: "network-accessible", + port: 3773, + networkInterfaces: { + en0: [ + { + address: "192.168.1.44", + family: "IPv4", + internal: false, + netmask: "255.255.255.0", + cidr: "192.168.1.44/24", + mac: "00:00:00:00:00:00", + }, + ], + }, + }); + + expect( + resolveDesktopCoreAdvertisedEndpoints({ + port: 3773, + exposure, + customHttpsEndpointUrls: [ + "https://desktop.example.ts.net", + "http://desktop.example.test:3773", + "not-a-url", + ], + }), + ).toEqual([ + { + id: "desktop-loopback:3773", + label: "This machine", + provider: { + id: "desktop-core", + label: "Desktop", + kind: "core", + isAddon: false, + }, + 
httpBaseUrl: "http://127.0.0.1:3773/", + wsBaseUrl: "ws://127.0.0.1:3773/", + reachability: "loopback", + compatibility: { + hostedHttpsApp: "mixed-content-blocked", + desktopApp: "compatible", + }, + source: "desktop-core", + status: "available", + description: "Loopback endpoint for this desktop app.", + }, + { + id: "desktop-lan:http://192.168.1.44:3773", + label: "Local network", + provider: { + id: "desktop-core", + label: "Desktop", + kind: "core", + isAddon: false, + }, + httpBaseUrl: "http://192.168.1.44:3773/", + wsBaseUrl: "ws://192.168.1.44:3773/", + reachability: "lan", + compatibility: { + hostedHttpsApp: "mixed-content-blocked", + desktopApp: "compatible", + }, + source: "desktop-core", + status: "available", + isDefault: true, + description: "Reachable from devices on the same network.", + }, + { + id: "manual:https://desktop.example.ts.net", + label: "Custom HTTPS", + provider: { + id: "manual", + label: "Manual", + kind: "manual", + isAddon: false, + }, + httpBaseUrl: "https://desktop.example.ts.net/", + wsBaseUrl: "wss://desktop.example.ts.net/", + reachability: "public", + compatibility: { + hostedHttpsApp: "compatible", + desktopApp: "compatible", + }, + source: "user", + status: "unknown", + description: "User-configured HTTPS endpoint for this desktop backend.", + }, + { + id: "manual:http://desktop.example.test:3773", + label: "Custom endpoint", + provider: { + id: "manual", + label: "Manual", + kind: "manual", + isAddon: false, + }, + httpBaseUrl: "http://desktop.example.test:3773/", + wsBaseUrl: "ws://desktop.example.test:3773/", + reachability: "public", + compatibility: { + hostedHttpsApp: "mixed-content-blocked", + desktopApp: "compatible", + }, + source: "user", + status: "unknown", + description: "User-configured endpoint for this desktop backend.", + }, + ]); + }); +}); + describe("resolveDesktopServerExposure", () => { it("keeps the desktop server loopback-only when local-only mode is selected", () => { expect( diff --git 
a/apps/desktop/src/serverExposure.ts b/apps/desktop/src/serverExposure.ts index 65c99b60e13..b73b850ad13 100644 --- a/apps/desktop/src/serverExposure.ts +++ b/apps/desktop/src/serverExposure.ts @@ -1,5 +1,13 @@ import type { NetworkInterfaceInfo } from "node:os"; -import type { DesktopServerExposureMode } from "@t3tools/contracts"; +import { + createAdvertisedEndpoint, + type CreateAdvertisedEndpointInput, +} from "@t3tools/client-runtime"; +import type { + AdvertisedEndpoint, + AdvertisedEndpointProvider, + DesktopServerExposureMode, +} from "@t3tools/contracts"; const DESKTOP_LOOPBACK_HOST = "127.0.0.1"; const DESKTOP_LAN_BIND_HOST = "0.0.0.0"; @@ -13,6 +21,26 @@ export interface DesktopServerExposure { readonly advertisedHost: string | null; } +export interface DesktopAdvertisedEndpointInput { + readonly port: number; + readonly exposure: DesktopServerExposure; + readonly customHttpsEndpointUrls?: readonly string[]; +} + +const DESKTOP_CORE_ENDPOINT_PROVIDER: AdvertisedEndpointProvider = { + id: "desktop-core", + label: "Desktop", + kind: "core", + isAddon: false, +}; + +const DESKTOP_MANUAL_ENDPOINT_PROVIDER: AdvertisedEndpointProvider = { + id: "manual", + label: "Manual", + kind: "manual", + isAddon: false, +}; + const normalizeOptionalHost = (value: string | undefined): string | undefined => { const normalized = value?.trim(); return normalized && normalized.length > 0 ? 
normalized : undefined; @@ -21,6 +49,14 @@ const normalizeOptionalHost = (value: string | undefined): string | undefined => const isUsableLanIpv4Address = (address: string): boolean => !address.startsWith("127.") && !address.startsWith("169.254."); +function isHttpsEndpointUrl(value: string): boolean { + try { + return new URL(value).protocol === "https:"; + } catch { + return false; + } +} + export function resolveLanAdvertisedHost( networkInterfaces: NodeJS.Dict, explicitHost: string | undefined, @@ -78,3 +114,75 @@ export function resolveDesktopServerExposure(input: { advertisedHost, }; } + +function createDesktopEndpoint( + input: Omit, +): AdvertisedEndpoint { + return createAdvertisedEndpoint({ + ...input, + provider: DESKTOP_CORE_ENDPOINT_PROVIDER, + source: "desktop-core", + }); +} + +function createManualEndpoint( + input: Omit, +): AdvertisedEndpoint { + return createAdvertisedEndpoint({ + ...input, + provider: DESKTOP_MANUAL_ENDPOINT_PROVIDER, + source: "user", + }); +} + +export function resolveDesktopCoreAdvertisedEndpoints( + input: DesktopAdvertisedEndpointInput, +): readonly AdvertisedEndpoint[] { + const endpoints: AdvertisedEndpoint[] = [ + createDesktopEndpoint({ + id: `desktop-loopback:${input.port}`, + label: "This machine", + httpBaseUrl: input.exposure.localHttpUrl, + reachability: "loopback", + status: "available", + description: "Loopback endpoint for this desktop app.", + }), + ]; + + if (input.exposure.endpointUrl) { + endpoints.push( + createDesktopEndpoint({ + id: `desktop-lan:${input.exposure.endpointUrl}`, + label: "Local network", + httpBaseUrl: input.exposure.endpointUrl, + reachability: "lan", + status: "available", + isDefault: true, + description: "Reachable from devices on the same network.", + }), + ); + } + + for (const customEndpointUrl of input.customHttpsEndpointUrls ?? 
[]) { + try { + const isHttpsEndpoint = isHttpsEndpointUrl(customEndpointUrl); + endpoints.push( + createManualEndpoint({ + id: `manual:${customEndpointUrl}`, + label: isHttpsEndpoint ? "Custom HTTPS" : "Custom endpoint", + httpBaseUrl: customEndpointUrl, + reachability: "public", + ...(isHttpsEndpoint ? ({ hostedHttpsCompatibility: "compatible" } as const) : {}), + status: "unknown", + description: isHttpsEndpoint + ? "User-configured HTTPS endpoint for this desktop backend." + : "User-configured endpoint for this desktop backend.", + }), + ); + } catch { + // Ignore malformed user-configured endpoints without dropping valid endpoints. + } + } + + return endpoints; +} diff --git a/apps/desktop/src/sshEnvironment.test.ts b/apps/desktop/src/sshEnvironment.test.ts new file mode 100644 index 00000000000..d22e09957d1 --- /dev/null +++ b/apps/desktop/src/sshEnvironment.test.ts @@ -0,0 +1,98 @@ +import * as FS from "node:fs"; +import * as OS from "node:os"; +import * as Path from "node:path"; + +import { afterEach, describe, expect, it } from "vitest"; + +import { SshPasswordPromptError } from "@t3tools/ssh/errors"; + +import { discoverDesktopSshHosts, isSshPasswordPromptCancellation } from "./sshEnvironment.ts"; + +const tempDirectories: string[] = []; + +afterEach(() => { + for (const directory of tempDirectories.splice(0)) { + FS.rmSync(directory, { recursive: true, force: true }); + } +}); + +function makeTempHomeDir(): string { + const directory = FS.mkdtempSync(Path.join(OS.tmpdir(), "t3-ssh-env-test-")); + tempDirectories.push(directory); + return directory; +} + +describe("sshEnvironment", () => { + it("treats password prompt timeouts as cancellable authentication prompts", () => { + expect( + isSshPasswordPromptCancellation( + new SshPasswordPromptError({ + message: "SSH authentication timed out for devbox.", + }), + ), + ).toBe(true); + }); + + it("wires desktop host discovery through the ssh package runtime", async () => { + const homeDir = makeTempHomeDir(); 
+ const sshDir = Path.join(homeDir, ".ssh"); + FS.mkdirSync(Path.join(sshDir, "config.d"), { recursive: true }); + FS.writeFileSync( + Path.join(sshDir, "config"), + ["Host devbox", " HostName devbox.example.com", "Include config.d/*.conf", ""].join("\n"), + "utf8", + ); + FS.writeFileSync( + Path.join(sshDir, "config.d", "team.conf"), + [ + "Host staging", + " HostName staging.example.com", + "Host *", + " ServerAliveInterval 30", + "", + ].join("\n"), + "utf8", + ); + FS.writeFileSync( + Path.join(sshDir, "known_hosts"), + [ + "known.example.com ssh-ed25519 AAAA", + "|1|hashed|entry ssh-ed25519 AAAA", + "[bastion.example.com]:2222 ssh-ed25519 AAAA", + "", + ].join("\n"), + "utf8", + ); + + await expect(discoverDesktopSshHosts({ homeDir })).resolves.toEqual([ + { + alias: "bastion.example.com", + hostname: "bastion.example.com", + username: null, + port: null, + source: "known-hosts", + }, + { + alias: "devbox", + hostname: "devbox", + username: null, + port: null, + source: "ssh-config", + }, + { + alias: "known.example.com", + hostname: "known.example.com", + username: null, + port: null, + source: "known-hosts", + }, + { + alias: "staging", + hostname: "staging", + username: null, + port: null, + source: "ssh-config", + }, + ]); + }); +}); diff --git a/apps/desktop/src/sshEnvironment.ts b/apps/desktop/src/sshEnvironment.ts new file mode 100644 index 00000000000..e847e07d498 --- /dev/null +++ b/apps/desktop/src/sshEnvironment.ts @@ -0,0 +1,420 @@ +import * as Crypto from "node:crypto"; + +import * as NodeHttpClient from "@effect/platform-node/NodeHttpClient"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { NetService } from "@t3tools/shared/Net"; +import type { + AuthBearerBootstrapResult, + AuthSessionState, + AuthWebSocketTokenResult, + DesktopDiscoveredSshHost, + DesktopSshEnvironmentTarget, + DesktopSshPasswordPromptRequest, + ExecutionEnvironmentDescriptor, +} from "@t3tools/contracts"; +import { + SshPasswordPrompt, + type 
SshPasswordPromptShape, + type SshPasswordRequest, +} from "@t3tools/ssh/auth"; +import { discoverSshHosts } from "@t3tools/ssh/config"; +import { SshPasswordPromptError } from "@t3tools/ssh/errors"; +import { + fetchLoopbackSshJson, + SshEnvironmentManager, + type RemoteT3RunnerOptions, +} from "@t3tools/ssh/tunnel"; +import { Effect, Exit, Layer, ManagedRuntime, Scope } from "effect"; + +export { resolveRemoteT3CliPackageSpec } from "@t3tools/ssh/command"; + +const DISCOVER_SSH_HOSTS_CHANNEL = "desktop:discover-ssh-hosts"; +const ENSURE_SSH_ENVIRONMENT_CHANNEL = "desktop:ensure-ssh-environment"; +const DISCONNECT_SSH_ENVIRONMENT_CHANNEL = "desktop:disconnect-ssh-environment"; +const FETCH_SSH_ENVIRONMENT_DESCRIPTOR_CHANNEL = "desktop:fetch-ssh-environment-descriptor"; +const BOOTSTRAP_SSH_BEARER_SESSION_CHANNEL = "desktop:bootstrap-ssh-bearer-session"; +const FETCH_SSH_SESSION_STATE_CHANNEL = "desktop:fetch-ssh-session-state"; +const ISSUE_SSH_WEBSOCKET_TOKEN_CHANNEL = "desktop:issue-ssh-websocket-token"; +const SSH_PASSWORD_PROMPT_CHANNEL = "desktop:ssh-password-prompt"; +const RESOLVE_SSH_PASSWORD_PROMPT_CHANNEL = "desktop:resolve-ssh-password-prompt"; +const DEFAULT_SSH_PASSWORD_PROMPT_TIMEOUT_MS = 3 * 60 * 1000; +const SSH_PASSWORD_PROMPT_CANCELLED_RESULT = "ssh-password-prompt-cancelled"; + +interface DesktopSshEnvironmentManagerOptions { + readonly passwordProvider?: (request: SshPasswordRequest) => Promise; + readonly resolveCliPackageSpec?: () => string; + readonly resolveCliRunner?: () => RemoteT3RunnerOptions; +} + +const sshRuntime = ManagedRuntime.make( + Layer.mergeAll(NodeServices.layer, NodeHttpClient.layerUndici, NetService.layer), +); + +function createDesktopSshRuntime( + passwordPrompt: SshPasswordPromptShape, + scope: Scope.Scope, + options: DesktopSshEnvironmentManagerOptions, +) { + return ManagedRuntime.make( + Layer.mergeAll( + NodeServices.layer, + NodeHttpClient.layerUndici, + NetService.layer, + Layer.succeed(Scope.Scope, scope), + 
Layer.succeed(SshPasswordPrompt, SshPasswordPrompt.of(passwordPrompt)), + SshEnvironmentManager.layer({ + ...(options.resolveCliPackageSpec === undefined + ? {} + : { resolveCliPackageSpec: options.resolveCliPackageSpec }), + ...(options.resolveCliRunner === undefined + ? {} + : { resolveCliRunner: options.resolveCliRunner }), + }), + ), + ); +} + +export async function discoverDesktopSshHosts(input?: { + readonly homeDir?: string; +}): Promise { + return await sshRuntime.runPromise(discoverSshHosts(input ?? {})); +} + +export class DesktopSshEnvironmentManager { + private readonly runtime: ReturnType; + private readonly scope: Scope.Scope; + + constructor(options: DesktopSshEnvironmentManagerOptions = {}) { + const passwordPrompt: SshPasswordPromptShape = { + isAvailable: options.passwordProvider !== undefined, + request: (request) => { + const passwordProvider = options.passwordProvider; + if (!passwordProvider) { + return Effect.succeed(null); + } + + return Effect.tryPromise({ + try: () => passwordProvider(request), + catch: (cause) => + new SshPasswordPromptError({ + message: cause instanceof Error ? 
cause.message : "SSH password prompt failed.", + cause, + }), + }); + }, + }; + this.scope = Effect.runSync(Scope.make()); + this.runtime = createDesktopSshRuntime(passwordPrompt, this.scope, options); + } + + async discoverHosts(): Promise { + return await discoverDesktopSshHosts(); + } + + async ensureEnvironment( + target: DesktopSshEnvironmentTarget, + options?: { readonly issuePairingToken?: boolean }, + ) { + return await this.runtime.runPromise( + Effect.service(SshEnvironmentManager).pipe( + Effect.flatMap((manager) => manager.ensureEnvironment(target, options)), + ), + ); + } + + async disconnectEnvironment(target: DesktopSshEnvironmentTarget): Promise { + await this.runtime.runPromise( + Effect.service(SshEnvironmentManager).pipe( + Effect.flatMap((manager) => manager.disconnectEnvironment(target)), + ), + ); + } + + async dispose(): Promise { + await this.runtime.runPromise(Scope.close(this.scope, Exit.void)); + await this.runtime.dispose(); + } +} + +function getSafeDesktopSshTarget(rawTarget: unknown): DesktopSshEnvironmentTarget | null { + if (typeof rawTarget !== "object" || rawTarget === null) { + return null; + } + + const target = rawTarget as Partial; + if (typeof target.alias !== "string" || typeof target.hostname !== "string") { + return null; + } + if ( + target.username !== null && + target.username !== undefined && + typeof target.username !== "string" + ) { + return null; + } + if (target.port !== null && target.port !== undefined && !Number.isInteger(target.port)) { + return null; + } + + const alias = target.alias.trim(); + const hostname = target.hostname.trim(); + if (alias.length === 0 || hostname.length === 0) { + return null; + } + + return { + alias, + hostname, + username: target.username?.trim() || null, + port: target.port ?? null, + }; +} + +/** Minimal subset of Electron's BrowserWindow used by the SSH bridge. 
*/ +export interface DesktopSshBridgeWindow { + isDestroyed(): boolean; + isMinimized(): boolean; + restore(): void; + focus(): void; + readonly webContents: { + send(channel: string, ...args: readonly unknown[]): void; + }; +} + +/** Minimal subset of Electron's ipcMain used by the SSH bridge. */ +export interface DesktopSshBridgeIpcMain { + removeHandler(channel: string): void; + handle( + channel: string, + listener: (event: unknown, ...args: readonly unknown[]) => unknown | Promise, + ): void; +} + +export interface DesktopSshEnvironmentBridgeOptions { + readonly getMainWindow: () => DesktopSshBridgeWindow | null; + readonly resolveCliPackageSpec?: () => string; + readonly resolveCliRunner?: () => RemoteT3RunnerOptions; + readonly passwordPromptTimeoutMs?: number; +} + +interface PendingSshPasswordPrompt { + readonly resolve: (password: string | null) => void; + readonly reject: (error: Error) => void; + readonly timeout: ReturnType; +} + +export function isSshPasswordPromptCancellation(error: unknown): error is SshPasswordPromptError { + const message = error instanceof SshPasswordPromptError ? error.message.toLowerCase() : ""; + return ( + error instanceof SshPasswordPromptError && + (message.includes("cancelled") || message.includes("timed out")) + ); +} + +/** + * Wires the SSH environment manager to Electron IPC, owning the renderer-facing + * password prompt state so `main.ts` only needs to register, cancel, and dispose. + */ +export class DesktopSshEnvironmentBridge { + private readonly options: DesktopSshEnvironmentBridgeOptions; + private readonly manager: DesktopSshEnvironmentManager; + private readonly pendingPrompts = new Map(); + private readonly passwordPromptTimeoutMs: number; + + constructor(options: DesktopSshEnvironmentBridgeOptions) { + this.options = options; + this.passwordPromptTimeoutMs = + options.passwordPromptTimeoutMs ?? 
DEFAULT_SSH_PASSWORD_PROMPT_TIMEOUT_MS; + this.manager = new DesktopSshEnvironmentManager({ + passwordProvider: (request) => this.requestPasswordFromRenderer(request), + ...(options.resolveCliPackageSpec === undefined + ? {} + : { resolveCliPackageSpec: options.resolveCliPackageSpec }), + ...(options.resolveCliRunner === undefined + ? {} + : { resolveCliRunner: options.resolveCliRunner }), + }); + } + + registerIpcHandlers(ipcMain: DesktopSshBridgeIpcMain): void { + ipcMain.removeHandler(DISCOVER_SSH_HOSTS_CHANNEL); + ipcMain.handle(DISCOVER_SSH_HOSTS_CHANNEL, async () => this.manager.discoverHosts()); + + ipcMain.removeHandler(ENSURE_SSH_ENVIRONMENT_CHANNEL); + ipcMain.handle(ENSURE_SSH_ENVIRONMENT_CHANNEL, async (_event, rawTarget, rawOptions) => { + const target = getSafeDesktopSshTarget(rawTarget); + if (!target) { + throw new Error("Invalid desktop SSH target."); + } + + const issuePairingToken = + typeof rawOptions === "object" && + rawOptions !== null && + "issuePairingToken" in rawOptions && + (rawOptions as { issuePairingToken?: unknown }).issuePairingToken === true; + + try { + return await this.manager.ensureEnvironment(target, { + issuePairingToken, + }); + } catch (error) { + if (isSshPasswordPromptCancellation(error)) { + return { + type: SSH_PASSWORD_PROMPT_CANCELLED_RESULT, + message: error.message, + }; + } + throw error; + } + }); + + ipcMain.removeHandler(DISCONNECT_SSH_ENVIRONMENT_CHANNEL); + ipcMain.handle(DISCONNECT_SSH_ENVIRONMENT_CHANNEL, async (_event, rawTarget) => { + const target = getSafeDesktopSshTarget(rawTarget); + if (!target) { + throw new Error("Invalid desktop SSH target."); + } + + await this.manager.disconnectEnvironment(target); + }); + + ipcMain.removeHandler(FETCH_SSH_ENVIRONMENT_DESCRIPTOR_CHANNEL); + ipcMain.handle(FETCH_SSH_ENVIRONMENT_DESCRIPTOR_CHANNEL, async (_event, rawHttpBaseUrl) => + sshRuntime.runPromise( + fetchLoopbackSshJson({ + httpBaseUrl: rawHttpBaseUrl, + pathname: "/.well-known/t3/environment", + }), + ), 
+ ); + + ipcMain.removeHandler(BOOTSTRAP_SSH_BEARER_SESSION_CHANNEL); + ipcMain.handle( + BOOTSTRAP_SSH_BEARER_SESSION_CHANNEL, + async (_event, rawHttpBaseUrl, rawCredential) => + sshRuntime.runPromise( + fetchLoopbackSshJson({ + httpBaseUrl: rawHttpBaseUrl, + pathname: "/api/auth/bootstrap/bearer", + method: "POST", + body: { credential: rawCredential }, + }), + ), + ); + + ipcMain.removeHandler(FETCH_SSH_SESSION_STATE_CHANNEL); + ipcMain.handle( + FETCH_SSH_SESSION_STATE_CHANNEL, + async (_event, rawHttpBaseUrl, rawBearerToken) => + sshRuntime.runPromise( + fetchLoopbackSshJson({ + httpBaseUrl: rawHttpBaseUrl, + pathname: "/api/auth/session", + bearerToken: rawBearerToken, + }), + ), + ); + + ipcMain.removeHandler(ISSUE_SSH_WEBSOCKET_TOKEN_CHANNEL); + ipcMain.handle( + ISSUE_SSH_WEBSOCKET_TOKEN_CHANNEL, + async (_event, rawHttpBaseUrl, rawBearerToken) => + sshRuntime.runPromise( + fetchLoopbackSshJson({ + httpBaseUrl: rawHttpBaseUrl, + pathname: "/api/auth/ws-token", + method: "POST", + bearerToken: rawBearerToken, + }), + ), + ); + + ipcMain.removeHandler(RESOLVE_SSH_PASSWORD_PROMPT_CHANNEL); + ipcMain.handle( + RESOLVE_SSH_PASSWORD_PROMPT_CHANNEL, + async (_event, rawRequestId, rawPassword) => { + if (typeof rawRequestId !== "string" || rawRequestId.trim().length === 0) { + throw new Error("Invalid SSH password prompt id."); + } + if (rawPassword !== null && typeof rawPassword !== "string") { + throw new Error("Invalid SSH password prompt response."); + } + + const pending = this.pendingPrompts.get(rawRequestId); + if (!pending) { + throw new Error("SSH password prompt expired. 
Try connecting again."); + } + + clearTimeout(pending.timeout); + this.pendingPrompts.delete(rawRequestId); + pending.resolve(rawPassword); + }, + ); + } + + cancelPendingPasswordPrompts(reason: string): void { + for (const [requestId, pending] of this.pendingPrompts) { + clearTimeout(pending.timeout); + this.pendingPrompts.delete(requestId); + pending.reject(new Error(reason)); + } + } + + async dispose(): Promise { + this.cancelPendingPasswordPrompts("SSH environment bridge disposed."); + await this.manager.dispose(); + } + + private async requestPasswordFromRenderer(input: SshPasswordRequest): Promise { + const window = this.options.getMainWindow(); + if (!window || window.isDestroyed()) { + throw new Error("T3 Code window is not available for SSH authentication."); + } + + const request: DesktopSshPasswordPromptRequest = { + requestId: Crypto.randomUUID(), + destination: input.destination, + username: input.username, + prompt: input.prompt, + expiresAt: new Date(Date.now() + this.passwordPromptTimeoutMs).toISOString(), + }; + + return await new Promise((resolve, reject) => { + const rejectPrompt = (error: Error) => { + clearTimeout(timeout); + this.pendingPrompts.delete(request.requestId); + reject(error); + }; + const timeout = setTimeout(() => { + this.pendingPrompts.delete(request.requestId); + reject(new Error(`SSH authentication timed out for ${input.destination}.`)); + }, this.passwordPromptTimeoutMs); + timeout.unref(); + + this.pendingPrompts.set(request.requestId, { resolve, reject, timeout }); + + try { + if (window.isDestroyed()) { + throw new Error("T3 Code window is not available for SSH authentication."); + } + window.webContents.send(SSH_PASSWORD_PROMPT_CHANNEL, request); + if (window.isDestroyed()) { + throw new Error("T3 Code window is not available for SSH authentication."); + } + if (window.isMinimized()) { + window.restore(); + } + if (window.isDestroyed()) { + throw new Error("T3 Code window is not available for SSH authentication."); + } + 
window.focus(); + } catch (error) { + rejectPrompt( + error instanceof Error + ? error + : new Error("T3 Code window is not available for SSH authentication."), + ); + } + }); + } +} diff --git a/apps/desktop/src/tailscaleEndpointProvider.test.ts b/apps/desktop/src/tailscaleEndpointProvider.test.ts new file mode 100644 index 00000000000..2e92b7ee5d3 --- /dev/null +++ b/apps/desktop/src/tailscaleEndpointProvider.test.ts @@ -0,0 +1,122 @@ +import { describe, expect, it } from "vitest"; +import { Effect } from "effect"; + +import { + isTailscaleIpv4Address, + parseTailscaleMagicDnsName, + resolveTailscaleAdvertisedEndpoints, +} from "./tailscaleEndpointProvider.ts"; + +describe("tailscale endpoint provider", () => { + it("detects Tailnet IPv4 addresses", () => { + expect(isTailscaleIpv4Address("100.64.0.1")).toBe(true); + expect(isTailscaleIpv4Address("100.127.255.254")).toBe(true); + expect(isTailscaleIpv4Address("100.128.0.1")).toBe(false); + expect(isTailscaleIpv4Address("192.168.1.44")).toBe(false); + }); + + it("parses MagicDNS names from tailscale status", async () => { + expect( + Effect.runSync( + parseTailscaleMagicDnsName(JSON.stringify({ Self: { DNSName: "desktop.tail.ts.net." } })), + ), + ).toBe("desktop.tail.ts.net"); + expect(Effect.runSync(parseTailscaleMagicDnsName("{}"))).toBeNull(); + await expect(Effect.runPromise(parseTailscaleMagicDnsName("not-json"))).rejects.toBeDefined(); + }); + + it("resolves Tailscale endpoints as add-on advertised endpoints", async () => { + await expect( + resolveTailscaleAdvertisedEndpoints({ + port: 3773, + networkInterfaces: { + tailscale0: [ + { + address: "100.100.100.100", + family: "IPv4", + internal: false, + netmask: "255.192.0.0", + cidr: "100.100.100.100/10", + mac: "00:00:00:00:00:00", + }, + ], + }, + statusJson: JSON.stringify({ Self: { DNSName: "desktop.tail.ts.net." 
} }), + }), + ).resolves.toEqual([ + { + id: "tailscale-ip:http://100.100.100.100:3773", + label: "Tailscale IP", + provider: { + id: "tailscale", + label: "Tailscale", + kind: "private-network", + isAddon: true, + }, + httpBaseUrl: "http://100.100.100.100:3773/", + wsBaseUrl: "ws://100.100.100.100:3773/", + reachability: "private-network", + compatibility: { + hostedHttpsApp: "mixed-content-blocked", + desktopApp: "compatible", + }, + source: "desktop-addon", + status: "available", + description: "Reachable from devices on the same Tailnet.", + }, + { + id: "tailscale-magicdns:https://desktop.tail.ts.net/", + label: "Tailscale HTTPS", + provider: { + id: "tailscale", + label: "Tailscale", + kind: "private-network", + isAddon: true, + }, + httpBaseUrl: "https://desktop.tail.ts.net/", + wsBaseUrl: "wss://desktop.tail.ts.net/", + reachability: "private-network", + compatibility: { + hostedHttpsApp: "requires-configuration", + desktopApp: "compatible", + }, + source: "desktop-addon", + status: "unavailable", + description: "MagicDNS hostname. Configure Tailscale Serve for HTTPS access.", + }, + ]); + }); + + it("marks the Tailscale HTTPS endpoint available after Serve is enabled and reachable", async () => { + await expect( + resolveTailscaleAdvertisedEndpoints({ + port: 3773, + networkInterfaces: {}, + statusJson: JSON.stringify({ Self: { DNSName: "desktop.tail.ts.net." 
} }), + serveEnabled: true, + probe: async () => true, + }), + ).resolves.toEqual([ + { + id: "tailscale-magicdns:https://desktop.tail.ts.net/", + label: "Tailscale HTTPS", + provider: { + id: "tailscale", + label: "Tailscale", + kind: "private-network", + isAddon: true, + }, + httpBaseUrl: "https://desktop.tail.ts.net/", + wsBaseUrl: "wss://desktop.tail.ts.net/", + reachability: "private-network", + compatibility: { + hostedHttpsApp: "compatible", + desktopApp: "compatible", + }, + source: "desktop-addon", + status: "available", + description: "HTTPS endpoint served by Tailscale Serve.", + }, + ]); + }); +}); diff --git a/apps/desktop/src/tailscaleEndpointProvider.ts b/apps/desktop/src/tailscaleEndpointProvider.ts new file mode 100644 index 00000000000..053eac5d442 --- /dev/null +++ b/apps/desktop/src/tailscaleEndpointProvider.ts @@ -0,0 +1,142 @@ +import type { NetworkInterfaceInfo } from "node:os"; + +import * as NodeHttpClient from "@effect/platform-node/NodeHttpClient"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { + createAdvertisedEndpoint, + type CreateAdvertisedEndpointInput, +} from "@t3tools/client-runtime"; +import type { AdvertisedEndpoint, AdvertisedEndpointProvider } from "@t3tools/contracts"; +import { + buildTailscaleHttpsBaseUrl, + isTailscaleIpv4Address, + parseTailscaleMagicDnsName, + probeTailscaleHttpsEndpoint, + readTailscaleStatus, +} from "@t3tools/tailscale"; +import { Effect, Layer } from "effect"; + +export { isTailscaleIpv4Address, parseTailscaleMagicDnsName } from "@t3tools/tailscale"; + +const TailscaleDesktopLayer = Layer.mergeAll(NodeServices.layer, NodeHttpClient.layerUndici); + +const TAILSCALE_ENDPOINT_PROVIDER: AdvertisedEndpointProvider = { + id: "tailscale", + label: "Tailscale", + kind: "private-network", + isAddon: true, +}; + +function createTailscaleEndpoint( + input: Omit, +): AdvertisedEndpoint { + return createAdvertisedEndpoint({ + ...input, + provider: TAILSCALE_ENDPOINT_PROVIDER, + 
source: "desktop-addon", + }); +} + +export function resolveTailscaleIpAdvertisedEndpoints(input: { + readonly port: number; + readonly networkInterfaces: NodeJS.Dict; +}): readonly AdvertisedEndpoint[] { + const seen = new Set(); + const endpoints: AdvertisedEndpoint[] = []; + + for (const interfaceAddresses of Object.values(input.networkInterfaces)) { + if (!interfaceAddresses) continue; + + for (const address of interfaceAddresses) { + if (address.internal) continue; + if (address.family !== "IPv4") continue; + if (!isTailscaleIpv4Address(address.address)) continue; + if (seen.has(address.address)) continue; + seen.add(address.address); + + endpoints.push( + createTailscaleEndpoint({ + id: `tailscale-ip:http://${address.address}:${input.port}`, + label: "Tailscale IP", + httpBaseUrl: `http://${address.address}:${input.port}`, + reachability: "private-network", + status: "available", + description: "Reachable from devices on the same Tailnet.", + }), + ); + } + } + + return endpoints; +} + +export async function resolveTailscaleMagicDnsAdvertisedEndpoint(input: { + readonly dnsName: string | null; + readonly serveEnabled: boolean; + readonly servePort?: number; + readonly probe?: (baseUrl: string) => Promise; +}): Promise { + if (!input.dnsName) { + return null; + } + + const httpBaseUrl = buildTailscaleHttpsBaseUrl({ + magicDnsName: input.dnsName, + ...(input.servePort === undefined ? {} : { servePort: input.servePort }), + }); + const isReachable = input.serveEnabled + ? await (input.probe?.(httpBaseUrl) ?? + Effect.runPromise( + probeTailscaleHttpsEndpoint({ baseUrl: httpBaseUrl }).pipe( + Effect.provide(TailscaleDesktopLayer), + ), + )) + : false; + + return createTailscaleEndpoint({ + id: `tailscale-magicdns:${httpBaseUrl}`, + label: "Tailscale HTTPS", + httpBaseUrl, + reachability: "private-network", + hostedHttpsCompatibility: isReachable ? "compatible" : "requires-configuration", + status: isReachable ? 
"available" : "unavailable", + description: isReachable + ? "HTTPS endpoint served by Tailscale Serve." + : "MagicDNS hostname. Configure Tailscale Serve for HTTPS access.", + }); +} + +export async function resolveTailscaleAdvertisedEndpoints(input: { + readonly port: number; + readonly serveEnabled?: boolean; + readonly servePort?: number; + readonly networkInterfaces: NodeJS.Dict; + readonly statusJson?: string | null; + readonly probe?: (baseUrl: string) => Promise; +}): Promise { + const ipEndpoints = resolveTailscaleIpAdvertisedEndpoints(input); + const dnsName = + input.statusJson === undefined + ? await Effect.runPromise( + readTailscaleStatus.pipe( + Effect.map((status) => status.magicDnsName), + Effect.catch(() => Effect.succeed(null)), + Effect.provide(TailscaleDesktopLayer), + ), + ) + : input.statusJson + ? await Effect.runPromise( + parseTailscaleMagicDnsName(input.statusJson).pipe( + Effect.catch(() => Effect.succeed(null)), + ), + ) + : null; + const magicDnsEndpoint = await resolveTailscaleMagicDnsAdvertisedEndpoint({ + dnsName, + serveEnabled: input.serveEnabled === true, + ...(input.servePort === undefined ? {} : { servePort: input.servePort }), + ...(input.probe === undefined ? {} : { probe: input.probe }), + }); + + return magicDnsEndpoint ? 
[...ipEndpoints, magicDnsEndpoint] : ipEndpoints; +} diff --git a/apps/server/integration/OrchestrationEngineHarness.integration.ts b/apps/server/integration/OrchestrationEngineHarness.integration.ts index 8f59fd3f46c..24c17561195 100644 --- a/apps/server/integration/OrchestrationEngineHarness.integration.ts +++ b/apps/server/integration/OrchestrationEngineHarness.integration.ts @@ -3,7 +3,8 @@ import { execFileSync } from "node:child_process"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { ApprovalRequestId, - ProviderKind, + CodexSettings, + ProviderDriverKind, type OrchestrationEvent, type OrchestrationThread, } from "@t3tools/contracts"; @@ -24,10 +25,7 @@ import { import { CheckpointStoreLive } from "../src/checkpointing/Layers/CheckpointStore.ts"; import { CheckpointStore } from "../src/checkpointing/Services/CheckpointStore.ts"; -import { GitCoreLive } from "../src/git/Layers/GitCore.ts"; -import { GitCore, type GitCoreShape } from "../src/git/Services/GitCore.ts"; -import { GitStatusBroadcaster } from "../src/git/Services/GitStatusBroadcaster.ts"; -import { TextGeneration, type TextGenerationShape } from "../src/git/Services/TextGeneration.ts"; +import { TextGeneration, type TextGenerationShape } from "../src/textGeneration/TextGeneration.ts"; import { OrchestrationCommandReceiptRepositoryLive } from "../src/persistence/Layers/OrchestrationCommandReceipts.ts"; import { OrchestrationEventStoreLive } from "../src/persistence/Layers/OrchestrationEventStore.ts"; import { ProjectionCheckpointRepositoryLive } from "../src/persistence/Layers/ProjectionCheckpoints.ts"; @@ -36,13 +34,16 @@ import { ProviderSessionRuntimeRepositoryLive } from "../src/persistence/Layers/ import { makeSqlitePersistenceLive } from "../src/persistence/Layers/Sqlite.ts"; import { ProjectionCheckpointRepository } from "../src/persistence/Services/ProjectionCheckpoints.ts"; import { ProjectionPendingApprovalRepository } from 
"../src/persistence/Services/ProjectionPendingApprovals.ts"; -import { ProviderUnsupportedError } from "../src/provider/Errors.ts"; +import { makeAdapterRegistryMock } from "../src/provider/testUtils/providerAdapterRegistryMock.ts"; import { ProviderAdapterRegistry } from "../src/provider/Services/ProviderAdapterRegistry.ts"; import { ProviderSessionDirectoryLive } from "../src/provider/Layers/ProviderSessionDirectory.ts"; import { ServerSettingsService } from "../src/serverSettings.ts"; import { makeProviderServiceLive } from "../src/provider/Layers/ProviderService.ts"; -import { makeCodexAdapterLive } from "../src/provider/Layers/CodexAdapter.ts"; -import { CodexAdapter } from "../src/provider/Services/CodexAdapter.ts"; +import { makeCodexAdapter } from "../src/provider/Layers/CodexAdapter.ts"; +import { + NoOpProviderEventLoggers, + ProviderEventLoggers, +} from "../src/provider/Layers/ProviderEventLoggers.ts"; import { ProviderService } from "../src/provider/Services/ProviderService.ts"; import { AnalyticsService } from "../src/telemetry/Services/AnalyticsService.ts"; import { CheckpointReactorLive } from "../src/orchestration/Layers/CheckpointReactor.ts"; @@ -73,6 +74,11 @@ import { import { deriveServerPaths, ServerConfig } from "../src/config.ts"; import { WorkspaceEntriesLive } from "../src/workspace/Layers/WorkspaceEntries.ts"; import { WorkspacePathsLive } from "../src/workspace/Layers/WorkspacePaths.ts"; +import * as GitVcsDriver from "../src/vcs/GitVcsDriver.ts"; +import * as VcsDriverRegistry from "../src/vcs/VcsDriverRegistry.ts"; +import { VcsStatusBroadcaster } from "../src/vcs/VcsStatusBroadcaster.ts"; +import { GitWorkflowService } from "../src/git/GitWorkflowService.ts"; +import * as VcsProcess from "../src/vcs/VcsProcess.ts"; function runGit(cwd: string, args: ReadonlyArray) { return execFileSync("git", args, { @@ -214,7 +220,7 @@ export interface OrchestrationIntegrationHarness { } interface MakeOrchestrationIntegrationHarnessOptions { - 
readonly provider?: ProviderKind; + readonly provider?: ProviderDriverKind; readonly realCodex?: boolean; } @@ -225,7 +231,7 @@ export const makeOrchestrationIntegrationHarness = ( const path = yield* Path.Path; const fileSystem = yield* FileSystem.FileSystem; - const provider = options?.provider ?? "codex"; + const provider = options?.provider ?? ProviderDriverKind.make("codex"); const useRealCodex = options?.realCodex === true; const adapterHarness = useRealCodex ? null @@ -233,13 +239,10 @@ export const makeOrchestrationIntegrationHarness = ( provider, }); const fakeRegistry = adapterHarness - ? Layer.succeed(ProviderAdapterRegistry, { - getByProvider: (resolvedProvider) => - resolvedProvider === adapterHarness.provider - ? Effect.succeed(adapterHarness.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider: resolvedProvider })), - listProviders: () => Effect.succeed([adapterHarness.provider]), - } as typeof ProviderAdapterRegistry.Service) + ? Layer.succeed( + ProviderAdapterRegistry, + makeAdapterRegistryMock({ [adapterHarness.provider]: adapterHarness.adapter }), + ) : null; const rootDir = yield* fileSystem.makeTempDirectoryScoped({ prefix: "t3-orchestration-integration-", @@ -264,34 +267,33 @@ export const makeOrchestrationIntegrationHarness = ( const realCodexRegistry = Layer.effect( ProviderAdapterRegistry, Effect.gen(function* () { - const codexAdapter = yield* CodexAdapter; - return { - getByProvider: (resolvedProvider) => - resolvedProvider === "codex" - ? 
Effect.succeed(codexAdapter) - : Effect.fail(new ProviderUnsupportedError({ provider: resolvedProvider })), - listProviders: () => Effect.succeed(["codex"] as const), - } as typeof ProviderAdapterRegistry.Service; + const codexSettings = Schema.decodeSync(CodexSettings)({}); + const codexAdapter = yield* makeCodexAdapter(codexSettings); + return makeAdapterRegistryMock({ + [ProviderDriverKind.make("codex")]: codexAdapter, + }); }), ).pipe( - Layer.provide(makeCodexAdapterLive()), Layer.provideMerge(ServerConfig.layerTest(workspaceDir, rootDir)), Layer.provideMerge(NodeServices.layer), Layer.provideMerge(providerSessionDirectoryLayer), ); + const providerEventLoggersLayer = Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers); const providerLayer = useRealCodex ? makeProviderServiceLive().pipe( Layer.provide(providerSessionDirectoryLayer), Layer.provide(realCodexRegistry), Layer.provide(AnalyticsService.layerTest), + Layer.provide(providerEventLoggersLayer), ) : makeProviderServiceLive().pipe( Layer.provide(providerSessionDirectoryLayer), Layer.provide(fakeRegistry!), Layer.provide(AnalyticsService.layerTest), + Layer.provide(providerEventLoggersLayer), ); - const checkpointStoreLayer = CheckpointStoreLive.pipe(Layer.provide(GitCoreLive)); + const checkpointStoreLayer = CheckpointStoreLive.pipe(Layer.provide(VcsDriverRegistry.layer)); const projectionSnapshotQueryLayer = OrchestrationProjectionSnapshotQueryLive; const runtimeServicesLayer = Layer.mergeAll( projectionSnapshotQueryLayer, @@ -307,31 +309,31 @@ export const makeOrchestrationIntegrationHarness = ( Layer.provideMerge(runtimeServicesLayer), Layer.provideMerge(serverSettingsLayer), ); - const gitCoreLayer = Layer.succeed(GitCore, { - renameBranch: (input: Parameters[0]) => + const gitWorkflowLayer = Layer.mock(GitWorkflowService)({ + renameBranch: (input: Parameters[0]) => Effect.succeed({ branch: input.newBranch }), - } as unknown as GitCoreShape); + }); const textGenerationLayer = 
Layer.succeed(TextGeneration, { generateBranchName: () => Effect.succeed({ branch: "update" }), generateThreadTitle: () => Effect.succeed({ title: "New thread" }), } as unknown as TextGenerationShape); const providerCommandReactorLayer = ProviderCommandReactorLive.pipe( Layer.provideMerge(runtimeServicesLayer), - Layer.provideMerge(gitCoreLayer), + Layer.provideMerge(gitWorkflowLayer), Layer.provideMerge(textGenerationLayer), Layer.provideMerge(serverSettingsLayer), ); const checkpointReactorLayer = CheckpointReactorLive.pipe( Layer.provideMerge(runtimeServicesLayer), Layer.provideMerge( - Layer.succeed(GitStatusBroadcaster, { + Layer.succeed(VcsStatusBroadcaster, { getStatus: () => Effect.die("getStatus should not be called in this test"), refreshLocalStatus: () => Effect.succeed({ isRepo: true, - hasOriginRemote: false, - isDefaultBranch: true, - branch: "main", + hasPrimaryRemote: false, + isDefaultRef: true, + refName: "main", hasWorkingTreeChanges: false, workingTree: { files: [], insertions: 0, deletions: 0 }, }), @@ -342,11 +344,12 @@ export const makeOrchestrationIntegrationHarness = ( Layer.provideMerge( WorkspaceEntriesLive.pipe( Layer.provide(WorkspacePathsLive), - Layer.provideMerge(gitCoreLayer), + Layer.provideMerge(VcsDriverRegistry.layer), Layer.provide(NodeServices.layer), ), ), Layer.provideMerge(WorkspacePathsLive), + Layer.provideMerge(VcsProcess.layer), ); const orchestrationReactorLayer = OrchestrationReactorLive.pipe( Layer.provideMerge(runtimeIngestionLayer), diff --git a/apps/server/integration/TestProviderAdapter.integration.ts b/apps/server/integration/TestProviderAdapter.integration.ts index 69a4d528cdd..3907e038be5 100644 --- a/apps/server/integration/TestProviderAdapter.integration.ts +++ b/apps/server/integration/TestProviderAdapter.integration.ts @@ -10,7 +10,7 @@ import { ProviderTurnStartResult, ThreadId, TurnId, - ProviderKind, + ProviderDriverKind, } from "@t3tools/contracts"; import { Effect, Queue, Stream } from "effect"; @@ 
-24,7 +24,6 @@ import type { ProviderThreadSnapshot, ProviderThreadTurnSnapshot, } from "../src/provider/Services/ProviderAdapter.ts"; -import { getProviderCapabilities } from "../src/provider/Services/ProviderAdapter.ts"; export interface TestTurnResponse { readonly events: ReadonlyArray; @@ -37,7 +36,7 @@ export interface TestTurnResponse { export type FixtureProviderRuntimeEvent = { readonly type: string; readonly eventId: EventId; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly createdAt: string; readonly threadId: string; readonly turnId?: string | undefined; @@ -179,7 +178,7 @@ function normalizeFixtureEvent(rawEvent: Record): ProviderRunti export interface TestProviderAdapterHarness { readonly adapter: ProviderAdapterShape; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly queueTurnResponse: ( threadId: ThreadId, response: TestTurnResponse, @@ -199,7 +198,7 @@ export interface TestProviderAdapterHarness { } interface MakeTestProviderAdapterHarnessOptions { - readonly provider?: ProviderKind; + readonly provider?: ProviderDriverKind; } function nowIso(): string { @@ -207,7 +206,7 @@ function nowIso(): string { } function sessionNotFound( - provider: ProviderKind, + provider: ProviderDriverKind, threadId: ThreadId, ): ProviderAdapterSessionNotFoundError { return new ProviderAdapterSessionNotFoundError({ @@ -217,7 +216,7 @@ function sessionNotFound( } function missingSessionEffect( - provider: ProviderKind, + provider: ProviderDriverKind, threadId: ThreadId, ): Effect.Effect { return Effect.fail(sessionNotFound(provider, threadId)); @@ -225,7 +224,7 @@ function missingSessionEffect( export const makeTestProviderAdapterHarness = (options?: MakeTestProviderAdapterHarnessOptions) => Effect.gen(function* () { - const provider = options?.provider ?? "codex"; + const provider = options?.provider ?? 
ProviderDriverKind.make("codex"); const runtimeEvents = yield* Queue.unbounded(); let sessionCount = 0; const sessions = new Map(); @@ -258,6 +257,9 @@ export const makeTestProviderAdapterHarness = (options?: MakeTestProviderAdapter const session: ProviderSession = { provider, + ...(input.providerInstanceId !== undefined + ? { providerInstanceId: input.providerInstanceId } + : {}), status: "ready", runtimeMode: input.runtimeMode, threadId, @@ -475,7 +477,7 @@ export const makeTestProviderAdapterHarness = (options?: MakeTestProviderAdapter const adapter: ProviderAdapterShape = { provider, - capabilities: getProviderCapabilities(provider), + capabilities: { sessionModelSwitch: "in-session" }, startSession, sendTurn, interruptTurn, diff --git a/apps/server/integration/fixtures/providerRuntime.ts b/apps/server/integration/fixtures/providerRuntime.ts index 14a45518c3c..e1258c4cc62 100644 --- a/apps/server/integration/fixtures/providerRuntime.ts +++ b/apps/server/integration/fixtures/providerRuntime.ts @@ -1,7 +1,7 @@ -import { EventId, RuntimeRequestId } from "@t3tools/contracts"; +import { EventId, ProviderDriverKind, RuntimeRequestId } from "@t3tools/contracts"; import type { LegacyProviderRuntimeEvent } from "../TestProviderAdapter.integration.ts"; -const PROVIDER = "codex" as const; +const PROVIDER = ProviderDriverKind.make("codex"); const SESSION_ID = "fixture-session"; const THREAD_ID = "fixture-thread"; const TURN_ID = "fixture-turn"; diff --git a/apps/server/integration/orchestrationEngine.integration.test.ts b/apps/server/integration/orchestrationEngine.integration.test.ts index 261bd11991c..6bd94819839 100644 --- a/apps/server/integration/orchestrationEngine.integration.test.ts +++ b/apps/server/integration/orchestrationEngine.integration.test.ts @@ -4,14 +4,17 @@ import path from "node:path"; import { ApprovalRequestId, CommandId, + defaultInstanceIdForDriver, DEFAULT_PROVIDER_INTERACTION_MODE, + DEFAULT_MODEL, DEFAULT_MODEL_BY_PROVIDER, EventId, MessageId, 
ProjectId, - ProviderKind, + ProviderDriverKind, ThreadId, ModelSelection, + ProviderInstanceId, } from "@t3tools/contracts"; import { assert, it } from "@effect/vitest"; import { Effect, Option, Schema } from "effect"; @@ -39,7 +42,9 @@ const PROJECT_ID = asProjectId("project-1"); const THREAD_ID = ThreadId.make("thread-1"); const FIXTURE_TURN_ID = "fixture-turn"; const APPROVAL_REQUEST_ID = asApprovalRequestId("req-approval-1"); -type IntegrationProvider = ProviderKind; +type IntegrationProvider = ProviderDriverKind; +const CODEX_PROVIDER = ProviderDriverKind.make("codex"); +const CLAUDE_AGENT_PROVIDER = ProviderDriverKind.make("claudeAgent"); function nowIso() { return new Date().toISOString(); @@ -74,7 +79,11 @@ function waitForSync( }); } -function runtimeBase(eventId: string, createdAt: string, provider: IntegrationProvider = "codex") { +function runtimeBase( + eventId: string, + createdAt: string, + provider: IntegrationProvider = CODEX_PROVIDER, +) { return { eventId: asEventId(eventId), provider, @@ -84,7 +93,7 @@ function runtimeBase(eventId: string, createdAt: string, provider: IntegrationPr function withHarness( use: (harness: OrchestrationIntegrationHarness) => Effect.Effect, - provider: IntegrationProvider = "codex", + provider: IntegrationProvider = CODEX_PROVIDER, ) { return Effect.acquireUseRelease( makeOrchestrationIntegrationHarness({ provider }), @@ -97,7 +106,7 @@ function withRealCodexHarness( use: (harness: OrchestrationIntegrationHarness) => Effect.Effect, ) { return Effect.acquireUseRelease( - makeOrchestrationIntegrationHarness({ provider: "codex", realCodex: true }), + makeOrchestrationIntegrationHarness({ provider: CODEX_PROVIDER, realCodex: true }), use, (harness) => harness.dispose, ).pipe(Effect.provide(NodeServices.layer)); @@ -106,8 +115,9 @@ function withRealCodexHarness( const seedProjectAndThread = (harness: OrchestrationIntegrationHarness) => Effect.gen(function* () { const createdAt = nowIso(); - const provider = 
harness.adapterHarness?.provider ?? "codex"; - const defaultModel = DEFAULT_MODEL_BY_PROVIDER[provider]; + const provider = harness.adapterHarness?.provider ?? CODEX_PROVIDER; + const defaultModel = DEFAULT_MODEL_BY_PROVIDER[provider] ?? DEFAULT_MODEL; + const instanceId = defaultInstanceIdForDriver(provider); yield* harness.engine.dispatch({ type: "project.create", @@ -116,7 +126,7 @@ const seedProjectAndThread = (harness: OrchestrationIntegrationHarness) => title: "Integration Project", workspaceRoot: harness.workspaceDir, defaultModelSelection: { - provider, + instanceId, model: defaultModel, }, createdAt, @@ -129,7 +139,7 @@ const seedProjectAndThread = (harness: OrchestrationIntegrationHarness) => projectId: PROJECT_ID, title: "Integration Thread", modelSelection: { - provider, + instanceId, model: defaultModel, }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -265,7 +275,7 @@ it.live.skipIf(!process.env.CODEX_BINARY_PATH)( title: "Integration Project", workspaceRoot: harness.workspaceDir, defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.3-codex", }, createdAt, @@ -278,7 +288,7 @@ it.live.skipIf(!process.env.CODEX_BINARY_PATH)( projectId: PROJECT_ID, title: "Integration Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.3-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -496,6 +506,7 @@ it.live("runs multi-turn file edits and persists checkpoint diffs", () => fromCheckpointRef: checkpointRefForThreadTurn(THREAD_ID, 1), toCheckpointRef: checkpointRefForThreadTurn(THREAD_ID, 2), fallbackFromToHead: false, + ignoreWhitespace: false, }); assert.equal(incrementalDiff.includes("README.md"), true); @@ -504,6 +515,7 @@ it.live("runs multi-turn file edits and persists checkpoint diffs", () => fromCheckpointRef: checkpointRefForThreadTurn(THREAD_ID, 0), toCheckpointRef: checkpointRefForThreadTurn(THREAD_ID, 2), 
fallbackFromToHead: false, + ignoreWhitespace: false, }); assert.equal(fullDiff.includes("README.md"), true); @@ -744,18 +756,6 @@ it.live("reverts to an earlier checkpoint and trims checkpoint projections + git messageId: "msg-user-revert-1", text: "First edit", }); - yield* harness.waitForReceipt( - (receipt): receipt is CheckpointDiffFinalizedReceipt => - receipt.type === "checkpoint.diff.finalized" && - receipt.threadId === THREAD_ID && - receipt.checkpointTurnCount === 1, - ); - yield* harness.waitForReceipt( - (receipt): receipt is TurnProcessingQuiescedReceipt => - receipt.type === "turn.processing.quiesced" && - receipt.threadId === THREAD_ID && - receipt.checkpointTurnCount === 1, - ); yield* harness.waitForThread( THREAD_ID, @@ -814,18 +814,6 @@ it.live("reverts to an earlier checkpoint and trims checkpoint projections + git messageId: "msg-user-revert-2", text: "Second edit", }); - yield* harness.waitForReceipt( - (receipt): receipt is CheckpointDiffFinalizedReceipt => - receipt.type === "checkpoint.diff.finalized" && - receipt.threadId === THREAD_ID && - receipt.checkpointTurnCount === 2, - ); - yield* harness.waitForReceipt( - (receipt): receipt is TurnProcessingQuiescedReceipt => - receipt.type === "turn.processing.quiesced" && - receipt.threadId === THREAD_ID && - receipt.checkpointTurnCount === 2, - ); yield* harness.waitForThread( THREAD_ID, @@ -833,6 +821,7 @@ it.live("reverts to an earlier checkpoint and trims checkpoint projections + git entry.latestTurn?.turnId === "turn-2" && entry.checkpoints.length === 2 && entry.activities.some((activity) => activity.turnId === "turn-2"), + 8000, ); yield* harness.engine.dispatch({ @@ -935,20 +924,32 @@ it.live("starts a claudeAgent session on first turn when provider is requested", events: [ { type: "turn.started", - ...runtimeBase("evt-claude-start-1", "2026-02-24T10:10:00.000Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-start-1", + "2026-02-24T10:10:00.000Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: 
THREAD_ID, turnId: FIXTURE_TURN_ID, }, { type: "message.delta", - ...runtimeBase("evt-claude-start-2", "2026-02-24T10:10:00.050Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-start-2", + "2026-02-24T10:10:00.050Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, delta: "Claude first turn.\n", }, { type: "turn.completed", - ...runtimeBase("evt-claude-start-3", "2026-02-24T10:10:00.100Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-start-3", + "2026-02-24T10:10:00.100Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, status: "completed", @@ -962,7 +963,7 @@ it.live("starts a claudeAgent session on first turn when provider is requested", messageId: "msg-user-claude-initial", text: "Use Claude", modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-sonnet-4-6", }, }); @@ -978,119 +979,140 @@ it.live("starts a claudeAgent session on first turn when provider is requested", ); assert.equal(thread.session?.providerName, "claudeAgent"); }), - "claudeAgent", + CLAUDE_AGENT_PROVIDER, ), ); -// Skip: flaky timeout in CI after upstream sync — needs investigation -it.live.skip( - "recovers claudeAgent sessions after provider stopAll using persisted resume state", - () => - withHarness( - (harness) => - Effect.gen(function* () { - yield* seedProjectAndThread(harness); - - yield* harness.adapterHarness!.queueTurnResponseForNextSession({ - events: [ - { - type: "turn.started", - ...runtimeBase("evt-claude-recover-1", "2026-02-24T10:11:00.000Z", "claudeAgent"), - threadId: THREAD_ID, - turnId: FIXTURE_TURN_ID, - }, - { - type: "message.delta", - ...runtimeBase("evt-claude-recover-2", "2026-02-24T10:11:00.050Z", "claudeAgent"), - threadId: THREAD_ID, - turnId: FIXTURE_TURN_ID, - delta: "Turn before restart.\n", - }, - { - type: "turn.completed", - ...runtimeBase("evt-claude-recover-3", "2026-02-24T10:11:00.100Z", "claudeAgent"), - threadId: THREAD_ID, - turnId: 
FIXTURE_TURN_ID, - status: "completed", - }, - ], - }); - - yield* startTurn({ - harness, - commandId: "cmd-turn-start-claude-recover-1", - messageId: "msg-user-claude-recover-1", - text: "Before restart", - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", +it.live("recovers claudeAgent sessions after provider stopAll using persisted resume state", () => + withHarness( + (harness) => + Effect.gen(function* () { + yield* seedProjectAndThread(harness); + + yield* harness.adapterHarness!.queueTurnResponseForNextSession({ + events: [ + { + type: "turn.started", + ...runtimeBase( + "evt-claude-recover-1", + "2026-02-24T10:11:00.000Z", + CLAUDE_AGENT_PROVIDER, + ), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, }, - }); - - yield* harness.waitForThread( - THREAD_ID, - (entry) => - entry.latestTurn?.turnId === "turn-1" && entry.session?.threadId === "thread-1", - ); - - yield* harness.providerService.stopSession({ threadId: THREAD_ID }); - yield* waitForSync( - () => harness.adapterHarness!.listActiveSessionIds(), - (sessionIds) => sessionIds.length === 0, - "provider stopSession", - ); - - yield* harness.adapterHarness!.queueTurnResponseForNextSession({ - events: [ - { - type: "turn.started", - ...runtimeBase("evt-claude-recover-4", "2026-02-24T10:11:01.000Z", "claudeAgent"), - threadId: THREAD_ID, - turnId: FIXTURE_TURN_ID, - }, - { - type: "message.delta", - ...runtimeBase("evt-claude-recover-5", "2026-02-24T10:11:01.050Z", "claudeAgent"), - threadId: THREAD_ID, - turnId: FIXTURE_TURN_ID, - delta: "Turn after restart.\n", - }, - { - type: "turn.completed", - ...runtimeBase("evt-claude-recover-6", "2026-02-24T10:11:01.100Z", "claudeAgent"), - threadId: THREAD_ID, - turnId: FIXTURE_TURN_ID, - status: "completed", - }, - ], - }); - - yield* startTurn({ - harness, - commandId: "cmd-turn-start-claude-recover-2", - messageId: "msg-user-claude-recover-2", - text: "After restart", - }); - yield* waitForSync( - () => 
harness.adapterHarness!.getStartCount(), - (count) => count === 2, - "claude provider recovery start", - ); - - const recoveredThread = yield* harness.waitForThread( - THREAD_ID, - (entry) => - entry.session?.providerName === "claudeAgent" && - entry.messages.some( - (message) => message.role === "user" && message.text === "After restart", - ) && - !entry.activities.some((activity) => activity.kind === "provider.turn.start.failed"), - ); - assert.equal(recoveredThread.session?.providerName, "claudeAgent"); - assert.equal(recoveredThread.session?.threadId, "thread-1"); - }), - "claudeAgent", - ), + { + type: "message.delta", + ...runtimeBase( + "evt-claude-recover-2", + "2026-02-24T10:11:00.050Z", + CLAUDE_AGENT_PROVIDER, + ), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + delta: "Turn before restart.\n", + }, + { + type: "turn.completed", + ...runtimeBase( + "evt-claude-recover-3", + "2026-02-24T10:11:00.100Z", + CLAUDE_AGENT_PROVIDER, + ), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + status: "completed", + }, + ], + }); + + yield* startTurn({ + harness, + commandId: "cmd-turn-start-claude-recover-1", + messageId: "msg-user-claude-recover-1", + text: "Before restart", + modelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-sonnet-4-6", + }, + }); + + yield* harness.waitForThread( + THREAD_ID, + (entry) => + entry.latestTurn?.turnId === "turn-1" && entry.session?.threadId === "thread-1", + ); + + yield* harness.adapterHarness!.adapter.stopAll(); + yield* waitForSync( + () => harness.adapterHarness!.listActiveSessionIds(), + (sessionIds) => sessionIds.length === 0, + "provider stopAll", + ); + + yield* harness.adapterHarness!.queueTurnResponseForNextSession({ + events: [ + { + type: "turn.started", + ...runtimeBase( + "evt-claude-recover-4", + "2026-02-24T10:11:01.000Z", + CLAUDE_AGENT_PROVIDER, + ), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + }, + { + type: "message.delta", + ...runtimeBase( + 
"evt-claude-recover-5", + "2026-02-24T10:11:01.050Z", + CLAUDE_AGENT_PROVIDER, + ), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + delta: "Turn after restart.\n", + }, + { + type: "turn.completed", + ...runtimeBase( + "evt-claude-recover-6", + "2026-02-24T10:11:01.100Z", + CLAUDE_AGENT_PROVIDER, + ), + threadId: THREAD_ID, + turnId: FIXTURE_TURN_ID, + status: "completed", + }, + ], + }); + + yield* startTurn({ + harness, + commandId: "cmd-turn-start-claude-recover-2", + messageId: "msg-user-claude-recover-2", + text: "After restart", + }); + yield* waitForSync( + () => harness.adapterHarness!.getStartCount(), + (count) => count === 2, + "claude provider recovery start", + ); + + const recoveredThread = yield* harness.waitForThread( + THREAD_ID, + (entry) => + entry.session?.providerName === "claudeAgent" && + entry.messages.some( + (message) => message.role === "user" && message.text === "After restart", + ) && + !entry.activities.some((activity) => activity.kind === "provider.turn.start.failed"), + ); + assert.equal(recoveredThread.session?.providerName, "claudeAgent"); + assert.equal(recoveredThread.session?.threadId, "thread-1"); + }), + CLAUDE_AGENT_PROVIDER, + ), ); it.live("forwards claudeAgent approval responses to the provider session", () => @@ -1103,13 +1125,21 @@ it.live("forwards claudeAgent approval responses to the provider session", () => events: [ { type: "turn.started", - ...runtimeBase("evt-claude-approval-1", "2026-02-24T10:12:00.000Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-approval-1", + "2026-02-24T10:12:00.000Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, }, { type: "approval.requested", - ...runtimeBase("evt-claude-approval-2", "2026-02-24T10:12:00.050Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-approval-2", + "2026-02-24T10:12:00.050Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, requestId: APPROVAL_REQUEST_ID, @@ -1118,7 +1148,11 @@ it.live("forwards 
claudeAgent approval responses to the provider session", () => }, { type: "turn.completed", - ...runtimeBase("evt-claude-approval-3", "2026-02-24T10:12:00.100Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-approval-3", + "2026-02-24T10:12:00.100Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, status: "completed", @@ -1132,7 +1166,7 @@ it.live("forwards claudeAgent approval responses to the provider session", () => messageId: "msg-user-claude-approval", text: "Need approval", modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-sonnet-4-6", }, }); @@ -1163,7 +1197,7 @@ it.live("forwards claudeAgent approval responses to the provider session", () => ); assert.equal(approvalResponses[0]?.decision, "accept"); }), - "claudeAgent", + CLAUDE_AGENT_PROVIDER, ), ); @@ -1177,20 +1211,32 @@ it.live("forwards thread.turn.interrupt to claudeAgent provider sessions", () => events: [ { type: "turn.started", - ...runtimeBase("evt-claude-interrupt-1", "2026-02-24T10:13:00.000Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-interrupt-1", + "2026-02-24T10:13:00.000Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, }, { type: "message.delta", - ...runtimeBase("evt-claude-interrupt-2", "2026-02-24T10:13:00.050Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-interrupt-2", + "2026-02-24T10:13:00.050Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, delta: "Long running output.\n", }, { type: "turn.completed", - ...runtimeBase("evt-claude-interrupt-3", "2026-02-24T10:13:00.100Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-interrupt-3", + "2026-02-24T10:13:00.100Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, status: "completed", @@ -1204,7 +1250,7 @@ it.live("forwards thread.turn.interrupt to claudeAgent provider sessions", () => messageId: "msg-user-claude-interrupt", text: "Start long turn", 
modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-sonnet-4-6", }, }); @@ -1232,7 +1278,7 @@ it.live("forwards thread.turn.interrupt to claudeAgent provider sessions", () => ); assert.equal(interruptCalls.length, 1); }), - "claudeAgent", + CLAUDE_AGENT_PROVIDER, ), ); @@ -1246,20 +1292,32 @@ it.live("reverts claudeAgent turns and rolls back provider conversation state", events: [ { type: "turn.started", - ...runtimeBase("evt-claude-revert-1", "2026-02-24T10:14:00.000Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-revert-1", + "2026-02-24T10:14:00.000Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, }, { type: "message.delta", - ...runtimeBase("evt-claude-revert-2", "2026-02-24T10:14:00.050Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-revert-2", + "2026-02-24T10:14:00.050Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, delta: "README -> v2\n", }, { type: "turn.completed", - ...runtimeBase("evt-claude-revert-3", "2026-02-24T10:14:00.100Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-revert-3", + "2026-02-24T10:14:00.100Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, status: "completed", @@ -1277,7 +1335,7 @@ it.live("reverts claudeAgent turns and rolls back provider conversation state", messageId: "msg-user-claude-revert-1", text: "First Claude edit", modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-sonnet-4-6", }, }); @@ -1292,20 +1350,32 @@ it.live("reverts claudeAgent turns and rolls back provider conversation state", events: [ { type: "turn.started", - ...runtimeBase("evt-claude-revert-4", "2026-02-24T10:14:01.000Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-revert-4", + "2026-02-24T10:14:01.000Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, }, { type: "message.delta", - 
...runtimeBase("evt-claude-revert-5", "2026-02-24T10:14:01.050Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-revert-5", + "2026-02-24T10:14:01.050Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, delta: "README -> v3\n", }, { type: "turn.completed", - ...runtimeBase("evt-claude-revert-6", "2026-02-24T10:14:01.100Z", "claudeAgent"), + ...runtimeBase( + "evt-claude-revert-6", + "2026-02-24T10:14:01.100Z", + CLAUDE_AGENT_PROVIDER, + ), threadId: THREAD_ID, turnId: FIXTURE_TURN_ID, status: "completed", @@ -1356,6 +1426,6 @@ it.live("reverts claudeAgent turns and rolls back provider conversation state", ); assert.deepEqual(harness.adapterHarness!.getRollbackCalls(THREAD_ID), [1]); }), - "claudeAgent", + CLAUDE_AGENT_PROVIDER, ), ); diff --git a/apps/server/integration/providerService.integration.test.ts b/apps/server/integration/providerService.integration.test.ts index 3dd127a5564..0939e7ba385 100644 --- a/apps/server/integration/providerService.integration.test.ts +++ b/apps/server/integration/providerService.integration.test.ts @@ -1,13 +1,17 @@ import type { ProviderRuntimeEvent } from "@t3tools/contracts"; -import { ThreadId } from "@t3tools/contracts"; +import { ProviderDriverKind, ProviderInstanceId, ThreadId } from "@t3tools/contracts"; import { DEFAULT_SERVER_SETTINGS } from "@t3tools/contracts/settings"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { it, assert } from "@effect/vitest"; import { Effect, FileSystem, Layer, Path, Queue, Stream } from "effect"; -import { ProviderUnsupportedError } from "../src/provider/Errors.ts"; import { ProviderAdapterRegistry } from "../src/provider/Services/ProviderAdapterRegistry.ts"; +import { makeAdapterRegistryMock } from "../src/provider/testUtils/providerAdapterRegistryMock.ts"; import { ProviderSessionDirectoryLive } from "../src/provider/Layers/ProviderSessionDirectory.ts"; +import { + NoOpProviderEventLoggers, + ProviderEventLoggers, +} from 
"../src/provider/Layers/ProviderEventLoggers.ts"; import { makeProviderServiceLive } from "../src/provider/Layers/ProviderService.ts"; import { ProviderService, @@ -29,6 +33,8 @@ import { codexTurnTextFixture, } from "./fixtures/providerRuntime.ts"; +const codexInstanceId = ProviderInstanceId.make("codex"); + const makeWorkspaceDirectory = Effect.gen(function* () { const fs = yield* FileSystem.FileSystem; const pathService = yield* Path.Path; @@ -47,13 +53,9 @@ const makeIntegrationFixture = Effect.gen(function* () { const cwd = yield* makeWorkspaceDirectory; const harness = yield* makeTestProviderAdapterHarness(); - const registry: typeof ProviderAdapterRegistry.Service = { - getByProvider: (provider) => - provider === "codex" - ? Effect.succeed(harness.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["codex"]), - }; + const registry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("codex")]: harness.adapter, + }); const directoryLayer = ProviderSessionDirectoryLive.pipe( Layer.provide(ProviderSessionRuntimeRepositoryLive), @@ -64,6 +66,7 @@ const makeIntegrationFixture = Effect.gen(function* () { Layer.succeed(ProviderAdapterRegistry, registry), ServerSettingsService.layerTest(DEFAULT_SERVER_SETTINGS), AnalyticsService.layerTest, + Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers), ).pipe(Layer.provide(SqlitePersistenceMemory)); const layer = makeProviderServiceLive().pipe(Layer.provide(shared)); @@ -124,7 +127,8 @@ it.live("replays typed runtime fixture events", () => const provider = yield* ProviderService; const session = yield* provider.startSession(ThreadId.make("thread-integration-typed"), { threadId: ThreadId.make("thread-integration-typed"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, cwd: fixture.cwd, runtimeMode: "full-access", }); @@ -142,6 +146,10 @@ it.live("replays typed runtime fixture events", () => 
observedEvents.map((event) => event.type), codexTurnTextFixture.map((event) => event.type), ); + assert.deepEqual( + observedEvents.map((event) => event.providerInstanceId), + codexTurnTextFixture.map(() => codexInstanceId), + ); }).pipe(Effect.provide(fixture.layer)); }).pipe(Effect.provide(NodeServices.layer)), ); @@ -156,7 +164,8 @@ it.live("replays file-changing fixture turn events", () => const provider = yield* ProviderService; const session = yield* provider.startSession(ThreadId.make("thread-integration-tools"), { threadId: ThreadId.make("thread-integration-tools"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, cwd: fixture.cwd, runtimeMode: "full-access", }); @@ -192,7 +201,8 @@ it.live("runs multi-turn tool/approval flow", () => const provider = yield* ProviderService; const session = yield* provider.startSession(ThreadId.make("thread-integration-multi"), { threadId: ThreadId.make("thread-integration-multi"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, cwd: fixture.cwd, runtimeMode: "full-access", }); @@ -243,7 +253,8 @@ it.live("rolls back provider conversation state only", () => const provider = yield* ProviderService; const session = yield* provider.startSession(ThreadId.make("thread-integration-rollback"), { threadId: ThreadId.make("thread-integration-rollback"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, cwd: fixture.cwd, runtimeMode: "full-access", }); diff --git a/apps/server/package.json b/apps/server/package.json index d9d84166b99..3f142bfc0d7 100644 --- a/apps/server/package.json +++ b/apps/server/package.json @@ -42,6 +42,7 @@ "@effect/vitest": "catalog:", "@t3tools/contracts": "workspace:*", "@t3tools/shared": "workspace:*", + "@t3tools/tailscale": "workspace:*", "@t3tools/web": "workspace:*", "@types/bun": "catalog:", "@types/node": "catalog:", diff --git 
a/apps/server/src/ampServerManager.ts b/apps/server/src/ampServerManager.ts index 46005126060..7acd8b77d4e 100644 --- a/apps/server/src/ampServerManager.ts +++ b/apps/server/src/ampServerManager.ts @@ -6,6 +6,7 @@ import readline from "node:readline"; import { ApprovalRequestId, EventId, + ProviderDriverKind, RuntimeItemId, RuntimeTaskId, ThreadId, @@ -24,9 +25,9 @@ import { createLogger } from "./logger.ts"; // ── Constants ─────────────────────────────────────────────────────── -const PROVIDER = "amp" as const; +const PROVIDER = ProviderDriverKind.make("amp"); -// ── Module-level usage tracking ────────────────────────────────────── +// ── Per-instance usage tracking ────────────────────────────────────── interface AmpUsageAccumulator { inputTokens: number; @@ -35,31 +36,6 @@ interface AmpUsageAccumulator { turnCount: number; } -let _ampUsageAccumulator: AmpUsageAccumulator = { - inputTokens: 0, - outputTokens: 0, - cachedTokens: 0, - turnCount: 0, -}; - -export function fetchAmpUsage(): ProviderUsageResult { - const acc = _ampUsageAccumulator; - let sessionUsage: ProviderSessionUsage | undefined; - if (acc.turnCount > 0) { - sessionUsage = { - inputTokens: acc.inputTokens, - outputTokens: acc.outputTokens, - ...(acc.cachedTokens > 0 ? { cachedTokens: acc.cachedTokens } : {}), - totalTokens: acc.inputTokens + acc.outputTokens, - turnCount: acc.turnCount, - }; - } - return { - provider: PROVIDER, - ...(sessionUsage ? 
{ sessionUsage } : {}), - }; -} - // ── Types ─────────────────────────────────────────────────────────── type AmpProviderOptions = { @@ -173,8 +149,38 @@ export class AmpServerManager extends EventEmitter<{ }> { private readonly sessions = new Map(); private readonly logger = createLogger("amp"); + private readonly usageAccumulator: AmpUsageAccumulator = { + inputTokens: 0, + outputTokens: 0, + cachedTokens: 0, + turnCount: 0, + }; binaryPath: string | undefined; + /** + * Snapshot the per-instance usage counters in the canonical + * `ProviderUsageResult` shape. Returns the manager-local accumulator — + * each `AmpServerManager` instance keeps its own counters so two + * concurrent Amp providers do not cross-contaminate usage stats. + */ + fetchUsage(): ProviderUsageResult { + const acc = this.usageAccumulator; + let sessionUsage: ProviderSessionUsage | undefined; + if (acc.turnCount > 0) { + sessionUsage = { + inputTokens: acc.inputTokens, + outputTokens: acc.outputTokens, + ...(acc.cachedTokens > 0 ? { cachedTokens: acc.cachedTokens } : {}), + totalTokens: acc.inputTokens + acc.outputTokens, + turnCount: acc.turnCount, + }; + } + return { + provider: PROVIDER, + ...(sessionUsage ? { sessionUsage } : {}), + }; + } + // ── Session lifecycle ─────────────────────────────────────────── startSession(input: ProviderSessionStartInput): Promise { @@ -563,9 +569,9 @@ export class AmpServerManager extends EventEmitter<{ if (inner?.usage) { // Accumulate session-level usage if (typeof inner.usage.input_tokens === "number") - _ampUsageAccumulator.inputTokens += inner.usage.input_tokens; + this.usageAccumulator.inputTokens += inner.usage.input_tokens; if (typeof inner.usage.output_tokens === "number") - _ampUsageAccumulator.outputTokens += inner.usage.output_tokens; + this.usageAccumulator.outputTokens += inner.usage.output_tokens; const cached = (typeof inner.usage.cache_read_input_tokens === "number" ? 
inner.usage.cache_read_input_tokens @@ -573,7 +579,7 @@ export class AmpServerManager extends EventEmitter<{ (typeof inner.usage.cache_creation_input_tokens === "number" ? inner.usage.cache_creation_input_tokens : 0); - if (cached > 0) _ampUsageAccumulator.cachedTokens += cached; + if (cached > 0) this.usageAccumulator.cachedTokens += cached; this.emitEvent(threadId, session.activeTurnId, { type: "thread.token-usage.updated", @@ -584,7 +590,7 @@ export class AmpServerManager extends EventEmitter<{ // For persistent sessions, a turn completes when stop_reason is "end_turn". // Guard against duplicate turn.completed (handleResultMessage may also emit one). if (inner?.stop_reason === "end_turn" && session.activeTurnId && session.status !== "ready") { - _ampUsageAccumulator.turnCount++; + this.usageAccumulator.turnCount++; this.closeAllSubagentTasks(threadId, session); this.emitEvent(threadId, session.activeTurnId, { type: "turn.completed", diff --git a/apps/server/src/checkpointing/Errors.ts b/apps/server/src/checkpointing/Errors.ts index cb873559c16..c6875e585cf 100644 --- a/apps/server/src/checkpointing/Errors.ts +++ b/apps/server/src/checkpointing/Errors.ts @@ -1,6 +1,6 @@ import { Schema } from "effect"; import type { ProjectionRepositoryError } from "../persistence/Errors.ts"; -import { GitCommandError } from "@t3tools/contracts"; +import type { VcsError } from "@t3tools/contracts"; /** * CheckpointUnavailableError - Expected checkpoint does not exist. 
@@ -35,9 +35,6 @@ export class CheckpointInvariantError extends Schema.TaggedErrorClass { readonly fromCheckpointRef: CheckpointRef; readonly toCheckpointRef: CheckpointRef; readonly cwd: string; + readonly ignoreWhitespace: boolean; }> = []; const threadCheckpointContext = makeThreadCheckpointContext({ @@ -68,9 +69,14 @@ describe("CheckpointDiffQueryLive", () => { return true; }), restoreCheckpoint: () => Effect.succeed(true), - diffCheckpoints: ({ fromCheckpointRef, toCheckpointRef, cwd }) => + diffCheckpoints: ({ fromCheckpointRef, toCheckpointRef, cwd, ignoreWhitespace }) => Effect.sync(() => { - diffCheckpointsCalls.push({ fromCheckpointRef, toCheckpointRef, cwd }); + diffCheckpointsCalls.push({ + fromCheckpointRef, + toCheckpointRef, + cwd, + ignoreWhitespace, + }); return "diff patch"; }), deleteCheckpointRefs: () => Effect.void, @@ -102,6 +108,7 @@ describe("CheckpointDiffQueryLive", () => { threadId, fromTurnCount: 0, toTurnCount: 1, + ignoreWhitespace: true, }); }).pipe(Effect.provide(layer)), ); @@ -113,6 +120,7 @@ describe("CheckpointDiffQueryLive", () => { cwd: process.cwd(), fromCheckpointRef: expectedFromRef, toCheckpointRef, + ignoreWhitespace: true, }, ]); expect(result).toEqual({ @@ -123,6 +131,67 @@ describe("CheckpointDiffQueryLive", () => { }); }); + it("defaults to hide whitespace changes", async () => { + const projectId = ProjectId.make("project-default-whitespace"); + const threadId = ThreadId.make("thread-default-whitespace"); + const toCheckpointRef = checkpointRefForThreadTurn(threadId, 1); + const diffCheckpointsCalls: Array<{ readonly ignoreWhitespace: boolean }> = []; + + const threadCheckpointContext = makeThreadCheckpointContext({ + projectId, + threadId, + workspaceRoot: "/tmp/workspace", + worktreePath: null, + checkpointTurnCount: 1, + checkpointRef: toCheckpointRef, + }); + + const checkpointStore: CheckpointStoreShape = { + isGitRepository: () => Effect.succeed(true), + captureCheckpoint: () => Effect.void, + hasCheckpointRef: 
() => Effect.succeed(true), + restoreCheckpoint: () => Effect.succeed(true), + diffCheckpoints: ({ ignoreWhitespace }) => + Effect.sync(() => { + diffCheckpointsCalls.push({ ignoreWhitespace }); + return "diff patch"; + }), + deleteCheckpointRefs: () => Effect.void, + }; + + const layer = CheckpointDiffQueryLive.pipe( + Layer.provideMerge(Layer.succeed(CheckpointStore, checkpointStore)), + Layer.provideMerge( + Layer.succeed(ProjectionSnapshotQuery, { + getSnapshot: () => + Effect.die("CheckpointDiffQuery should not request the full orchestration snapshot"), + getShellSnapshot: () => + Effect.die("CheckpointDiffQuery should not request the orchestration shell snapshot"), + getCounts: () => Effect.succeed({ projectCount: 0, threadCount: 0 }), + getActiveProjectByWorkspaceRoot: () => Effect.succeed(Option.none()), + getProjectShellById: () => Effect.succeed(Option.none()), + getFirstActiveThreadIdByProjectId: () => Effect.succeed(Option.none()), + getThreadCheckpointContext: () => Effect.succeed(Option.some(threadCheckpointContext)), + getThreadShellById: () => Effect.succeed(Option.none()), + getThreadDetailById: () => Effect.succeed(Option.none()), + }), + ), + ); + + await Effect.runPromise( + Effect.gen(function* () { + const query = yield* CheckpointDiffQuery; + return yield* query.getTurnDiff({ + threadId, + fromTurnCount: 0, + toTurnCount: 1, + }); + }).pipe(Effect.provide(layer)), + ); + + expect(diffCheckpointsCalls).toEqual([{ ignoreWhitespace: true }]); + }); + it("fails when the thread is missing from the snapshot", async () => { const threadId = ThreadId.make("thread-missing"); diff --git a/apps/server/src/checkpointing/Layers/CheckpointDiffQuery.ts b/apps/server/src/checkpointing/Layers/CheckpointDiffQuery.ts index 1c2edee469e..c6b15134711 100644 --- a/apps/server/src/checkpointing/Layers/CheckpointDiffQuery.ts +++ b/apps/server/src/checkpointing/Layers/CheckpointDiffQuery.ts @@ -24,6 +24,7 @@ const make = Effect.gen(function* () { const getTurnDiff: 
CheckpointDiffQueryShape["getTurnDiff"] = Effect.fn("getTurnDiff")( function* (input) { const operation = "CheckpointDiffQuery.getTurnDiff"; + const ignoreWhitespace = input.ignoreWhitespace ?? true; if (input.fromTurnCount === input.toTurnCount) { const emptyDiff: OrchestrationGetTurnDiffResultType = { @@ -131,6 +132,7 @@ const make = Effect.gen(function* () { fromCheckpointRef, toCheckpointRef, fallbackFromToHead: false, + ignoreWhitespace, }); const turnDiff: OrchestrationGetTurnDiffResultType = { @@ -157,6 +159,7 @@ const make = Effect.gen(function* () { threadId: input.threadId, fromTurnCount: 0, toTurnCount: input.toTurnCount, + ignoreWhitespace: input.ignoreWhitespace ?? true, }).pipe(Effect.map((result): OrchestrationGetFullThreadDiffResult => result)); return { diff --git a/apps/server/src/checkpointing/Layers/CheckpointStore.test.ts b/apps/server/src/checkpointing/Layers/CheckpointStore.test.ts index fe377eb1ec3..204fd565740 100644 --- a/apps/server/src/checkpointing/Layers/CheckpointStore.test.ts +++ b/apps/server/src/checkpointing/Layers/CheckpointStore.test.ts @@ -8,24 +8,27 @@ import { describe, expect } from "vitest"; import { checkpointRefForThreadTurn } from "../Utils.ts"; import { CheckpointStoreLive } from "./CheckpointStore.ts"; import { CheckpointStore } from "../Services/CheckpointStore.ts"; -import { GitCoreLive } from "../../git/Layers/GitCore.ts"; -import { GitCore } from "../../git/Services/GitCore.ts"; -import { GitCommandError } from "@t3tools/contracts"; +import * as VcsDriverRegistry from "../../vcs/VcsDriverRegistry.ts"; +import * as VcsProcess from "../../vcs/VcsProcess.ts"; +import type { VcsError } from "@t3tools/contracts"; import { ServerConfig } from "../../config.ts"; import { ThreadId } from "@t3tools/contracts"; const ServerConfigLayer = ServerConfig.layerTest(process.cwd(), { prefix: "t3-checkpoint-store-test-", }); -const GitCoreTestLayer = GitCoreLive.pipe( - Layer.provide(ServerConfigLayer), - 
Layer.provide(NodeServices.layer), -); +const VcsProcessTestLayer = VcsProcess.layer.pipe(Layer.provide(NodeServices.layer)); +const VcsDriverTestLayer = VcsDriverRegistry.layer.pipe(Layer.provide(VcsProcessTestLayer)); const CheckpointStoreTestLayer = CheckpointStoreLive.pipe( - Layer.provide(GitCoreTestLayer), - Layer.provide(NodeServices.layer), + Layer.provideMerge(VcsDriverTestLayer), + Layer.provideMerge(NodeServices.layer), +); +const TestLayer = CheckpointStoreTestLayer.pipe( + Layer.provideMerge(VcsProcessTestLayer), + Layer.provideMerge(VcsDriverTestLayer), + Layer.provideMerge(ServerConfigLayer), + Layer.provideMerge(NodeServices.layer), ); -const TestLayer = Layer.mergeAll(NodeServices.layer, GitCoreTestLayer, CheckpointStoreTestLayer); function makeTmpDir( prefix = "checkpoint-store-test-", @@ -49,11 +52,12 @@ function writeTextFile( function git( cwd: string, args: ReadonlyArray, -): Effect.Effect { +): Effect.Effect { return Effect.gen(function* () { - const gitCore = yield* GitCore; - const result = yield* gitCore.execute({ + const process = yield* VcsProcess.VcsProcess; + const result = yield* process.run({ operation: "CheckpointStore.test.git", + command: "git", cwd, args, timeoutMs: 10_000, @@ -66,12 +70,11 @@ function initRepoWithCommit( cwd: string, ): Effect.Effect< void, - GitCommandError | PlatformError.PlatformError, - GitCore | FileSystem.FileSystem + VcsError | PlatformError.PlatformError, + VcsProcess.VcsProcess | FileSystem.FileSystem > { return Effect.gen(function* () { - const core = yield* GitCore; - yield* core.initRepo({ cwd }); + yield* git(cwd, ["init"]); yield* git(cwd, ["config", "user.email", "test@test.com"]); yield* git(cwd, ["config", "user.name", "Test"]); yield* writeTextFile(path.join(cwd, "README.md"), "# test\n"); @@ -111,6 +114,7 @@ it.layer(TestLayer)("CheckpointStoreLive", (it) => { cwd: tmp, fromCheckpointRef, toCheckpointRef, + ignoreWhitespace: true, }); expect(diff).toContain("diff --git"); @@ -118,5 +122,80 @@ 
it.layer(TestLayer)("CheckpointStoreLive", (it) => { expect(diff).toContain("+line 04999"); }), ); + + it.effect("can hide indentation churn when changes wrap existing lines", () => + Effect.gen(function* () { + const tmp = yield* makeTmpDir(); + yield* initRepoWithCommit(tmp); + const checkpointStore = yield* CheckpointStore; + const threadId = ThreadId.make("thread-checkpoint-store-whitespace"); + const fromCheckpointRef = checkpointRefForThreadTurn(threadId, 0); + const toCheckpointRef = checkpointRefForThreadTurn(threadId, 1); + + const componentPath = path.join(tmp, "Component.tsx"); + yield* writeTextFile( + componentPath, + [ + "export function View() {", + " return (", + "
", + "

Title

", + "

Body

", + "
", + " );", + "}", + "", + ].join("\n"), + ); + yield* checkpointStore.captureCheckpoint({ + cwd: tmp, + checkpointRef: fromCheckpointRef, + }); + yield* writeTextFile( + componentPath, + [ + "export function View() {", + " return (", + "
", + " {isReady ? (", + "
", + "

Title

", + "

Body

", + "
", + " ) : null}", + "
", + " );", + "}", + "", + ].join("\n"), + ); + yield* checkpointStore.captureCheckpoint({ + cwd: tmp, + checkpointRef: toCheckpointRef, + }); + + const normalDiff = yield* checkpointStore.diffCheckpoints({ + cwd: tmp, + fromCheckpointRef, + toCheckpointRef, + ignoreWhitespace: false, + }); + const whitespaceIgnoredDiff = yield* checkpointStore.diffCheckpoints({ + cwd: tmp, + fromCheckpointRef, + toCheckpointRef, + ignoreWhitespace: true, + }); + + expect(normalDiff).toContain("diff --git"); + expect(normalDiff).toContain("-

Title

"); + expect(normalDiff).toContain("+

Title

"); + expect(whitespaceIgnoredDiff).toContain("diff --git"); + expect(whitespaceIgnoredDiff).toContain("+ {isReady ? ("); + expect(whitespaceIgnoredDiff).toContain("+
"); + expect(whitespaceIgnoredDiff).not.toContain("-

Title

"); + expect(whitespaceIgnoredDiff).not.toContain("+

Title

"); + }), + ); }); }); diff --git a/apps/server/src/checkpointing/Layers/CheckpointStore.ts b/apps/server/src/checkpointing/Layers/CheckpointStore.ts index 211877e9b1a..7efe4cc0a56 100644 --- a/apps/server/src/checkpointing/Layers/CheckpointStore.ts +++ b/apps/server/src/checkpointing/Layers/CheckpointStore.ts @@ -14,8 +14,8 @@ import { randomUUID } from "node:crypto"; import { Effect, Layer, FileSystem, Path } from "effect"; import { CheckpointInvariantError } from "../Errors.ts"; -import { GitCommandError } from "@t3tools/contracts"; -import { GitCore } from "../../git/Services/GitCore.ts"; +import { VcsProcessExitError } from "@t3tools/contracts"; +import { VcsDriverRegistry } from "../../vcs/VcsDriverRegistry.ts"; import { CheckpointStore, type CheckpointStoreShape } from "../Services/CheckpointStore.ts"; import { CheckpointRef } from "@t3tools/contracts"; @@ -24,10 +24,26 @@ const CHECKPOINT_DIFF_MAX_OUTPUT_BYTES = 10_000_000; const makeCheckpointStore = Effect.gen(function* () { const fs = yield* FileSystem.FileSystem; const path = yield* Path.Path; - const git = yield* GitCore; - - const resolveHeadCommit = (cwd: string): Effect.Effect => - git + const vcsRegistry = yield* VcsDriverRegistry; + const vcs = { + execute: (input: { + readonly operation: string; + readonly cwd: string; + readonly args: ReadonlyArray; + readonly stdin?: string; + readonly env?: NodeJS.ProcessEnv; + readonly allowNonZeroExit?: boolean; + readonly timeoutMs?: number; + readonly maxOutputBytes?: number; + readonly truncateOutputAtMaxBytes?: boolean; + }) => + vcsRegistry + .resolve({ cwd: input.cwd, requestedKind: "git" }) + .pipe(Effect.flatMap((handle) => handle.driver.execute(input))), + }; + + const resolveHeadCommit = (cwd: string) => + vcs .execute({ operation: "CheckpointStore.resolveHeadCommit", cwd, @@ -36,7 +52,7 @@ const makeCheckpointStore = Effect.gen(function* () { }) .pipe( Effect.map((result) => { - if (result.code !== 0) { + if (result.exitCode !== 0) { return null; 
} const commit = result.stdout.trim(); @@ -44,21 +60,18 @@ const makeCheckpointStore = Effect.gen(function* () { }), ); - const hasHeadCommit = (cwd: string): Effect.Effect => - git + const hasHeadCommit = (cwd: string) => + vcs .execute({ operation: "CheckpointStore.hasHeadCommit", cwd, args: ["rev-parse", "--verify", "HEAD"], allowNonZeroExit: true, }) - .pipe(Effect.map((result) => result.code === 0)); + .pipe(Effect.map((result) => result.exitCode === 0)); - const resolveCheckpointCommit = ( - cwd: string, - checkpointRef: CheckpointRef, - ): Effect.Effect => - git + const resolveCheckpointCommit = (cwd: string, checkpointRef: CheckpointRef) => + vcs .execute({ operation: "CheckpointStore.resolveCheckpointCommit", cwd, @@ -67,7 +80,7 @@ const makeCheckpointStore = Effect.gen(function* () { }) .pipe( Effect.map((result) => { - if (result.code !== 0) { + if (result.exitCode !== 0) { return null; } const commit = result.stdout.trim(); @@ -76,7 +89,7 @@ const makeCheckpointStore = Effect.gen(function* () { ); const isGitRepository: CheckpointStoreShape["isGitRepository"] = (cwd) => - git + vcs .execute({ operation: "CheckpointStore.isGitRepository", cwd, @@ -84,7 +97,7 @@ const makeCheckpointStore = Effect.gen(function* () { allowNonZeroExit: true, }) .pipe( - Effect.map((result) => result.code === 0 && result.stdout.trim() === "true"), + Effect.map((result) => result.exitCode === 0 && result.stdout.trim() === "true"), Effect.catch(() => Effect.succeed(false)), ); @@ -108,7 +121,7 @@ const makeCheckpointStore = Effect.gen(function* () { const headExists = yield* hasHeadCommit(input.cwd); if (headExists) { - yield* git.execute({ + yield* vcs.execute({ operation, cwd: input.cwd, args: ["read-tree", "HEAD"], @@ -116,14 +129,14 @@ const makeCheckpointStore = Effect.gen(function* () { }); } - yield* git.execute({ + yield* vcs.execute({ operation, cwd: input.cwd, args: ["add", "-A", "--", "."], env: commitEnv, }); - const writeTreeResult = yield* git.execute({ + const 
writeTreeResult = yield* vcs.execute({ operation, cwd: input.cwd, args: ["write-tree"], @@ -131,16 +144,17 @@ const makeCheckpointStore = Effect.gen(function* () { }); const treeOid = writeTreeResult.stdout.trim(); if (treeOid.length === 0) { - return yield* new GitCommandError({ + return yield* new VcsProcessExitError({ operation, command: "git write-tree", cwd: input.cwd, + exitCode: 0, detail: "git write-tree returned an empty tree oid.", }); } const message = `t3 checkpoint ref=${input.checkpointRef}`; - const commitTreeResult = yield* git.execute({ + const commitTreeResult = yield* vcs.execute({ operation, cwd: input.cwd, args: ["commit-tree", treeOid, "-m", message], @@ -148,15 +162,16 @@ const makeCheckpointStore = Effect.gen(function* () { }); const commitOid = commitTreeResult.stdout.trim(); if (commitOid.length === 0) { - return yield* new GitCommandError({ + return yield* new VcsProcessExitError({ operation, command: "git commit-tree", cwd: input.cwd, + exitCode: 0, detail: "git commit-tree returned an empty commit oid.", }); } - yield* git.execute({ + yield* vcs.execute({ operation, cwd: input.cwd, args: ["update-ref", input.checkpointRef, commitOid], @@ -197,12 +212,12 @@ const makeCheckpointStore = Effect.gen(function* () { return false; } - yield* git.execute({ + yield* vcs.execute({ operation, cwd: input.cwd, args: ["restore", "--source", commitOid, "--worktree", "--staged", "--", "."], }); - yield* git.execute({ + yield* vcs.execute({ operation, cwd: input.cwd, args: ["clean", "-fd", "--", "."], @@ -210,7 +225,7 @@ const makeCheckpointStore = Effect.gen(function* () { const headExists = yield* hasHeadCommit(input.cwd); if (headExists) { - yield* git.execute({ + yield* vcs.execute({ operation, cwd: input.cwd, args: ["reset", "--quiet", "--", "."], @@ -235,18 +250,29 @@ const makeCheckpointStore = Effect.gen(function* () { } if (!fromCommitOid || !toCommitOid) { - return yield* new GitCommandError({ + return yield* new VcsProcessExitError({ 
operation, command: "git diff", cwd: input.cwd, + exitCode: 1, detail: "Checkpoint ref is unavailable for diff operation.", }); } - const result = yield* git.execute({ + const diffArgs = [ + "diff", + "--patch", + "--minimal", + "--no-color", + ...(input.ignoreWhitespace ? ["--ignore-all-space"] : []), + fromCommitOid, + toCommitOid, + ]; + + const result = yield* vcs.execute({ operation, cwd: input.cwd, - args: ["diff", "--patch", "--minimal", "--no-color", fromCommitOid, toCommitOid], + args: diffArgs, maxOutputBytes: CHECKPOINT_DIFF_MAX_OUTPUT_BYTES, }); @@ -262,7 +288,7 @@ const makeCheckpointStore = Effect.gen(function* () { yield* Effect.forEach( input.checkpointRefs, (checkpointRef) => - git.execute({ + vcs.execute({ operation, cwd: input.cwd, args: ["update-ref", "-d", checkpointRef], diff --git a/apps/server/src/checkpointing/Services/CheckpointStore.ts b/apps/server/src/checkpointing/Services/CheckpointStore.ts index d9a43fa4e95..be6bdbccfaa 100644 --- a/apps/server/src/checkpointing/Services/CheckpointStore.ts +++ b/apps/server/src/checkpointing/Services/CheckpointStore.ts @@ -32,6 +32,7 @@ export interface DiffCheckpointsInput { readonly fromCheckpointRef: CheckpointRef; readonly toCheckpointRef: CheckpointRef; readonly fallbackFromToHead?: boolean; + readonly ignoreWhitespace: boolean; } export interface DeleteCheckpointRefsInput { diff --git a/apps/server/src/cli-config.test.ts b/apps/server/src/cli-config.test.ts index 5adece73020..2e13d33aabf 100644 --- a/apps/server/src/cli-config.test.ts +++ b/apps/server/src/cli-config.test.ts @@ -46,6 +46,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { bootstrapFd: Option.none(), autoBootstrapProjectFromCwd: Option.none(), logWebSocketEvents: Option.none(), + tailscaleServeEnabled: Option.none(), + tailscaleServePort: Option.none(), }, Option.none(), ).pipe( @@ -87,6 +89,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { desktopBootstrapToken: undefined, 
autoBootstrapProjectFromCwd: false, logWebSocketEvents: true, + tailscaleServeEnabled: false, + tailscaleServePort: 443, }); }), ); @@ -108,6 +112,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { bootstrapFd: Option.none(), autoBootstrapProjectFromCwd: Option.some(true), logWebSocketEvents: Option.some(true), + tailscaleServeEnabled: Option.some(true), + tailscaleServePort: Option.some(8443), }, Option.some("Debug"), ).pipe( @@ -149,6 +155,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { desktopBootstrapToken: undefined, autoBootstrapProjectFromCwd: true, logWebSocketEvents: true, + tailscaleServeEnabled: true, + tailscaleServePort: 8443, }); }), ); @@ -161,6 +169,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { noBrowser: true, autoBootstrapProjectFromCwd: true, logWebSocketEvents: true, + tailscaleServeEnabled: false, + tailscaleServePort: 443, }); const derivedPaths = yield* deriveServerPaths(baseDir, new URL("http://127.0.0.1:4173")); @@ -176,6 +186,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { bootstrapFd: Option.none(), autoBootstrapProjectFromCwd: Option.some(false), logWebSocketEvents: Option.some(false), + tailscaleServeEnabled: Option.none(), + tailscaleServePort: Option.none(), }, Option.none(), ).pipe( @@ -212,6 +224,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { desktopBootstrapToken: undefined, autoBootstrapProjectFromCwd: false, logWebSocketEvents: false, + tailscaleServeEnabled: false, + tailscaleServePort: 443, }); }), ); @@ -229,6 +243,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { noBrowser: true, autoBootstrapProjectFromCwd: false, logWebSocketEvents: true, + tailscaleServeEnabled: false, + tailscaleServePort: 443, otlpTracesUrl: "http://localhost:4318/v1/traces", otlpMetricsUrl: "http://localhost:4318/v1/metrics", }); @@ -246,6 +262,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { bootstrapFd: 
Option.none(), autoBootstrapProjectFromCwd: Option.none(), logWebSocketEvents: Option.none(), + tailscaleServeEnabled: Option.none(), + tailscaleServePort: Option.none(), }, Option.none(), ).pipe( @@ -281,6 +299,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { desktopBootstrapToken: undefined, autoBootstrapProjectFromCwd: false, logWebSocketEvents: true, + tailscaleServeEnabled: false, + tailscaleServePort: 443, }); assert.equal(join(baseDir, "dev"), resolved.stateDir); }), @@ -305,6 +325,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { bootstrapFd: Option.none(), autoBootstrapProjectFromCwd: Option.none(), logWebSocketEvents: Option.none(), + tailscaleServeEnabled: Option.none(), + tailscaleServePort: Option.none(), }, Option.none(), ).pipe( @@ -346,6 +368,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { noBrowser: false, autoBootstrapProjectFromCwd: false, logWebSocketEvents: false, + tailscaleServeEnabled: false, + tailscaleServePort: 443, }); const derivedPaths = yield* deriveServerPaths(baseDir, new URL("http://127.0.0.1:4173")); @@ -361,6 +385,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { bootstrapFd: Option.none(), autoBootstrapProjectFromCwd: Option.none(), logWebSocketEvents: Option.none(), + tailscaleServeEnabled: Option.none(), + tailscaleServePort: Option.none(), }, Option.some("Debug"), ).pipe( @@ -399,6 +425,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { desktopBootstrapToken: undefined, autoBootstrapProjectFromCwd: true, logWebSocketEvents: true, + tailscaleServeEnabled: false, + tailscaleServePort: 443, }); }), ); @@ -432,6 +460,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { bootstrapFd: Option.none(), autoBootstrapProjectFromCwd: Option.none(), logWebSocketEvents: Option.none(), + tailscaleServeEnabled: Option.none(), + tailscaleServePort: Option.none(), }, Option.none(), ).pipe( @@ -463,6 +493,8 @@ 
it.layer(NodeServices.layer)("cli config resolution", (it) => { desktopBootstrapToken: undefined, autoBootstrapProjectFromCwd: false, logWebSocketEvents: false, + tailscaleServeEnabled: false, + tailscaleServePort: 443, }); }), ); @@ -485,6 +517,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { bootstrapFd: Option.none(), autoBootstrapProjectFromCwd: Option.none(), logWebSocketEvents: Option.none(), + tailscaleServeEnabled: Option.none(), + tailscaleServePort: Option.none(), }, Option.none(), { @@ -522,6 +556,8 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { desktopBootstrapToken: undefined, autoBootstrapProjectFromCwd: false, logWebSocketEvents: false, + tailscaleServeEnabled: false, + tailscaleServePort: 443, }); }), ); diff --git a/apps/server/src/cli.test.ts b/apps/server/src/cli.test.ts index 7ebde01067a..3ef8c441959 100644 --- a/apps/server/src/cli.test.ts +++ b/apps/server/src/cli.test.ts @@ -76,6 +76,8 @@ const makeCliTestServerConfig = (baseDir: string) => desktopBootstrapToken: undefined, autoBootstrapProjectFromCwd: false, logWebSocketEvents: false, + tailscaleServeEnabled: false, + tailscaleServePort: 443, } satisfies ServerConfigShape; }); diff --git a/apps/server/src/cli.ts b/apps/server/src/cli.ts index 4fc23a1ded0..bcf8bee861c 100644 --- a/apps/server/src/cli.ts +++ b/apps/server/src/cli.ts @@ -78,6 +78,8 @@ const BootstrapEnvelopeSchema = Schema.Struct({ desktopBootstrapToken: Schema.optional(Schema.String), autoBootstrapProjectFromCwd: Schema.optional(Schema.Boolean), logWebSocketEvents: Schema.optional(Schema.Boolean), + tailscaleServeEnabled: Schema.optional(Schema.Boolean), + tailscaleServePort: Schema.optional(PortSchema), otlpTracesUrl: Schema.optional(Schema.String), otlpMetricsUrl: Schema.optional(Schema.String), }); @@ -126,6 +128,17 @@ const logWebSocketEventsFlag = Flag.boolean("log-websocket-events").pipe( Flag.withAlias("log-ws-events"), Flag.optional, ); +const tailscaleServeFlag = 
Flag.boolean("tailscale-serve").pipe( + Flag.withDescription( + "Configure Tailscale Serve to expose this backend over HTTPS on the Tailnet.", + ), + Flag.optional, +); +const tailscaleServePortFlag = Flag.integer("tailscale-serve-port").pipe( + Flag.withSchema(PortSchema), + Flag.withDescription("HTTPS port for Tailscale Serve when --tailscale-serve is enabled."), + Flag.optional, +); const EnvServerConfig = Config.all({ logLevel: Config.logLevel("T3CODE_LOG_LEVEL").pipe(Config.withDefault("Info")), @@ -174,6 +187,14 @@ const EnvServerConfig = Config.all({ Config.option, Config.map(Option.getOrUndefined), ), + tailscaleServeEnabled: Config.boolean("T3CODE_TAILSCALE_SERVE").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), + tailscaleServePort: Config.port("T3CODE_TAILSCALE_SERVE_PORT").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), }); interface CliServerFlags { @@ -187,6 +208,8 @@ interface CliServerFlags { readonly bootstrapFd: Option.Option; readonly autoBootstrapProjectFromCwd: Option.Option; readonly logWebSocketEvents: Option.Option; + readonly tailscaleServeEnabled: Option.Option; + readonly tailscaleServePort: Option.Option; } interface CliAuthLocationFlags { @@ -233,6 +256,8 @@ export const resolveServerConfig = ( bootstrapFd: flags.bootstrapFd ?? Option.none(), autoBootstrapProjectFromCwd: flags.autoBootstrapProjectFromCwd ?? Option.none(), logWebSocketEvents: flags.logWebSocketEvents ?? Option.none(), + tailscaleServeEnabled: flags.tailscaleServeEnabled ?? Option.none(), + tailscaleServePort: flags.tailscaleServePort ?? Option.none(), } satisfies CliServerFlags; const bootstrapFd = Option.getOrUndefined(normalizedFlags.bootstrapFd) ?? 
env.bootstrapFd; const bootstrapEnvelope = @@ -323,6 +348,22 @@ export const resolveServerConfig = ( ), () => Boolean(devUrl), ); + const tailscaleServeEnabled = Option.getOrElse( + resolveOptionPrecedence( + normalizedFlags.tailscaleServeEnabled, + Option.fromUndefinedOr(env.tailscaleServeEnabled), + Option.fromUndefinedOr(bootstrap?.tailscaleServeEnabled), + ), + () => false, + ); + const tailscaleServePort = Option.getOrElse( + resolveOptionPrecedence( + normalizedFlags.tailscaleServePort, + Option.fromUndefinedOr(env.tailscaleServePort), + Option.fromUndefinedOr(bootstrap?.tailscaleServePort), + ), + () => 443, + ); const staticDir = devUrl ? undefined : yield* resolveStaticDir(); const host = Option.getOrElse( resolveOptionPrecedence( @@ -365,6 +406,8 @@ export const resolveServerConfig = ( desktopBootstrapToken, autoBootstrapProjectFromCwd, logWebSocketEvents, + tailscaleServeEnabled, + tailscaleServePort, }; return config; @@ -386,6 +429,8 @@ const resolveCliAuthConfig = ( bootstrapFd: Option.none(), autoBootstrapProjectFromCwd: Option.none(), logWebSocketEvents: Option.none(), + tailscaleServeEnabled: Option.none(), + tailscaleServePort: Option.none(), }, cliLogLevel, ); @@ -766,6 +811,8 @@ const sharedServerCommandFlags = { bootstrapFd: bootstrapFdFlag, autoBootstrapProjectFromCwd: autoBootstrapProjectFromCwdFlag, logWebSocketEvents: logWebSocketEventsFlag, + tailscaleServeEnabled: tailscaleServeFlag, + tailscaleServePort: tailscaleServePortFlag, } as const; const authLocationFlags = sharedServerLocationFlags; diff --git a/apps/server/src/config.ts b/apps/server/src/config.ts index 7840c761151..4d8e1cb7e79 100644 --- a/apps/server/src/config.ts +++ b/apps/server/src/config.ts @@ -65,6 +65,8 @@ export interface ServerConfigShape extends ServerDerivedPaths { readonly desktopBootstrapToken: string | undefined; readonly autoBootstrapProjectFromCwd: boolean; readonly logWebSocketEvents: boolean; + readonly tailscaleServeEnabled: boolean; + readonly 
tailscaleServePort: number; } export const deriveServerPaths = Effect.fn(function* ( @@ -158,6 +160,8 @@ export class ServerConfig extends Context.Service { try { const session = await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", cwd: "/tmp", - modelSelection: { provider: "geminiCli", model: "gemini-2.5-pro" }, + modelSelection: { instanceId: ProviderInstanceId.make("geminiCli"), model: "gemini-2.5-pro" }, }); expect(session.provider).toBe("geminiCli"); @@ -116,7 +122,7 @@ describe("GeminiCliServerManager", () => { try { const session = await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", resumeCursor: { sessionId: "gemini-session-123", @@ -137,14 +143,14 @@ describe("GeminiCliServerManager", () => { try { await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", }); expect(() => manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", }), ).toThrow("already exists"); @@ -161,7 +167,7 @@ describe("GeminiCliServerManager", () => { try { await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", }); @@ -189,7 +195,7 @@ describe("GeminiCliServerManager", () => { const manager = new GeminiCliServerManager(); await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", }); @@ -213,7 +219,7 @@ describe("GeminiCliServerManager", () => { const manager = new GeminiCliServerManager(); await manager.startSession({ threadId: 
asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", }); @@ -237,7 +243,7 @@ describe("GeminiCliServerManager", () => { try { await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", }); @@ -259,7 +265,7 @@ describe("GeminiCliServerManager", () => { const manager = new GeminiCliServerManager(); await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", }); @@ -280,15 +286,15 @@ describe("GeminiCliServerManager", () => { try { await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", - modelSelection: { provider: "geminiCli", model: "gemini-3-flash" }, + modelSelection: { instanceId: ProviderInstanceId.make("geminiCli"), model: "gemini-3-flash" }, }); await manager.startSession({ threadId: asThreadId("thread-2"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", - modelSelection: { provider: "geminiCli", model: "gemini-2.5-pro" }, + modelSelection: { instanceId: ProviderInstanceId.make("geminiCli"), model: "gemini-2.5-pro" }, }); const sessions = manager.listSessions(); @@ -309,7 +315,7 @@ describe("GeminiCliServerManager", () => { try { await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", }); @@ -333,7 +339,7 @@ describe("GeminiCliServerManager", () => { try { await manager.startSession({ threadId: asThreadId("thread-1"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", }); @@ -371,9 +377,9 @@ describe("GeminiCliServerManager JSON event mapping", () => { 
await manager.startSession({ threadId: asThreadId("thread-json"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", - modelSelection: { provider: "geminiCli", model: "gemini-2.5-pro" }, + modelSelection: { instanceId: ProviderInstanceId.make("geminiCli"), model: "gemini-2.5-pro" }, cwd: "/tmp", }); @@ -709,9 +715,9 @@ describe.skipIf(!hasGemini || process.env.RUN_GEMINI_LIVE_TESTS !== "1")( try { await manager.startSession({ threadId: asThreadId("live-thread"), - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), runtimeMode: "full-access", - modelSelection: { provider: "geminiCli", model: "gemini-2.5-flash" }, + modelSelection: { instanceId: ProviderInstanceId.make("geminiCli"), model: "gemini-2.5-flash" }, }); const result = await manager.sendTurn({ diff --git a/apps/server/src/geminiCliServerManager.ts b/apps/server/src/geminiCliServerManager.ts index fb682a19ee3..5ab113f56d1 100644 --- a/apps/server/src/geminiCliServerManager.ts +++ b/apps/server/src/geminiCliServerManager.ts @@ -13,6 +13,7 @@ import readline from "node:readline"; import { ApprovalRequestId, EventId, + ProviderDriverKind, RuntimeItemId, ThreadId, TurnId, @@ -28,9 +29,9 @@ import type { ProviderSessionUsage, ProviderUsageResult } from "@t3tools/contrac import type { ProviderThreadSnapshot } from "./provider/Services/ProviderAdapter.ts"; import { resolveCommandPath } from "./commandPath.ts"; -const PROVIDER = "geminiCli" as const; +const PROVIDER = ProviderDriverKind.make("geminiCli"); -// ── Module-level usage tracking ────────────────────────────────────── +// ── Per-instance usage tracking ────────────────────────────────────── interface GeminiUsageAccumulator { inputTokens: number; @@ -40,29 +41,13 @@ interface GeminiUsageAccumulator { turnCount: number; } -let _geminiUsageAccumulator: GeminiUsageAccumulator = { - inputTokens: 0, - outputTokens: 0, - cachedTokens: 0, - totalTokens: 0, - turnCount: 0, -}; - -export 
function fetchGeminiCliUsage(): ProviderUsageResult { - const acc = _geminiUsageAccumulator; - let sessionUsage: ProviderSessionUsage | undefined; - if (acc.turnCount > 0) { - sessionUsage = { - inputTokens: acc.inputTokens, - outputTokens: acc.outputTokens, - ...(acc.cachedTokens > 0 ? { cachedTokens: acc.cachedTokens } : {}), - totalTokens: acc.totalTokens, - turnCount: acc.turnCount, - }; - } +function emptyUsageAccumulator(): GeminiUsageAccumulator { return { - provider: PROVIDER, - ...(sessionUsage ? { sessionUsage } : {}), + inputTokens: 0, + outputTokens: 0, + cachedTokens: 0, + totalTokens: 0, + turnCount: 0, }; } @@ -343,8 +328,37 @@ export class GeminiCliServerManager extends EventEmitter<{ event: [ProviderRuntimeEvent]; }> { private readonly sessions = new Map(); + private readonly usageAccumulator: GeminiUsageAccumulator = emptyUsageAccumulator(); binaryPath: string | undefined; + constructor(options: { readonly binaryPath?: string | undefined } = {}) { + super(); + this.binaryPath = options.binaryPath; + } + + /** + * Per-instance usage snapshot. Replaces the old module-level + * `fetchGeminiCliUsage()` so two managers running side-by-side don't share + * an accumulator. + */ + fetchUsage(): ProviderUsageResult { + const acc = this.usageAccumulator; + let sessionUsage: ProviderSessionUsage | undefined; + if (acc.turnCount > 0) { + sessionUsage = { + inputTokens: acc.inputTokens, + outputTokens: acc.outputTokens, + ...(acc.cachedTokens > 0 ? { cachedTokens: acc.cachedTokens } : {}), + totalTokens: acc.totalTokens, + turnCount: acc.turnCount, + }; + } + return { + provider: PROVIDER, + ...(sessionUsage ? 
{ sessionUsage } : {}), + }; + } + startSession(input: ProviderSessionStartInput): Promise { const threadId = input.threadId; if (this.sessions.has(threadId)) { @@ -777,15 +791,15 @@ export class GeminiCliServerManager extends EventEmitter<{ // Accumulate session-level usage if (usage) { - _geminiUsageAccumulator.turnCount++; + this.usageAccumulator.turnCount++; if (typeof usage.input_tokens === "number") - _geminiUsageAccumulator.inputTokens += usage.input_tokens; + this.usageAccumulator.inputTokens += usage.input_tokens; if (typeof usage.output_tokens === "number") - _geminiUsageAccumulator.outputTokens += usage.output_tokens; + this.usageAccumulator.outputTokens += usage.output_tokens; if (typeof usage.cached_tokens === "number") - _geminiUsageAccumulator.cachedTokens += usage.cached_tokens; + this.usageAccumulator.cachedTokens += usage.cached_tokens; if (typeof usage.total_tokens === "number") - _geminiUsageAccumulator.totalTokens += usage.total_tokens; + this.usageAccumulator.totalTokens += usage.total_tokens; } // Support both `error_message` (legacy) and `error.message` (0.32+). 
diff --git a/apps/server/src/git/Layers/GitManager.test.ts b/apps/server/src/git/GitManager.test.ts similarity index 89% rename from apps/server/src/git/Layers/GitManager.test.ts rename to apps/server/src/git/GitManager.test.ts index dbbc821a088..844cbb47ae3 100644 --- a/apps/server/src/git/Layers/GitManager.test.ts +++ b/apps/server/src/git/GitManager.test.ts @@ -5,6 +5,7 @@ import { spawnSync } from "node:child_process"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { it } from "@effect/vitest"; import { Effect, FileSystem, Layer, PlatformError, Scope } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; import { expect } from "vitest"; import type { GitActionProgressEvent, @@ -13,24 +14,27 @@ import type { ThreadId, } from "@t3tools/contracts"; -import { GitCommandError, GitHubCliError, TextGenerationError } from "@t3tools/contracts"; -import { type GitManagerShape } from "../Services/GitManager.ts"; +import { GitCommandError, TextGenerationError } from "@t3tools/contracts"; +import { type GitManagerShape } from "./GitManager.ts"; import { + GitHubCliError, type GitHubCliShape, type GitHubPullRequestSummary, GitHubCli, -} from "../Services/GitHubCli.ts"; -import { type TextGenerationShape, TextGeneration } from "../Services/TextGeneration.ts"; -import { GitCoreLive } from "./GitCore.ts"; -import { GitCore } from "../Services/GitCore.ts"; +} from "../sourceControl/GitHubCli.ts"; +import { type TextGenerationShape, TextGeneration } from "../textGeneration/TextGeneration.ts"; +import * as GitVcsDriver from "../vcs/GitVcsDriver.ts"; +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as GitHubSourceControlProvider from "../sourceControl/GitHubSourceControlProvider.ts"; +import * as SourceControlProviderRegistry from "../sourceControl/SourceControlProviderRegistry.ts"; import { makeGitManager } from "./GitManager.ts"; -import { ServerConfig } from "../../config.ts"; -import { ServerSettingsService } 
from "../../serverSettings.ts"; +import { ServerConfig } from "../config.ts"; +import { ServerSettingsService } from "../serverSettings.ts"; import { ProjectSetupScriptRunner, type ProjectSetupScriptRunnerInput, type ProjectSetupScriptRunnerShape, -} from "../../project/Services/ProjectSetupScriptRunner.ts"; +} from "../project/Services/ProjectSetupScriptRunner.ts"; interface FakeGhScenario { prListSequence?: string[]; @@ -53,6 +57,16 @@ interface FakeGhScenario { failWith?: GitHubCliError; } +function fakeGhOutput(stdout: string): VcsProcess.VcsProcessOutput { + return { + exitCode: ChildProcessSpawner.ExitCode(0), + stdout, + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, + }; +} + interface FakeGitTextGeneration { generateCommitMessage: (input: { cwd: string; @@ -209,18 +223,27 @@ function runGit( args: readonly string[], allowNonZeroExit = false, ): Effect.Effect< - { readonly code: number; readonly stdout: string; readonly stderr: string }, + { + readonly exitCode: GitVcsDriver.ExecuteGitResult["exitCode"]; + readonly stdout: string; + readonly stderr: string; + }, GitCommandError, - GitCore + GitVcsDriver.GitVcsDriver > { return Effect.gen(function* () { - const gitCore = yield* GitCore; - return yield* gitCore.execute({ + const git = yield* GitVcsDriver.GitVcsDriver; + const result = yield* git.execute({ operation: "GitManager.test.runGit", cwd, args, allowNonZeroExit, }); + return { + exitCode: result.exitCode, + stdout: result.stdout, + stderr: result.stderr, + }; }); } @@ -229,7 +252,7 @@ function initRepo( ): Effect.Effect< void, PlatformError.PlatformError | GitCommandError, - FileSystem.FileSystem | Scope.Scope | GitCore + FileSystem.FileSystem | Scope.Scope | GitVcsDriver.GitVcsDriver > { return Effect.gen(function* () { const fs = yield* FileSystem.FileSystem; @@ -245,7 +268,7 @@ function initRepo( function createBareRemote(): Effect.Effect< string, PlatformError.PlatformError | GitCommandError, - FileSystem.FileSystem | Scope.Scope 
| GitCore + FileSystem.FileSystem | Scope.Scope | GitVcsDriver.GitVcsDriver > { return Effect.gen(function* () { const remoteDir = yield* makeTempDir("t3code-git-remote-"); @@ -259,7 +282,7 @@ function configureRemote( remoteName: string, remotePath: string, fetchNamespace: string, -): Effect.Effect { +): Effect.Effect { return Effect.gen(function* () { yield* runGit(cwd, ["config", `remote.${remoteName}.url`, remotePath]); yield* runGit(cwd, [ @@ -379,24 +402,15 @@ function createGitHubCliWithFakeGh(scenario: FakeGhScenario = {}): { ? scenario.prListByHeadSelector?.[headSelector] : undefined; const stdout = (mappedQueue ?? mappedStdout ?? prListQueue.shift() ?? "[]") + "\n"; - return Effect.succeed({ - stdout, - stderr: "", - code: 0, - signal: null, - timedOut: false, - }); + return Effect.succeed(fakeGhOutput(stdout)); } if (args[0] === "pr" && args[1] === "create") { - return Effect.succeed({ - stdout: + return Effect.succeed( + fakeGhOutput( (scenario.createdPrUrl ?? "https://github.com/pingdotgg/codething-mvp/pull/101") + "\n", - stderr: "", - code: 0, - signal: null, - timedOut: false, - }); + ), + ); } if (args[0] === "pr" && args[1] === "view") { @@ -408,8 +422,8 @@ function createGitHubCliWithFakeGh(scenario: FakeGhScenario = {}): { headRefName: "feature/pull-request", state: "open", }; - return Effect.succeed({ - stdout: + return Effect.succeed( + fakeGhOutput( JSON.stringify({ ...pullRequest, ...(pullRequest.headRepositoryNameWithOwner @@ -427,11 +441,8 @@ function createGitHubCliWithFakeGh(scenario: FakeGhScenario = {}): { } : {}), }) + "\n", - stderr: "", - code: 0, - signal: null, - timedOut: false, - }); + ), + ); } if (args[0] === "pr" && args[1] === "checkout") { @@ -453,13 +464,7 @@ function createGitHubCliWithFakeGh(scenario: FakeGhScenario = {}): { runGitSyncForFakeGh(input.cwd, ["checkout", "-b", headBranch]); } } - return { - stdout: "", - stderr: "", - code: 0, - signal: null, - timedOut: false, - }; + return fakeGhOutput(""); }, catch: 
(error) => isGitHubCliError(error) @@ -486,26 +491,17 @@ function createGitHubCliWithFakeGh(scenario: FakeGhScenario = {}): { }), ); } - return Effect.succeed({ - stdout: + return Effect.succeed( + fakeGhOutput( JSON.stringify({ nameWithOwner: repository, url: cloneUrls.url, sshUrl: cloneUrls.sshUrl, }) + "\n", - stderr: "", - code: 0, - signal: null, - timedOut: false, - }); + ), + ); } - return Effect.succeed({ - stdout: `${scenario.defaultBranch ?? "main"}\n`, - stderr: "", - code: 0, - signal: null, - timedOut: false, - }); + return Effect.succeed(fakeGhOutput(`${scenario.defaultBranch ?? "main"}\n`)); } return Effect.fail( @@ -584,6 +580,13 @@ function createGitHubCliWithFakeGh(scenario: FakeGhScenario = {}): { cwd: input.cwd, args: ["repo", "view", input.repository, "--json", "nameWithOwner,url,sshUrl"], }).pipe(Effect.map((result) => JSON.parse(result.stdout))), + createRepository: (input) => + Effect.fail( + new GitHubCliError({ + operation: "createRepository", + detail: `Unexpected repository create: ${input.repository}`, + }), + ), checkoutPullRequest: (input) => execute({ cwd: input.cwd, @@ -639,13 +642,27 @@ function makeManager(input?: { const serverSettingsLayer = ServerSettingsService.layerTest(); - const gitCoreLayer = GitCoreLive.pipe( + const vcsDriverLayer = GitVcsDriver.layer.pipe( + Layer.provideMerge(VcsProcess.layer), Layer.provideMerge(NodeServices.layer), Layer.provideMerge(ServerConfigLayer), ); + const sourceControlRegistryLayer = Layer.effect( + SourceControlProviderRegistry.SourceControlProviderRegistry, + GitHubSourceControlProvider.make().pipe( + Effect.map((provider) => + SourceControlProviderRegistry.SourceControlProviderRegistry.of({ + get: () => Effect.succeed(provider), + resolveHandle: () => Effect.succeed({ provider, context: null }), + resolve: () => Effect.succeed(provider), + discover: Effect.succeed([]), + }), + ), + Effect.provide(Layer.succeed(GitHubCli, gitHubCli)), + ), + ); const managerLayer = Layer.mergeAll( - 
Layer.succeed(GitHubCli, gitHubCli), Layer.succeed(TextGeneration, textGeneration), Layer.succeed( ProjectSetupScriptRunner, @@ -653,9 +670,9 @@ function makeManager(input?: { runForThread: () => Effect.succeed({ status: "no-script" as const }), }, ), - gitCoreLayer, + vcsDriverLayer, serverSettingsLayer, - ).pipe(Layer.provideMerge(NodeServices.layer)); + ).pipe(Layer.provideMerge(sourceControlRegistryLayer), Layer.provideMerge(NodeServices.layer)); return makeGitManager().pipe( Effect.provide(managerLayer), @@ -665,8 +682,9 @@ function makeManager(input?: { const asThreadId = (threadId: string) => threadId as ThreadId; -const GitManagerTestLayer = GitCoreLive.pipe( +const GitManagerTestLayer = GitVcsDriver.layer.pipe( Layer.provide(ServerConfig.layerTest(process.cwd(), { prefix: "t3-git-manager-test-" })), + Layer.provideMerge(VcsProcess.layer), Layer.provideMerge(NodeServices.layer), ); @@ -698,15 +716,15 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { const status = yield* manager.status({ cwd: repoDir }); expect(status.isRepo).toBe(true); - expect(status.hasOriginRemote).toBe(true); - expect(status.isDefaultBranch).toBe(false); - expect(status.branch).toBe("feature/status-open-pr"); + expect(status.hasPrimaryRemote).toBe(true); + expect(status.isDefaultRef).toBe(false); + expect(status.refName).toBe("feature/status-open-pr"); expect(status.pr).toEqual({ number: 13, title: "Existing PR", url: "https://github.com/pingdotgg/codething-mvp/pull/13", - baseBranch: "main", - headBranch: "feature/status-open-pr", + baseRef: "main", + headRef: "feature/status-open-pr", state: "open", }); }), @@ -743,8 +761,8 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { number: 14, title: "Existing PR title", url: "https://github.com/pingdotgg/codething-mvp/pull/14", - baseBranch: "main", - headBranch: "feature/status-trimmed-pr", + baseRef: "main", + headRef: "feature/status-trimmed-pr", state: "open", }); }), @@ -794,8 +812,8 @@ 
it.layer(GitManagerTestLayer)("GitManager", (it) => { number: 15, title: "Valid PR title", url: "https://github.com/pingdotgg/codething-mvp/pull/15", - baseBranch: "main", - headBranch: "feature/status-valid-pr-entry", + baseRef: "main", + headRef: "feature/status-valid-pr-entry", state: "open", }); }), @@ -843,8 +861,8 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { number: 17, title: "Merged PR", url: "https://github.com/pingdotgg/codething-mvp/pull/17", - baseBranch: "main", - headBranch: "feature/status-lowercase-state", + baseRef: "main", + headRef: "feature/status-lowercase-state", state: "merged", }); }), @@ -859,9 +877,9 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { expect(status).toEqual({ isRepo: false, - hasOriginRemote: false, - isDefaultBranch: false, - branch: null, + hasPrimaryRemote: false, + isDefaultRef: false, + refName: null, hasWorkingTreeChanges: false, workingTree: { files: [], @@ -871,6 +889,7 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { hasUpstream: false, aheadCount: 0, behindCount: 0, + aheadOfDefaultCount: 0, pr: null, }); }), @@ -888,9 +907,9 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { expect(status).toEqual({ isRepo: false, - hasOriginRemote: false, - isDefaultBranch: false, - branch: null, + hasPrimaryRemote: false, + isDefaultRef: false, + refName: null, hasWorkingTreeChanges: false, workingTree: { files: [], @@ -900,6 +919,7 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { hasUpstream: false, aheadCount: 0, behindCount: 0, + aheadOfDefaultCount: 0, pr: null, }); }), @@ -972,7 +992,7 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }); const status = yield* manager.status({ cwd: repoDir }); - expect(status.branch).toBe("main"); + expect(status.refName).toBe("main"); expect(status.pr).toBeNull(); }), ); @@ -1026,13 +1046,13 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }); const status = yield* manager.status({ cwd: repoDir }); - 
expect(status.branch).toBe("t3code/pr-488/statemachine"); + expect(status.refName).toBe("t3code/pr-488/statemachine"); expect(status.pr).toEqual({ number: 488, title: "Rebase this PR on latest main", url: "https://github.com/pingdotgg/codething-mvp/pull/488", - baseBranch: "main", - headBranch: "statemachine", + baseRef: "main", + headRef: "statemachine", state: "open", }); expect(ghCalls).toContain( @@ -1127,13 +1147,13 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }); const status = yield* manager.status({ cwd: repoDir }); - expect(status.branch).toBe("upstream/effect-atom"); + expect(status.refName).toBe("upstream/effect-atom"); expect(status.pr).toEqual({ number: 1618, title: "Correct PR", url: "https://github.com/pingdotgg/t3code/pull/1618", - baseBranch: "main", - headBranch: "effect-atom", + baseRef: "main", + headRef: "effect-atom", state: "open", }); expect(ghCalls.some((call) => call.includes("pr list --head upstream/effect-atom "))).toBe( @@ -1177,18 +1197,48 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }); const status = yield* manager.status({ cwd: repoDir }); - expect(status.branch).toBe("feature/status-merged-pr"); + expect(status.refName).toBe("feature/status-merged-pr"); expect(status.pr).toEqual({ number: 22, title: "Merged PR", url: "https://github.com/pingdotgg/codething-mvp/pull/22", - baseBranch: "main", - headBranch: "feature/status-merged-pr", + baseRef: "main", + headRef: "feature/status-merged-pr", state: "merged", }); }), ); + it.effect("status hides merged PRs on the default branch", () => + Effect.gen(function* () { + const repoDir = yield* makeTempDir("t3code-git-manager-"); + yield* initRepo(repoDir); + + const { manager } = yield* makeManager({ + ghScenario: { + prListSequence: [ + JSON.stringify([ + { + number: 23, + title: "Merged PR", + url: "https://github.com/pingdotgg/codething-mvp/pull/23", + baseRefName: "feature/status-default-branch-target", + headRefName: "main", + state: "MERGED", + mergedAt: 
"2026-01-30T10:00:00Z", + updatedAt: "2026-01-30T10:00:00Z", + }, + ]), + ], + }, + }); + + const status = yield* manager.status({ cwd: repoDir }); + expect(status.refName).toBe("main"); + expect(status.pr).toBeNull(); + }), + ); + it.effect("status prefers open PR when merged PR has newer updatedAt", () => Effect.gen(function* () { const repoDir = yield* makeTempDir("t3code-git-manager-"); @@ -1224,13 +1274,13 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }); const status = yield* manager.status({ cwd: repoDir }); - expect(status.branch).toBe("feature/status-open-over-merged"); + expect(status.refName).toBe("feature/status-open-over-merged"); expect(status.pr).toEqual({ number: 46, title: "Open PR", url: "https://github.com/pingdotgg/codething-mvp/pull/46", - baseBranch: "main", - headBranch: "feature/status-open-over-merged", + baseRef: "main", + headRef: "feature/status-open-over-merged", state: "open", }); }), @@ -1255,7 +1305,7 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }); const status = yield* manager.status({ cwd: repoDir }); - expect(status.branch).toBe("feature/status-no-gh"); + expect(status.refName).toBe("feature/status-no-gh"); expect(status.pr).toBeNull(); }), ); @@ -1633,6 +1683,41 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }), ); + it.effect("pushes existing commits without committing dirty worktree changes", () => + Effect.gen(function* () { + const repoDir = yield* makeTempDir("t3code-git-manager-"); + yield* initRepo(repoDir); + yield* runGit(repoDir, ["checkout", "-b", "feature/push-dirty"]); + const remoteDir = yield* createBareRemote(); + yield* runGit(repoDir, ["remote", "add", "origin", remoteDir]); + fs.writeFileSync(path.join(repoDir, "push-dirty.txt"), "push dirty\n"); + yield* runGit(repoDir, ["add", "push-dirty.txt"]); + yield* runGit(repoDir, ["commit", "-m", "Push dirty branch"]); + fs.mkdirSync(path.join(repoDir, ".vercel")); + fs.writeFileSync(path.join(repoDir, ".vercel", "project.json"), 
"{}\n"); + + const { manager } = yield* makeManager(); + const result = yield* runStackedAction(manager, { + cwd: repoDir, + action: "push", + }); + + expect(result.commit.status).toBe("skipped_not_requested"); + expect(result.push.status).toBe("pushed"); + expect(result.pr.status).toBe("skipped_not_requested"); + expect( + yield* runGit(repoDir, ["status", "--porcelain"]).pipe( + Effect.map((output) => output.stdout.trim()), + ), + ).toContain("?? .vercel/"); + expect( + yield* runGit(remoteDir, ["log", "-1", "--pretty=%s", "feature/push-dirty"]).pipe( + Effect.map((output) => output.stdout.trim()), + ), + ).toBe("Push dirty branch"); + }), + ); + it.effect("create_pr pushes a clean branch before creating the PR when needed", () => Effect.gen(function* () { const repoDir = yield* makeTempDir("t3code-git-manager-"); @@ -1679,6 +1764,49 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }), ); + it.effect("create_pr falls back to main when source control provider detection fails", () => + Effect.gen(function* () { + const repoDir = yield* makeTempDir("t3code-git-manager-"); + yield* initRepo(repoDir); + yield* runGit(repoDir, ["checkout", "-b", "feature/provider-fallback"]); + fs.writeFileSync(path.join(repoDir, "provider-fallback.txt"), "fallback\n"); + yield* runGit(repoDir, ["add", "provider-fallback.txt"]); + yield* runGit(repoDir, ["commit", "-m", "Provider fallback"]); + const remoteDir = yield* createBareRemote(); + yield* runGit(repoDir, ["remote", "add", "origin", remoteDir]); + + const { manager, ghCalls } = yield* makeManager({ + ghScenario: { + prListSequence: [ + "[]", + JSON.stringify([ + { + number: 404, + title: "Provider fallback", + url: "https://github.com/pingdotgg/codething-mvp/pull/404", + baseRefName: "main", + headRefName: "feature/provider-fallback", + }, + ]), + ], + }, + }); + + const result = yield* runStackedAction(manager, { + cwd: repoDir, + action: "create_pr", + }); + + expect(result.pr.status).toBe("created"); + 
expect(result.pr.number).toBe(404); + expect( + ghCalls.some((call) => + call.includes("pr create --base main --head feature/provider-fallback"), + ), + ).toBe(true); + }), + ); + it.effect("returns existing PR metadata for commit/push/pr action", () => Effect.gen(function* () { const repoDir = yield* makeTempDir("t3code-git-manager-"); @@ -2340,6 +2468,113 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { }), ); + it.effect( + "restores same-repository upstream tracking after local PR checkout without a remote ref", + () => + Effect.gen(function* () { + const repoDir = yield* makeTempDir("t3code-git-manager-"); + yield* initRepo(repoDir); + const remoteDir = yield* createBareRemote(); + yield* runGit(repoDir, ["remote", "add", "origin", remoteDir]); + yield* runGit(repoDir, ["push", "-u", "origin", "main"]); + yield* runGit(repoDir, ["checkout", "-b", "feature/pr-local-upstream"]); + fs.writeFileSync(path.join(repoDir, "upstream.txt"), "upstream\n"); + yield* runGit(repoDir, ["add", "upstream.txt"]); + yield* runGit(repoDir, ["commit", "-m", "Local upstream PR branch"]); + yield* runGit(repoDir, ["push", "-u", "origin", "feature/pr-local-upstream"]); + yield* runGit(repoDir, ["checkout", "main"]); + yield* runGit(repoDir, ["branch", "-D", "feature/pr-local-upstream"]); + yield* runGit(repoDir, [ + "update-ref", + "-d", + "refs/remotes/origin/feature/pr-local-upstream", + ]); + + const { manager } = yield* makeManager({ + ghScenario: { + pullRequest: { + number: 65, + title: "Local upstream PR", + url: "https://github.com/pingdotgg/codething-mvp/pull/65", + baseRefName: "main", + headRefName: "feature/pr-local-upstream", + state: "open", + isCrossRepository: false, + headRepositoryNameWithOwner: "pingdotgg/codething-mvp", + headRepositoryOwnerLogin: "pingdotgg", + }, + repositoryCloneUrls: { + "pingdotgg/codething-mvp": { + url: remoteDir, + sshUrl: remoteDir, + }, + }, + }, + }); + + const result = yield* preparePullRequestThread(manager, { + cwd: repoDir, 
+ reference: "65", + mode: "local", + }); + + expect(result.worktreePath).toBeNull(); + expect(result.branch).toBe("feature/pr-local-upstream"); + expect( + (yield* runGit(repoDir, ["rev-parse", "--abbrev-ref", "@{upstream}"])).stdout.trim(), + ).toBe("origin/feature/pr-local-upstream"); + }), + ); + + it.effect( + "restores same-repository upstream tracking when provider omits head repository metadata", + () => + Effect.gen(function* () { + const repoDir = yield* makeTempDir("t3code-git-manager-"); + yield* initRepo(repoDir); + const remoteDir = yield* createBareRemote(); + yield* runGit(repoDir, ["remote", "add", "origin", remoteDir]); + yield* runGit(repoDir, ["push", "-u", "origin", "main"]); + yield* runGit(repoDir, ["checkout", "-b", "feature/pr-local-no-head-repo"]); + fs.writeFileSync(path.join(repoDir, "no-head-repo.txt"), "upstream\n"); + yield* runGit(repoDir, ["add", "no-head-repo.txt"]); + yield* runGit(repoDir, ["commit", "-m", "Local PR branch without repo metadata"]); + yield* runGit(repoDir, ["push", "-u", "origin", "feature/pr-local-no-head-repo"]); + yield* runGit(repoDir, ["checkout", "main"]); + yield* runGit(repoDir, ["branch", "-D", "feature/pr-local-no-head-repo"]); + yield* runGit(repoDir, [ + "update-ref", + "-d", + "refs/remotes/origin/feature/pr-local-no-head-repo", + ]); + + const { manager } = yield* makeManager({ + ghScenario: { + pullRequest: { + number: 66, + title: "Local upstream PR without repo metadata", + url: "https://github.com/pingdotgg/codething-mvp/pull/66", + baseRefName: "main", + headRefName: "feature/pr-local-no-head-repo", + state: "open", + }, + }, + }); + + const result = yield* preparePullRequestThread(manager, { + cwd: repoDir, + reference: "66", + mode: "local", + }); + + expect(result.worktreePath).toBeNull(); + expect(result.branch).toBe("feature/pr-local-no-head-repo"); + expect( + (yield* runGit(repoDir, ["rev-parse", "--abbrev-ref", "@{upstream}"])).stdout.trim(), + 
).toBe("origin/feature/pr-local-no-head-repo"); + }), + ); + it.effect("prepares pull request threads in worktree mode on the PR head branch", () => Effect.gen(function* () { const repoDir = yield* makeTempDir("t3code-git-manager-"); @@ -3094,7 +3329,7 @@ it.layer(GitManagerTestLayer)("GitManager", (it) => { expect.objectContaining({ kind: "phase_started", phase: "pr", - label: "Creating GitHub pull request...", + label: "Creating pull request...", }), ]); }), diff --git a/apps/server/src/git/Layers/GitManager.ts b/apps/server/src/git/GitManager.ts similarity index 84% rename from apps/server/src/git/Layers/GitManager.ts rename to apps/server/src/git/GitManager.ts index dadf2f7e79b..b77ee5169b1 100644 --- a/apps/server/src/git/Layers/GitManager.ts +++ b/apps/server/src/git/GitManager.ts @@ -1,55 +1,96 @@ import { randomUUID } from "node:crypto"; -import { realpathSync } from "node:fs"; import { + Array as Arr, Cache, + Context, + DateTime, Duration, Effect, Exit, FileSystem, Layer, Option, + Order, Path, Ref, - Result, } from "effect"; import { GitActionProgressEvent, GitActionProgressPhase, GitCommandError, + GitPreparePullRequestThreadInput, + GitPreparePullRequestThreadResult, + GitPullRequestRefInput, + GitResolvePullRequestResult, + GitRunStackedActionInput, GitRunStackedActionResult, GitStackedAction, - type GitStatusLocalResult, - type GitStatusRemoteResult, + VcsStatusInput, + type VcsStatusLocalResult, + type VcsStatusRemoteResult, + VcsStatusResult, ModelSelection, } from "@t3tools/contracts"; import { - detectGitHostingProviderFromRemoteUrl, + detectSourceControlProviderFromGitRemoteUrl, mergeGitStatusParts, resolveAutoFeatureBranchName, sanitizeBranchFragment, sanitizeFeatureBranchName, } from "@t3tools/shared/git"; +import { + getChangeRequestTerminologyForKind, + type ChangeRequestTerminology, +} from "@t3tools/shared/sourceControl"; import { GitManagerError } from "@t3tools/contracts"; -import { - GitManager, - type GitActionProgressReporter, - type 
GitManagerShape, - type GitRunStackedActionOptions, -} from "../Services/GitManager.ts"; -import { GitCore } from "../Services/GitCore.ts"; -import type { GitStatusDetails } from "../Services/GitCore.ts"; -import { GitHubCli, type GitHubPullRequestSummary } from "../Services/GitHubCli.ts"; -import { TextGeneration } from "../Services/TextGeneration.ts"; -import { ProjectSetupScriptRunner } from "../../project/Services/ProjectSetupScriptRunner.ts"; -import { extractBranchNameFromRemoteRef } from "../remoteRefs.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +import { TextGeneration } from "../textGeneration/TextGeneration.ts"; +import { ProjectSetupScriptRunner } from "../project/Services/ProjectSetupScriptRunner.ts"; +import { extractBranchNameFromRemoteRef } from "./remoteRefs.ts"; +import { ServerSettingsService } from "../serverSettings.ts"; import type { GitManagerServiceError } from "@t3tools/contracts"; -import { - decodeGitHubPullRequestListJson, - formatGitHubJsonDecodeError, -} from "../githubPullRequests.ts"; +import { GitVcsDriver, type GitStatusDetails } from "../vcs/GitVcsDriver.ts"; +import { SourceControlProviderRegistry } from "../sourceControl/SourceControlProviderRegistry.ts"; +import type { ChangeRequest } from "@t3tools/contracts"; + +export interface GitActionProgressReporter { + readonly publish: (event: GitActionProgressEvent) => Effect.Effect; +} + +export interface GitRunStackedActionOptions { + readonly actionId?: string; + readonly progressReporter?: GitActionProgressReporter; +} + +export interface GitManagerShape { + readonly status: ( + input: VcsStatusInput, + ) => Effect.Effect; + readonly localStatus: ( + input: VcsStatusInput, + ) => Effect.Effect; + readonly remoteStatus: ( + input: VcsStatusInput, + ) => Effect.Effect; + readonly invalidateLocalStatus: (cwd: string) => Effect.Effect; + readonly invalidateRemoteStatus: (cwd: string) => Effect.Effect; + readonly invalidateStatus: (cwd: string) => 
Effect.Effect; + readonly resolvePullRequest: ( + input: GitPullRequestRefInput, + ) => Effect.Effect; + readonly preparePullRequestThread: ( + input: GitPreparePullRequestThreadInput, + ) => Effect.Effect; + readonly runStackedAction: ( + input: GitRunStackedActionInput, + options?: GitRunStackedActionOptions, + ) => Effect.Effect; +} + +export class GitManager extends Context.Service()( + "t3/git/GitManager", +) {} const COMMIT_TIMEOUT_MS = 10 * 60_000; const MAX_PROGRESS_TEXT_LENGTH = 500; @@ -75,9 +116,14 @@ interface OpenPrInfo { interface PullRequestInfo extends OpenPrInfo, PullRequestHeadRemoteInfo { state: "open" | "closed" | "merged"; - updatedAt: string | null; + updatedAt: Option.Option; } +const pullRequestUpdatedAtDescOrder: Order.Order = Order.mapInput( + Order.flip(Option.makeOrder(DateTime.Order)), + (pullRequest) => pullRequest.updatedAt, +); + interface ResolvedPullRequest { number: number; title: string; @@ -88,9 +134,9 @@ interface ResolvedPullRequest { } interface PullRequestHeadRemoteInfo { - isCrossRepository?: boolean; - headRepositoryNameWithOwner?: string | null; - headRepositoryOwnerLogin?: string | null; + isCrossRepository?: boolean | undefined; + headRepositoryNameWithOwner?: string | null | undefined; + headRepositoryOwnerLogin?: string | null | undefined; } interface BranchHeadContext { @@ -256,7 +302,7 @@ function matchesBranchHeadContext( return true; } -function toPullRequestInfo(summary: GitHubPullRequestSummary): PullRequestInfo { +function toPullRequestInfo(summary: ChangeRequest): PullRequestInfo { return { number: summary.number, title: summary.title, @@ -264,7 +310,7 @@ function toPullRequestInfo(summary: GitHubPullRequestSummary): PullRequestInfo { baseRefName: summary.baseRefName, headRefName: summary.headRefName, state: summary.state ?? "open", - updatedAt: null, + updatedAt: summary.updatedAt, ...(summary.isCrossRepository !== undefined ? 
{ isCrossRepository: summary.isCrossRepository } : {}), @@ -311,13 +357,14 @@ function withDescription(title: string, description: string | undefined) { function summarizeGitActionResult( result: Pick, + terms: ChangeRequestTerminology, ): { title: string; description?: string; } { if (result.pr.status === "created" || result.pr.status === "opened_existing") { const prNumber = result.pr.number ? ` #${result.pr.number}` : ""; - const title = `${result.pr.status === "created" ? "Created PR" : "Opened PR"}${prNumber}`; + const title = `${result.pr.status === "created" ? "Created" : "Opened"} ${terms.shortLabel}${prNumber}`; return withDescription(title, truncateText(result.pr.title)); } @@ -422,16 +469,16 @@ function toStatusPr(pr: PullRequestInfo): { number: number; title: string; url: string; - baseBranch: string; - headBranch: string; + baseRef: string; + headRef: string; state: "open" | "closed" | "merged"; } { return { number: pr.number, title: pr.title, url: pr.url, - baseBranch: pr.baseRefName, - headBranch: pr.headRefName, + baseRef: pr.baseRefName, + headRef: pr.headRefName, state: pr.state, }; } @@ -442,14 +489,6 @@ function normalizePullRequestReference(reference: string): string { return hashNumber?.[1] ?? trimmed; } -function canonicalizeExistingPath(value: string): string { - try { - return realpathSync.native(value); - } catch { - return value; - } -} - function toResolvedPullRequest(pr: { number: number; title: string; @@ -475,9 +514,9 @@ function shouldPreferSshRemote(url: string | null): boolean { } function toPullRequestHeadRemoteInfo(pr: { - isCrossRepository?: boolean; - headRepositoryNameWithOwner?: string | null; - headRepositoryOwnerLogin?: string | null; + isCrossRepository?: boolean | undefined; + headRepositoryNameWithOwner?: string | null | undefined; + headRepositoryOwnerLogin?: string | null | undefined; }): PullRequestHeadRemoteInfo { return { ...(pr.isCrossRepository !== undefined ? 
{ isCrossRepository: pr.isCrossRepository } : {}), @@ -491,10 +530,12 @@ function toPullRequestHeadRemoteInfo(pr: { } export const makeGitManager = Effect.fn("makeGitManager")(function* () { - const gitCore = yield* GitCore; - const gitHubCli = yield* GitHubCli; + const gitCore = yield* GitVcsDriver; + const sourceControlProviders = yield* SourceControlProviderRegistry; const textGeneration = yield* TextGeneration; const projectSetupScriptRunner = yield* ProjectSetupScriptRunner; + + const sourceControlProvider = (cwd: string) => sourceControlProviders.resolve({ cwd }); const serverSettingsService = yield* ServerSettingsService; const createProgressEmitter = ( @@ -527,11 +568,27 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { localBranch = pullRequest.headBranch, ) { const repositoryNameWithOwner = resolveHeadRepositoryNameWithOwner(pullRequest) ?? ""; + if (repositoryNameWithOwner.length === 0 && pullRequest.isCrossRepository !== true) { + const remoteName = yield* gitCore.resolvePrimaryRemoteName(cwd); + yield* gitCore.fetchRemoteTrackingBranch({ + cwd, + remoteName, + remoteBranch: pullRequest.headBranch, + }); + yield* gitCore.setBranchUpstream({ + cwd, + branch: localBranch, + remoteName, + remoteBranch: pullRequest.headBranch, + }); + return; + } + if (repositoryNameWithOwner.length === 0) { return; } - const cloneUrls = yield* gitHubCli.getRepositoryCloneUrls({ + const cloneUrls = yield* (yield* sourceControlProvider(cwd)).getRepositoryCloneUrls({ cwd, repository: repositoryNameWithOwner, }); @@ -547,6 +604,11 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { url: remoteUrl, }); + yield* gitCore.fetchRemoteTrackingBranch({ + cwd, + remoteName, + remoteBranch: pullRequest.headBranch, + }); yield* gitCore.setBranchUpstream({ cwd, branch: localBranch, @@ -586,7 +648,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { return; } - const cloneUrls = yield* 
gitHubCli.getRepositoryCloneUrls({ + const cloneUrls = yield* (yield* sourceControlProvider(cwd)).getRepositoryCloneUrls({ cwd, repository: repositoryNameWithOwner, }); @@ -635,7 +697,9 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { const path = yield* Path.Path; const tempDir = process.env.TMPDIR ?? process.env.TEMP ?? process.env.TMP ?? "/tmp"; - const normalizeStatusCacheKey = (cwd: string) => canonicalizeExistingPath(cwd); + const canonicalizeExistingPath = (value: string) => + fileSystem.realPath(value).pipe(Effect.catch(() => Effect.succeed(value))); + const normalizeStatusCacheKey = canonicalizeExistingPath; const nonRepositoryStatusDetails = { isRepo: false, hasOriginRemote: false, @@ -647,6 +711,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { hasUpstream: false, aheadCount: 0, behindCount: 0, + aheadOfDefaultCount: 0, } satisfies GitStatusDetails; const readLocalStatus = Effect.fn("readLocalStatus")(function* (cwd: string) { const details = yield* gitCore @@ -660,20 +725,22 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { return { isRepo: details.isRepo, - ...(hostingProvider ? { hostingProvider } : {}), - hasOriginRemote: details.hasOriginRemote, - isDefaultBranch: details.isDefaultBranch, - branch: details.branch, + ...(hostingProvider ? { sourceControlProvider: hostingProvider } : {}), + hasPrimaryRemote: details.hasOriginRemote, + isDefaultRef: details.isDefaultBranch, + refName: details.branch, hasWorkingTreeChanges: details.hasWorkingTreeChanges, workingTree: details.workingTree, - } satisfies GitStatusLocalResult; + } satisfies VcsStatusLocalResult; }); const localStatusResultCache = yield* Cache.makeWith(readLocalStatus, { capacity: STATUS_RESULT_CACHE_CAPACITY, timeToLive: (exit) => (Exit.isSuccess(exit) ? 
STATUS_RESULT_CACHE_TTL : Duration.zero), }); const invalidateLocalStatusResultCache = (cwd: string) => - Cache.invalidate(localStatusResultCache, normalizeStatusCacheKey(cwd)); + normalizeStatusCacheKey(cwd).pipe( + Effect.flatMap((cacheKey) => Cache.invalidate(localStatusResultCache, cacheKey)), + ); const readRemoteStatus = Effect.fn("readRemoteStatus")(function* (cwd: string) { const details = yield* gitCore .statusDetails(cwd) @@ -688,7 +755,13 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { branch: details.branch, upstreamRef: details.upstreamRef, }).pipe( - Effect.map((latest) => (latest ? toStatusPr(latest) : null)), + Effect.map((latest) => { + if (!latest) return null; + // On the default branch, only surface open PRs. + // Merged/closed matches are usually reverse-merge history, not the thread's PR context. + if (details.isDefaultBranch && latest.state !== "open") return null; + return toStatusPr(latest); + }), Effect.catch(() => Effect.succeed(null)), ) : null; @@ -697,15 +770,18 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { hasUpstream: details.hasUpstream, aheadCount: details.aheadCount, behindCount: details.behindCount, + aheadOfDefaultCount: details.aheadOfDefaultCount, pr, - } satisfies GitStatusRemoteResult; + } satisfies VcsStatusRemoteResult; }); const remoteStatusResultCache = yield* Cache.makeWith(readRemoteStatus, { capacity: STATUS_RESULT_CACHE_CAPACITY, timeToLive: (exit) => (Exit.isSuccess(exit) ? 
STATUS_RESULT_CACHE_TTL : Duration.zero), }); const invalidateRemoteStatusResultCache = (cwd: string) => - Cache.invalidate(remoteStatusResultCache, normalizeStatusCacheKey(cwd)); + normalizeStatusCacheKey(cwd).pipe( + Effect.flatMap((cacheKey) => Cache.invalidate(remoteStatusResultCache, cacheKey)), + ); const readConfigValueNullable = (cwd: string, key: string) => gitCore.readConfigValue(cwd, key).pipe(Effect.catch(() => Effect.succeed(null))); @@ -722,7 +798,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { (yield* readConfigValueNullable(cwd, `remote.${preferredRemoteName}.url`)) ?? (yield* readConfigValueNullable(cwd, "remote.origin.url")); - return remoteUrl ? detectGitHostingProviderFromRemoteUrl(remoteUrl) : null; + return remoteUrl ? detectSourceControlProviderFromGitRemoteUrl(remoteUrl) : null; }); const resolveRemoteRepositoryContext = Effect.fn("resolveRemoteRepositoryContext")(function* ( @@ -827,9 +903,10 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { >, ) { for (const headSelector of headContext.headSelectors) { - const pullRequests = yield* gitHubCli.listOpenPullRequests({ + const pullRequests = yield* (yield* sourceControlProvider(cwd)).listChangeRequests({ cwd, headSelector, + state: "open", limit: 1, }); const normalizedPullRequests = pullRequests.map(toPullRequestInfo); @@ -845,7 +922,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { baseRefName: firstPullRequest.baseRefName, headRefName: firstPullRequest.headRefName, state: "open", - updatedAt: null, + updatedAt: Option.none(), } satisfies PullRequestInfo; } } @@ -861,46 +938,14 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { const parsedByNumber = new Map(); for (const headSelector of headContext.headSelectors) { - const stdout = yield* gitHubCli - .execute({ - cwd, - args: [ - "pr", - "list", - "--head", - headSelector, - "--state", - "all", - "--limit", - "20", - "--json", - 
"number,title,url,baseRefName,headRefName,state,mergedAt,updatedAt,isCrossRepository,headRepository,headRepositoryOwner", - ], - }) - .pipe(Effect.map((result) => result.stdout)); - - const raw = stdout.trim(); - if (raw.length === 0) { - continue; - } - - const pullRequests = yield* Effect.sync(() => decodeGitHubPullRequestListJson(raw)).pipe( - Effect.flatMap((decoded) => { - if (!Result.isSuccess(decoded)) { - return Effect.fail( - gitManagerError( - "findLatestPr", - `GitHub CLI returned invalid PR list JSON: ${formatGitHubJsonDecodeError(decoded.failure)}`, - decoded.failure, - ), - ); - } - - return Effect.succeed(decoded.success); - }), - ); + const pullRequests = yield* (yield* sourceControlProvider(cwd)).listChangeRequests({ + cwd, + headSelector, + state: "all", + limit: 20, + }); - for (const pr of pullRequests) { + for (const pr of pullRequests.map(toPullRequestInfo)) { if (!matchesBranchHeadContext(pr, headContext)) { continue; } @@ -908,11 +953,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { } } - const parsed = Array.from(parsedByNumber.values()).toSorted((a, b) => { - const left = a.updatedAt ? Date.parse(a.updatedAt) : 0; - const right = b.updatedAt ? 
Date.parse(b.updatedAt) : 0; - return right - left; - }); + const parsed = Arr.sort(parsedByNumber.values(), pullRequestUpdatedAtDescOrder); const latestOpenPr = parsed.find((pr) => pr.state === "open"); if (latestOpenPr) { @@ -925,7 +966,11 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { cwd: string, result: Pick, ) { - const summary = summarizeGitActionResult(result); + const terms = yield* sourceControlProvider(cwd).pipe( + Effect.map((provider) => getChangeRequestTerminologyForKind(provider.kind)), + Effect.catch(() => Effect.succeed(getChangeRequestTerminologyForKind("unknown"))), + ); + const summary = summarizeGitActionResult(result, terms); let latestOpenPr: PullRequestInfo | null = null; let currentBranchIsDefault = false; let finalBranchContext: { @@ -990,7 +1035,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { result.pr.status === "opened_existing") ? { kind: "open_pr" as const, - label: "View PR", + label: `View ${terms.shortLabel}`, url: openPr.url, } : (result.action === "push" || result.action === "commit_push") && @@ -998,7 +1043,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { !currentBranchIsDefault ? 
{ kind: "run_action" as const, - label: "Create PR", + label: `Create ${terms.shortLabel}`, action: { kind: "create_pr" as const }, } : { @@ -1029,11 +1074,12 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { } } - const defaultFromGh = yield* gitHubCli - .getDefaultBranch({ cwd }) - .pipe(Effect.catch(() => Effect.succeed(null))); - if (defaultFromGh) { - return defaultFromGh; + const defaultFromProvider = yield* sourceControlProvider(cwd).pipe( + Effect.flatMap((provider) => provider.getDefaultBranch({ cwd })), + Effect.catch(() => Effect.succeed(null)), + ); + if (defaultFromProvider) { + return defaultFromProvider; } return "main"; @@ -1205,6 +1251,8 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { fallbackBranch: string | null, emit: GitActionProgressEmitter, ) { + const provider = yield* sourceControlProvider(cwd); + const terms = getChangeRequestTerminologyForKind(provider.kind); const details = yield* gitCore.statusDetails(cwd); const branch = details.branch ?? 
fallbackBranch; if (!branch) { @@ -1241,7 +1289,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { yield* emit({ kind: "phase_started", phase: "pr", - label: "Generating PR content...", + label: `Generating ${terms.shortLabel} content...`, }); const rangeContext = yield* gitCore.readRangeContext(cwd, baseBranch); @@ -1266,12 +1314,12 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { yield* emit({ kind: "phase_started", phase: "pr", - label: "Creating GitHub pull request...", + label: `Creating ${terms.singular}...`, }); - yield* gitHubCli - .createPullRequest({ + yield* provider + .createChangeRequest({ cwd, - baseBranch, + baseRefName: baseBranch, headSelector: headContext.preferredHeadSelector, title: generated.title, bodyFile, @@ -1299,11 +1347,13 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { }); const localStatus: GitManagerShape["localStatus"] = Effect.fn("localStatus")(function* (input) { - return yield* Cache.get(localStatusResultCache, normalizeStatusCacheKey(input.cwd)); + const cacheKey = yield* normalizeStatusCacheKey(input.cwd); + return yield* Cache.get(localStatusResultCache, cacheKey); }); const remoteStatus: GitManagerShape["remoteStatus"] = Effect.fn("remoteStatus")( function* (input) { - return yield* Cache.get(remoteStatusResultCache, normalizeStatusCacheKey(input.cwd)); + const cacheKey = yield* normalizeStatusCacheKey(input.cwd); + return yield* Cache.get(remoteStatusResultCache, cacheKey); }, ); const status: GitManagerShape["status"] = Effect.fn("status")(function* (input) { @@ -1329,8 +1379,8 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { const resolvePullRequest: GitManagerShape["resolvePullRequest"] = Effect.fn("resolvePullRequest")( function* (input) { - const pullRequest = yield* gitHubCli - .getPullRequest({ + const pullRequest = yield* (yield* sourceControlProvider(input.cwd)) + .getChangeRequest({ cwd: input.cwd, reference: 
normalizePullRequestReference(input.reference), }) @@ -1363,15 +1413,15 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { }; return yield* Effect.gen(function* () { const normalizedReference = normalizePullRequestReference(input.reference); - const rootWorktreePath = canonicalizeExistingPath(input.cwd); - const pullRequestSummary = yield* gitHubCli.getPullRequest({ + const rootWorktreePath = yield* canonicalizeExistingPath(input.cwd); + const pullRequestSummary = yield* (yield* sourceControlProvider(input.cwd)).getChangeRequest({ cwd: input.cwd, reference: normalizedReference, }); const pullRequest = toResolvedPullRequest(pullRequestSummary); if (input.mode === "local") { - yield* gitHubCli.checkoutPullRequest({ + yield* (yield* sourceControlProvider(input.cwd)).checkoutChangeRequest({ cwd: input.cwd, reference: normalizedReference, force: true, @@ -1413,33 +1463,35 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { const localPullRequestBranch = resolvePullRequestWorktreeLocalBranchName(pullRequestWithRemoteInfo); - const findLocalHeadBranch = (cwd: string) => - gitCore.listBranches({ cwd }).pipe( - Effect.map((result) => { - const localBranch = result.branches.find( - (branch) => !branch.isRemote && branch.name === localPullRequestBranch, - ); - if (localBranch) { - return localBranch; - } - if (localPullRequestBranch === pullRequest.headBranch) { - return null; - } - return ( - result.branches.find( - (branch) => - !branch.isRemote && - branch.name === pullRequest.headBranch && - branch.worktreePath !== null && - canonicalizeExistingPath(branch.worktreePath) !== rootWorktreePath, - ) ?? 
null - ); - }), + const findLocalHeadBranch = Effect.fn("findLocalHeadBranch")(function* (cwd: string) { + const result = yield* gitCore.listRefs({ cwd }); + const localBranch = result.refs.find( + (branch) => !branch.isRemote && branch.name === localPullRequestBranch, ); + if (localBranch) { + return localBranch; + } + if (localPullRequestBranch === pullRequest.headBranch) { + return null; + } + + for (const branch of result.refs) { + if (branch.isRemote || branch.name !== pullRequest.headBranch || !branch.worktreePath) { + continue; + } + + const worktreePath = yield* canonicalizeExistingPath(branch.worktreePath); + if (worktreePath !== rootWorktreePath) { + return branch; + } + } + + return null; + }); const existingBranchBeforeFetch = yield* findLocalHeadBranch(input.cwd); const existingBranchBeforeFetchPath = existingBranchBeforeFetch?.worktreePath - ? canonicalizeExistingPath(existingBranchBeforeFetch.worktreePath) + ? yield* canonicalizeExistingPath(existingBranchBeforeFetch.worktreePath) : null; if ( existingBranchBeforeFetch?.worktreePath && @@ -1467,7 +1519,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { const existingBranchAfterFetch = yield* findLocalHeadBranch(input.cwd); const existingBranchAfterFetchPath = existingBranchAfterFetch?.worktreePath - ? canonicalizeExistingPath(existingBranchAfterFetch.worktreePath) + ? 
yield* canonicalizeExistingPath(existingBranchAfterFetch.worktreePath) : null; if ( existingBranchAfterFetch?.worktreePath && @@ -1489,7 +1541,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { const worktree = yield* gitCore.createWorktree({ cwd: input.cwd, - branch: localPullRequestBranch, + refName: localPullRequestBranch, path: null, }); yield* ensureExistingWorktreeUpstream(worktree.worktree.path); @@ -1497,7 +1549,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { return { pullRequest, - branch: worktree.worktree.branch, + branch: worktree.worktree.refName, worktreePath: worktree.worktree.path, }; }).pipe(Effect.ensuring(invalidateStatus(input.cwd))); @@ -1529,8 +1581,8 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { const existingBranchNames = yield* gitCore.listLocalBranchNames(cwd); const resolvedBranch = resolveAutoFeatureBranchName(existingBranchNames, preferredBranch); - yield* gitCore.createBranch({ cwd, branch: resolvedBranch }); - yield* Effect.scoped(gitCore.checkoutBranch({ cwd, branch: resolvedBranch })); + yield* gitCore.createRef({ cwd, refName: resolvedBranch }); + yield* Effect.scoped(gitCore.switchRef({ cwd, refName: resolvedBranch })); return { branchStep: { status: "created" as const, name: resolvedBranch }, @@ -1564,12 +1616,6 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { "Feature-branch checkout is only supported for commit actions.", ); } - if (input.action === "push" && initialStatus.hasWorkingTreeChanges) { - return yield* gitManagerError( - "runStackedAction", - "Commit or stash local changes before pushing.", - ); - } if (input.action === "create_pr" && initialStatus.hasWorkingTreeChanges) { return yield* gitManagerError( "runStackedAction", @@ -1633,6 +1679,12 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { const currentBranch = branchStep.name ?? 
initialStatus.branch; const commitAction = isCommitAction(input.action) ? input.action : null; + const changeRequestTerms = wantsPr + ? yield* sourceControlProvider(input.cwd).pipe( + Effect.map((provider) => getChangeRequestTerminologyForKind(provider.kind)), + Effect.catch(() => Effect.succeed(getChangeRequestTerminologyForKind("unknown"))), + ) + : null; const commit = commitAction ? yield* Ref.set(currentPhase, Option.some("commit")).pipe( @@ -1670,7 +1722,7 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { .emit({ kind: "phase_started", phase: "pr", - label: "Preparing PR...", + label: `Preparing ${changeRequestTerms?.shortLabel ?? "PR"}...`, }) .pipe( Effect.tap(() => Ref.set(currentPhase, Option.some("pr"))), @@ -1731,4 +1783,4 @@ export const makeGitManager = Effect.fn("makeGitManager")(function* () { } satisfies GitManagerShape; }); -export const GitManagerLive = Layer.effect(GitManager, makeGitManager()); +export const layer = Layer.effect(GitManager, makeGitManager()); diff --git a/apps/server/src/git/GitWorkflowService.test.ts b/apps/server/src/git/GitWorkflowService.test.ts new file mode 100644 index 00000000000..dd7273b40c9 --- /dev/null +++ b/apps/server/src/git/GitWorkflowService.test.ts @@ -0,0 +1,132 @@ +import { assert, describe, it, vi } from "@effect/vitest"; +import { Effect, Layer } from "effect"; + +import * as GitManager from "./GitManager.ts"; +import * as GitWorkflowService from "./GitWorkflowService.ts"; +import * as GitVcsDriver from "../vcs/GitVcsDriver.ts"; +import * as VcsDriverRegistry from "../vcs/VcsDriverRegistry.ts"; + +function makeLayer(input: { readonly detect: VcsDriverRegistry.VcsDriverRegistryShape["detect"] }) { + return GitWorkflowService.layer.pipe( + Layer.provide( + Layer.mock(VcsDriverRegistry.VcsDriverRegistry)({ + detect: input.detect, + }), + ), + Layer.provide(Layer.mock(GitVcsDriver.GitVcsDriver)({})), + Layer.provide(Layer.mock(GitManager.GitManager)({})), + ); +} + 
+describe("GitWorkflowService", () => { + it.effect("returns an empty local status when no VCS repository is detected", () => + Effect.gen(function* () { + const workflow = yield* GitWorkflowService.GitWorkflowService; + const status = yield* workflow.localStatus({ cwd: "/not-a-repo" }); + + assert.deepStrictEqual(status, { + isRepo: false, + hasPrimaryRemote: false, + isDefaultRef: false, + refName: null, + hasWorkingTreeChanges: false, + workingTree: { + files: [], + insertions: 0, + deletions: 0, + }, + }); + }).pipe( + Effect.provide( + makeLayer({ + detect: () => Effect.succeed(null), + }), + ), + ), + ); + + it.effect("returns an empty full status when no VCS repository is detected", () => + Effect.gen(function* () { + const workflow = yield* GitWorkflowService.GitWorkflowService; + const status = yield* workflow.status({ cwd: "/not-a-repo" }); + + assert.deepStrictEqual(status, { + isRepo: false, + hasPrimaryRemote: false, + isDefaultRef: false, + refName: null, + hasWorkingTreeChanges: false, + workingTree: { + files: [], + insertions: 0, + deletions: 0, + }, + hasUpstream: false, + aheadCount: 0, + behindCount: 0, + aheadOfDefaultCount: 0, + pr: null, + }); + }).pipe( + Effect.provide( + makeLayer({ + detect: () => Effect.succeed(null), + }), + ), + ), + ); + + it.effect("does not call GitManager status methods when no VCS repository is detected", () => { + const localStatus = vi.fn(); + const remoteStatus = vi.fn(); + const status = vi.fn(); + + const testLayer = GitWorkflowService.layer.pipe( + Layer.provide( + Layer.mock(VcsDriverRegistry.VcsDriverRegistry)({ + detect: () => Effect.succeed(null), + }), + ), + Layer.provide(Layer.mock(GitVcsDriver.GitVcsDriver)({})), + Layer.provide( + Layer.mock(GitManager.GitManager)({ + localStatus, + remoteStatus, + status, + }), + ), + ); + + return Effect.gen(function* () { + const workflow = yield* GitWorkflowService.GitWorkflowService; + yield* workflow.localStatus({ cwd: "/not-a-repo" }); + yield* 
workflow.remoteStatus({ cwd: "/not-a-repo" }); + yield* workflow.status({ cwd: "/not-a-repo" }); + + assert.equal(localStatus.mock.calls.length, 0); + assert.equal(remoteStatus.mock.calls.length, 0); + assert.equal(status.mock.calls.length, 0); + }).pipe(Effect.provide(testLayer)); + }); + + it.effect("returns an empty ref list when no VCS repository is detected", () => + Effect.gen(function* () { + const workflow = yield* GitWorkflowService.GitWorkflowService; + const refs = yield* workflow.listRefs({ cwd: "/not-a-repo" }); + + assert.deepStrictEqual(refs, { + refs: [], + isRepo: false, + hasPrimaryRemote: false, + nextCursor: null, + totalCount: 0, + }); + }).pipe( + Effect.provide( + makeLayer({ + detect: () => Effect.succeed(null), + }), + ), + ), + ); +}); diff --git a/apps/server/src/git/GitWorkflowService.ts b/apps/server/src/git/GitWorkflowService.ts new file mode 100644 index 00000000000..70ab6eecf1f --- /dev/null +++ b/apps/server/src/git/GitWorkflowService.ts @@ -0,0 +1,314 @@ +import { Context, Effect, Layer } from "effect"; + +import { + GitManagerError, + GitCommandError, + type VcsSwitchRefInput, + type VcsSwitchRefResult, + type VcsCreateRefInput, + type VcsCreateRefResult, + type VcsCreateWorktreeInput, + type VcsCreateWorktreeResult, + type VcsListRefsInput, + type VcsListRefsResult, + type GitManagerServiceError, + type GitPreparePullRequestThreadInput, + type GitPreparePullRequestThreadResult, + type GitPullRequestRefInput, + type VcsPullResult, + type VcsRemoveWorktreeInput, + type GitResolvePullRequestResult, + type GitRunStackedActionInput, + type GitRunStackedActionResult, + type VcsStatusInput, + type VcsStatusLocalResult, + type VcsStatusRemoteResult, + type VcsStatusResult, +} from "@t3tools/contracts"; + +import { GitManager, type GitRunStackedActionOptions } from "./GitManager.ts"; +import { GitVcsDriver } from "../vcs/GitVcsDriver.ts"; +import { VcsDriverRegistry } from "../vcs/VcsDriverRegistry.ts"; + +export interface 
GitWorkflowServiceShape { + readonly status: ( + input: VcsStatusInput, + ) => Effect.Effect; + readonly localStatus: ( + input: VcsStatusInput, + ) => Effect.Effect; + readonly remoteStatus: ( + input: VcsStatusInput, + ) => Effect.Effect; + readonly invalidateLocalStatus: (cwd: string) => Effect.Effect; + readonly invalidateRemoteStatus: (cwd: string) => Effect.Effect; + readonly invalidateStatus: (cwd: string) => Effect.Effect; + readonly pullCurrentBranch: (cwd: string) => Effect.Effect; + readonly runStackedAction: ( + input: GitRunStackedActionInput, + options?: GitRunStackedActionOptions, + ) => Effect.Effect; + readonly resolvePullRequest: ( + input: GitPullRequestRefInput, + ) => Effect.Effect; + readonly preparePullRequestThread: ( + input: GitPreparePullRequestThreadInput, + ) => Effect.Effect; + readonly listRefs: (input: VcsListRefsInput) => Effect.Effect; + readonly createWorktree: ( + input: VcsCreateWorktreeInput, + ) => Effect.Effect; + readonly removeWorktree: (input: VcsRemoveWorktreeInput) => Effect.Effect; + readonly createRef: ( + input: VcsCreateRefInput, + ) => Effect.Effect; + readonly switchRef: ( + input: VcsSwitchRefInput, + ) => Effect.Effect; + readonly renameBranch: (input: { + readonly cwd: string; + readonly oldBranch: string; + readonly newBranch: string; + }) => Effect.Effect<{ readonly branch: string }, GitManagerServiceError>; +} + +export class GitWorkflowService extends Context.Service< + GitWorkflowService, + GitWorkflowServiceShape +>()("t3/git/GitWorkflowService") {} + +const unsupportedGitWorkflow = (operation: string, cwd: string, detail: string) => + new GitManagerError({ + operation, + detail: `${detail} (${cwd})`, + }); + +const unsupportedGitCommand = (operation: string, cwd: string, detail: string) => + new GitCommandError({ + operation, + command: "vcs-route", + cwd, + detail, + }); + +function nonRepositoryLocalStatus(): VcsStatusLocalResult { + return { + isRepo: false, + hasPrimaryRemote: false, + isDefaultRef: 
false, + refName: null, + hasWorkingTreeChanges: false, + workingTree: { + files: [], + insertions: 0, + deletions: 0, + }, + }; +} + +function nonRepositoryStatus(): VcsStatusResult { + return { + ...nonRepositoryLocalStatus(), + hasUpstream: false, + aheadCount: 0, + behindCount: 0, + aheadOfDefaultCount: 0, + pr: null, + }; +} + +function nonRepositoryListRefs(): VcsListRefsResult { + return { + refs: [], + isRepo: false, + hasPrimaryRemote: false, + nextCursor: null, + totalCount: 0, + }; +} + +export const make = Effect.fn("makeGitWorkflowService")(function* () { + const registry = yield* VcsDriverRegistry; + const git = yield* GitVcsDriver; + const gitManager = yield* GitManager; + + const ensureGit = Effect.fn("GitWorkflowService.ensureGit")(function* ( + operation: string, + cwd: string, + ) { + const handle = yield* registry + .resolve({ cwd }) + .pipe( + Effect.mapError((error) => + unsupportedGitWorkflow( + operation, + cwd, + error instanceof Error ? error.message : String(error), + ), + ), + ); + if (handle.kind !== "git") { + return yield* unsupportedGitWorkflow( + operation, + cwd, + `The ${operation} workflow currently supports Git repositories only; detected ${handle.kind}.`, + ); + } + }); + + const ensureGitCommand = Effect.fn("GitWorkflowService.ensureGitCommand")(function* ( + operation: string, + cwd: string, + ) { + const handle = yield* registry + .resolve({ cwd }) + .pipe( + Effect.mapError((error) => + unsupportedGitCommand( + operation, + cwd, + error instanceof Error ? 
error.message : String(error), + ), + ), + ); + if (handle.kind !== "git") { + return yield* unsupportedGitCommand( + operation, + cwd, + `The ${operation} command currently supports Git repositories only; detected ${handle.kind}.`, + ); + } + }); + + const detectGitRepositoryForStatus = Effect.fn("GitWorkflowService.detectGitRepositoryForStatus")( + function* (operation: string, cwd: string) { + const handle = yield* registry + .detect({ cwd }) + .pipe( + Effect.mapError((error) => + unsupportedGitWorkflow( + operation, + cwd, + error instanceof Error ? error.message : String(error), + ), + ), + ); + if (!handle) { + return false; + } + if (handle.kind !== "git") { + return yield* unsupportedGitWorkflow( + operation, + cwd, + `The ${operation} workflow currently supports Git repositories only; detected ${handle.kind}.`, + ); + } + return true; + }, + ); + + const detectGitRepositoryForCommand = Effect.fn( + "GitWorkflowService.detectGitRepositoryForCommand", + )(function* (operation: string, cwd: string) { + const handle = yield* registry + .detect({ cwd }) + .pipe( + Effect.mapError((error) => + unsupportedGitCommand( + operation, + cwd, + error instanceof Error ? error.message : String(error), + ), + ), + ); + if (!handle) { + return false; + } + if (handle.kind !== "git") { + return yield* unsupportedGitCommand( + operation, + cwd, + `The ${operation} command currently supports Git repositories only; detected ${handle.kind}.`, + ); + } + return true; + }); + + const routeGitManager = + ( + operation: string, + run: (input: Input) => Effect.Effect, + ) => + (input: Input) => + ensureGit(operation, input.cwd).pipe(Effect.andThen(run(input))); + + return GitWorkflowService.of({ + status: (input) => + detectGitRepositoryForStatus("GitWorkflowService.status", input.cwd).pipe( + Effect.flatMap((isGitRepository) => + isGitRepository ? 
gitManager.status(input) : Effect.succeed(nonRepositoryStatus()), + ), + ), + localStatus: (input) => + detectGitRepositoryForStatus("GitWorkflowService.localStatus", input.cwd).pipe( + Effect.flatMap((isGitRepository) => + isGitRepository + ? gitManager.localStatus(input) + : Effect.succeed(nonRepositoryLocalStatus()), + ), + ), + remoteStatus: (input) => + detectGitRepositoryForStatus("GitWorkflowService.remoteStatus", input.cwd).pipe( + Effect.flatMap((isGitRepository) => + isGitRepository ? gitManager.remoteStatus(input) : Effect.succeed(null), + ), + ), + invalidateLocalStatus: gitManager.invalidateLocalStatus, + invalidateRemoteStatus: gitManager.invalidateRemoteStatus, + invalidateStatus: gitManager.invalidateStatus, + pullCurrentBranch: (cwd) => + ensureGitCommand("GitWorkflowService.pullCurrentBranch", cwd).pipe( + Effect.andThen(git.pullCurrentBranch(cwd)), + ), + runStackedAction: (input, options) => + ensureGit("GitWorkflowService.runStackedAction", input.cwd).pipe( + Effect.andThen(gitManager.runStackedAction(input, options)), + ), + resolvePullRequest: routeGitManager( + "GitWorkflowService.resolvePullRequest", + gitManager.resolvePullRequest, + ), + preparePullRequestThread: routeGitManager( + "GitWorkflowService.preparePullRequestThread", + gitManager.preparePullRequestThread, + ), + listRefs: (input) => + detectGitRepositoryForCommand("GitWorkflowService.listRefs", input.cwd).pipe( + Effect.flatMap((isGitRepository) => + isGitRepository ? 
git.listRefs(input) : Effect.succeed(nonRepositoryListRefs()), + ), + ), + createWorktree: (input) => + ensureGitCommand("GitWorkflowService.createWorktree", input.cwd).pipe( + Effect.andThen(git.createWorktree(input)), + ), + removeWorktree: (input) => + ensureGitCommand("GitWorkflowService.removeWorktree", input.cwd).pipe( + Effect.andThen(git.removeWorktree(input)), + ), + createRef: (input) => + ensureGitCommand("GitWorkflowService.createRef", input.cwd).pipe( + Effect.andThen(git.createRef(input)), + ), + switchRef: (input) => + ensureGitCommand("GitWorkflowService.switchRef", input.cwd).pipe( + Effect.andThen(Effect.scoped(git.switchRef(input))), + ), + renameBranch: (input) => + ensureGit("GitWorkflowService.renameBranch", input.cwd).pipe( + Effect.andThen(git.renameBranch(input)), + ), + }); +}); + +export const layer = Layer.effect(GitWorkflowService, make()); diff --git a/apps/server/src/git/Layers/ClaudeTextGeneration.test.ts b/apps/server/src/git/Layers/ClaudeTextGeneration.test.ts deleted file mode 100644 index 08471346989..00000000000 --- a/apps/server/src/git/Layers/ClaudeTextGeneration.test.ts +++ /dev/null @@ -1,309 +0,0 @@ -import * as NodeServices from "@effect/platform-node/NodeServices"; -import { it } from "@effect/vitest"; -import { Effect, FileSystem, Layer, Path } from "effect"; -import { expect } from "vitest"; - -import { ServerConfig } from "../../config.ts"; -import { TextGeneration } from "../Services/TextGeneration.ts"; -import { sanitizeThreadTitle } from "../Utils.ts"; -import { ClaudeTextGenerationLive } from "./ClaudeTextGeneration.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; - -const ClaudeTextGenerationTestLayer = ClaudeTextGenerationLive.pipe( - Layer.provideMerge(ServerSettingsService.layerTest()), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3code-claude-text-generation-test-", - }), - ), - Layer.provideMerge(NodeServices.layer), -); - -function 
makeFakeClaudeBinary(dir: string) { - return Effect.gen(function* () { - const fs = yield* FileSystem.FileSystem; - const path = yield* Path.Path; - const binDir = path.join(dir, "bin"); - const claudePath = path.join(binDir, "claude"); - yield* fs.makeDirectory(binDir, { recursive: true }); - - yield* fs.writeFileString( - claudePath, - [ - "#!/bin/sh", - 'args="$*"', - 'stdin_content="$(cat)"', - 'if [ -n "$T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN" ]; then', - ' printf "%s" "$args" | grep -F -- "$T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN" >/dev/null || {', - ' printf "%s\\n" "args missing expected content" >&2', - " exit 2", - " }", - "fi", - 'if [ -n "$T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN" ]; then', - ' if printf "%s" "$args" | grep -F -- "$T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN" >/dev/null; then', - ' printf "%s\\n" "args contained forbidden content" >&2', - " exit 3", - " fi", - "fi", - 'if [ -n "$T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN" ]; then', - ' printf "%s" "$stdin_content" | grep -F -- "$T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN" >/dev/null || {', - ' printf "%s\\n" "stdin missing expected content" >&2', - " exit 4", - " }", - "fi", - 'if [ -n "$T3_FAKE_CLAUDE_STDERR" ]; then', - ' printf "%s\\n" "$T3_FAKE_CLAUDE_STDERR" >&2', - "fi", - 'printf "%s" "$T3_FAKE_CLAUDE_OUTPUT"', - 'exit "${T3_FAKE_CLAUDE_EXIT_CODE:-0}"', - "", - ].join("\n"), - ); - yield* fs.chmod(claudePath, 0o755); - return binDir; - }); -} - -function withFakeClaudeEnv( - input: { - output: string; - exitCode?: number; - stderr?: string; - argsMustContain?: string; - argsMustNotContain?: string; - stdinMustContain?: string; - }, - effect: Effect.Effect, -) { - return Effect.acquireUseRelease( - Effect.gen(function* () { - const fs = yield* FileSystem.FileSystem; - const tempDir = yield* fs.makeTempDirectoryScoped({ prefix: "t3code-claude-text-" }); - const binDir = yield* makeFakeClaudeBinary(tempDir); - const previousPath = process.env.PATH; - const previousOutput = process.env.T3_FAKE_CLAUDE_OUTPUT; - const 
previousExitCode = process.env.T3_FAKE_CLAUDE_EXIT_CODE; - const previousStderr = process.env.T3_FAKE_CLAUDE_STDERR; - const previousArgsMustContain = process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN; - const previousArgsMustNotContain = process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN; - const previousStdinMustContain = process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN; - - yield* Effect.sync(() => { - process.env.PATH = `${binDir}:${previousPath ?? ""}`; - process.env.T3_FAKE_CLAUDE_OUTPUT = input.output; - - if (input.exitCode !== undefined) { - process.env.T3_FAKE_CLAUDE_EXIT_CODE = String(input.exitCode); - } else { - delete process.env.T3_FAKE_CLAUDE_EXIT_CODE; - } - - if (input.stderr !== undefined) { - process.env.T3_FAKE_CLAUDE_STDERR = input.stderr; - } else { - delete process.env.T3_FAKE_CLAUDE_STDERR; - } - - if (input.argsMustContain !== undefined) { - process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN = input.argsMustContain; - } else { - delete process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN; - } - - if (input.argsMustNotContain !== undefined) { - process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN = input.argsMustNotContain; - } else { - delete process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN; - } - - if (input.stdinMustContain !== undefined) { - process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN = input.stdinMustContain; - } else { - delete process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN; - } - }); - - return { - previousPath, - previousOutput, - previousExitCode, - previousStderr, - previousArgsMustContain, - previousArgsMustNotContain, - previousStdinMustContain, - }; - }), - () => effect, - (previous) => - Effect.sync(() => { - process.env.PATH = previous.previousPath; - - if (previous.previousOutput === undefined) { - delete process.env.T3_FAKE_CLAUDE_OUTPUT; - } else { - process.env.T3_FAKE_CLAUDE_OUTPUT = previous.previousOutput; - } - - if (previous.previousExitCode === undefined) { - delete process.env.T3_FAKE_CLAUDE_EXIT_CODE; - } else { - 
process.env.T3_FAKE_CLAUDE_EXIT_CODE = previous.previousExitCode; - } - - if (previous.previousStderr === undefined) { - delete process.env.T3_FAKE_CLAUDE_STDERR; - } else { - process.env.T3_FAKE_CLAUDE_STDERR = previous.previousStderr; - } - - if (previous.previousArgsMustContain === undefined) { - delete process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN; - } else { - process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN = previous.previousArgsMustContain; - } - - if (previous.previousArgsMustNotContain === undefined) { - delete process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN; - } else { - process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN = previous.previousArgsMustNotContain; - } - - if (previous.previousStdinMustContain === undefined) { - delete process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN; - } else { - process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN = previous.previousStdinMustContain; - } - }), - ); -} - -it.layer(ClaudeTextGenerationTestLayer)("ClaudeTextGenerationLive", (it) => { - it.effect("forwards Claude thinking settings for Haiku without passing effort", () => - withFakeClaudeEnv( - { - output: JSON.stringify({ - structured_output: { - subject: "Add important change", - body: "", - }, - }), - argsMustContain: '--settings {"alwaysThinkingEnabled":false}', - argsMustNotContain: "--effort", - }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/claude-effect", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: { - provider: "claudeAgent", - model: "claude-haiku-4-5", - options: { - thinking: false, - effort: "high", - }, - }, - }); - - expect(generated.subject).toBe("Add important change"); - }), - ), - ); - - it.effect("forwards Claude fast mode and supported effort", () => - withFakeClaudeEnv( - { - output: JSON.stringify({ - structured_output: { - title: "Improve orchestration flow", 
- body: "Body", - }, - }), - argsMustContain: '--effort max --settings {"fastMode":true}', - }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generatePrContent({ - cwd: process.cwd(), - baseBranch: "main", - headBranch: "feature/claude-effect", - commitSummary: "Improve orchestration", - diffSummary: "1 file changed", - diffPatch: "diff --git a/README.md b/README.md", - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - effort: "max", - fastMode: true, - }, - }, - }); - - expect(generated.title).toBe("Improve orchestration flow"); - }), - ), - ); - - it.effect("generates thread titles through the Claude provider", () => - withFakeClaudeEnv( - { - output: JSON.stringify({ - structured_output: { - title: - ' "Reconnect failures after restart because the session state does not recover" ', - }, - }), - stdinMustContain: "You write concise thread titles for coding conversations.", - }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateThreadTitle({ - cwd: process.cwd(), - message: "Please investigate reconnect failures after restarting the session.", - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - }, - }); - - expect(generated.title).toBe( - sanitizeThreadTitle( - '"Reconnect failures after restart because the session state does not recover"', - ), - ); - }), - ), - ); - - it.effect("falls back when Claude thread title normalization becomes whitespace-only", () => - withFakeClaudeEnv( - { - output: JSON.stringify({ - structured_output: { - title: ' """ """ ', - }, - }), - }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateThreadTitle({ - cwd: process.cwd(), - message: "Name this thread.", - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - }, - }); 
- - expect(generated.title).toBe("New thread"); - }), - ), - ); -}); diff --git a/apps/server/src/git/Layers/CopilotTextGeneration.ts b/apps/server/src/git/Layers/CopilotTextGeneration.ts deleted file mode 100644 index 8948c719896..00000000000 --- a/apps/server/src/git/Layers/CopilotTextGeneration.ts +++ /dev/null @@ -1,321 +0,0 @@ -import type { - CopilotClient as CopilotClientType, - CopilotClientOptions, - PermissionRequestResult, -} from "@github/copilot-sdk"; -import { DEFAULT_MODEL_BY_PROVIDER } from "@t3tools/contracts"; -import { sanitizeFeatureBranchName } from "@t3tools/shared/git"; -import { Effect, Layer, Schema, SchemaIssue } from "effect"; - -import { - normalizeCopilotCliPathOverride, - resolveBundledCopilotCliPath, -} from "../../provider/Layers/copilotCliPath.ts"; -import { TextGenerationError } from "@t3tools/contracts"; -import { - CopilotTextGeneration, - type CopilotTextGenerationShape, -} from "../Services/CopilotTextGeneration.ts"; -import type { - CommitMessageGenerationResult, - PrContentGenerationResult, -} from "../Services/TextGeneration.ts"; - -const COPILOT_TIMEOUT_MS = 180_000; -const DENY_PERMISSION_RESULT: PermissionRequestResult = { - kind: "denied-interactively-by-user", -}; - -const CommitMessageResponseSchema = Schema.Struct({ - subject: Schema.String, - body: Schema.String, - branch: Schema.optional(Schema.String), -}); - -const PrContentResponseSchema = Schema.Struct({ - title: Schema.String, - body: Schema.String, -}); - -interface CopilotClientHandle { - createSession( - config: Parameters[0], - ): Promise; - stop(): Promise>; -} - -interface CopilotSessionHandle { - destroy(): Promise; - sendAndWait( - input: { - prompt: string; - mode: "immediate"; - }, - timeoutMs: number, - ): Promise<{ - data?: { - content?: string; - }; - }>; -} - -export interface CopilotTextGenerationLiveOptions { - readonly cliPath?: string; - readonly clientFactory?: (options: CopilotClientOptions) => CopilotClientHandle; -} - -function 
normalizeCopilotError( - operation: "generateCommitMessage" | "generatePrContent", - error: unknown, - fallback: string, -): TextGenerationError { - if (Schema.is(TextGenerationError)(error)) { - return error; - } - - if (error instanceof Error) { - const lower = error.message.toLowerCase(); - if (lower.includes("enoent") || lower.includes("spawn")) { - return new TextGenerationError({ - operation, - detail: "GitHub Copilot CLI is required but was not found.", - cause: error, - }); - } - return new TextGenerationError({ - operation, - detail: `${fallback}: ${error.message}`, - cause: error, - }); - } - - return new TextGenerationError({ - operation, - detail: fallback, - cause: error, - }); -} - -function limitSection(value: string, maxChars: number): string { - if (value.length <= maxChars) return value; - return `${value.slice(0, maxChars)}\n\n[truncated]`; -} - -function sanitizePrTitle(raw: string): string { - const singleLine = raw.trim().split(/\r?\n/g)[0]?.trim() ?? ""; - return singleLine.length > 0 ? 
singleLine : "Update project changes"; -} - -function extractJsonObject(raw: string): string { - const trimmed = raw.trim(); - if (trimmed.startsWith("```")) { - const fenced = trimmed.replace(/^```(?:json)?\s*/i, "").replace(/\s*```$/i, ""); - return fenced.trim(); - } - const start = trimmed.indexOf("{"); - const end = trimmed.lastIndexOf("}"); - if (start !== -1 && end !== -1 && end > start) { - return trimmed.slice(start, end + 1); - } - return trimmed; -} - -function decodeJsonResponse( - operation: "generateCommitMessage" | "generatePrContent", - raw: string, - schema: S, -): Effect.Effect { - return Effect.gen(function* () { - const jsonText = extractJsonObject(raw); - const parsed = yield* Effect.try({ - try: () => JSON.parse(jsonText) as unknown, - catch: (cause) => - normalizeCopilotError(operation, cause, "GitHub Copilot returned invalid JSON"), - }); - - return yield* Schema.decodeUnknownEffect(schema)(parsed).pipe( - Effect.mapError( - (cause) => - new TextGenerationError({ - operation, - detail: `GitHub Copilot returned an unexpected payload: ${SchemaIssue.makeFormatterDefault()(cause.issue)}`, - cause, - }), - ), - ); - }); -} - -export const makeCopilotTextGenerationLive = (options?: CopilotTextGenerationLiveOptions) => - Layer.effect( - CopilotTextGeneration, - Effect.sync(() => { - const runCopilotJson = ({ - operation, - prompt, - schema, - }: { - operation: "generateCommitMessage" | "generatePrContent"; - prompt: string; - schema: S; - }): Effect.Effect => - Effect.gen(function* () { - const cliPath = - normalizeCopilotCliPathOverride(options?.cliPath) ?? resolveBundledCopilotCliPath(); - const model = DEFAULT_MODEL_BY_PROVIDER.copilot; - const clientOptions: CopilotClientOptions = { - ...(cliPath ? { cliPath } : {}), - logLevel: "error", - }; - const { CopilotClient } = yield* Effect.promise(() => import("@github/copilot-sdk")); - const client = - options?.clientFactory?.(clientOptions) ?? 
new CopilotClient(clientOptions); - let session: CopilotSessionHandle | undefined; - const cleanup = Effect.promise(async () => { - if (session) { - await session.destroy().catch(() => undefined); - } - await client.stop().catch(() => []); - }).pipe(Effect.asVoid); - - return yield* Effect.gen(function* () { - const createdSession = yield* Effect.tryPromise({ - try: () => - client.createSession({ - model, - onPermissionRequest: () => DENY_PERMISSION_RESULT, - systemMessage: { - mode: "append", - content: - "Do not use tools, do not request permissions, and answer using only valid JSON with no markdown fences or prose.", - }, - }), - catch: (cause) => - normalizeCopilotError( - operation, - cause, - "Failed to start a GitHub Copilot text-generation session", - ), - }); - session = createdSession; - - const response = yield* Effect.tryPromise({ - try: () => - createdSession.sendAndWait({ prompt, mode: "immediate" }, COPILOT_TIMEOUT_MS), - catch: (cause) => - normalizeCopilotError( - operation, - cause, - "GitHub Copilot did not finish generating text", - ), - }); - - const responseContent = - response && - typeof response === "object" && - "data" in response && - response.data && - typeof response.data === "object" && - "content" in response.data && - typeof response.data.content === "string" - ? response.data.content - : null; - - if (!responseContent) { - return yield* new TextGenerationError({ - operation, - detail: "GitHub Copilot did not return any text.", - }); - } - - return yield* decodeJsonResponse(operation, responseContent, schema); - }).pipe(Effect.ensuring(cleanup)); - }); - - const generateCommitMessage: CopilotTextGenerationShape["generateCommitMessage"] = ( - input, - ) => { - const prompt = [ - "You write concise git commit messages.", - input.includeBranch === true - ? "Return a JSON object with keys: subject, body, branch." 
- : "Return a JSON object with keys: subject, body.", - "Rules:", - "- subject must be imperative, <= 72 chars, and have no trailing period", - "- body can be an empty string or short bullet points", - ...(input.includeBranch === true - ? ["- branch must be a short semantic git branch fragment for this change"] - : []), - "- capture the primary user-visible or developer-visible change", - "", - `Branch: ${input.branch ?? "(detached)"}`, - "", - "Staged files:", - limitSection(input.stagedSummary, 6_000), - "", - "Staged patch:", - limitSection(input.stagedPatch, 40_000), - ].join("\n"); - - return runCopilotJson({ - operation: "generateCommitMessage", - prompt, - schema: CommitMessageResponseSchema, - }).pipe( - Effect.map( - (generated) => - ({ - subject: generated.subject, - body: generated.body.trim(), - ...(generated.branch - ? { branch: sanitizeFeatureBranchName(generated.branch) } - : {}), - }) satisfies CommitMessageGenerationResult, - ), - ); - }; - - const generatePrContent: CopilotTextGenerationShape["generatePrContent"] = (input) => { - const prompt = [ - "You write GitHub pull request content.", - "Return a JSON object with keys: title, body.", - "Rules:", - "- title should be concise and specific", - "- body must be markdown and include headings '## Summary' and '## Testing'", - "- under Summary, provide short bullet points", - "- under Testing, mention concrete commands when available", - "", - `Base branch: ${input.baseBranch}`, - `Head branch: ${input.headBranch}`, - "", - "Commit summary:", - limitSection(input.commitSummary, 6_000), - "", - "Diff summary:", - limitSection(input.diffSummary, 8_000), - "", - "Diff patch:", - limitSection(input.diffPatch, 40_000), - ].join("\n"); - - return runCopilotJson({ - operation: "generatePrContent", - prompt, - schema: PrContentResponseSchema, - }).pipe( - Effect.map( - (generated) => - ({ - title: sanitizePrTitle(generated.title), - body: generated.body.trim(), - }) satisfies PrContentGenerationResult, - ), 
- ); - }; - - return { - generateCommitMessage, - generatePrContent, - } satisfies CopilotTextGenerationShape; - }), - ); diff --git a/apps/server/src/git/Layers/CursorTextGeneration.test.ts b/apps/server/src/git/Layers/CursorTextGeneration.test.ts deleted file mode 100644 index e7bce113474..00000000000 --- a/apps/server/src/git/Layers/CursorTextGeneration.test.ts +++ /dev/null @@ -1,298 +0,0 @@ -import * as path from "node:path"; -import * as os from "node:os"; -import { fileURLToPath } from "node:url"; -import { chmodSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from "node:fs"; - -import * as NodeServices from "@effect/platform-node/NodeServices"; -import { it } from "@effect/vitest"; -import { Effect, Layer } from "effect"; -import { expect } from "vitest"; - -import { ServerSettingsError } from "@t3tools/contracts"; - -import { ServerConfig } from "../../config.ts"; -import { TextGeneration } from "../Services/TextGeneration.ts"; -import { CursorTextGenerationLive } from "./CursorTextGeneration.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; - -const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const mockAgentPath = path.join(__dirname, "../../../scripts/acp-mock-agent.ts"); - -function shellSingleQuote(value: string): string { - return `'${value.replaceAll("'", `'"'"'`)}'`; -} - -const CursorTextGenerationTestLayer = CursorTextGenerationLive.pipe( - Layer.provideMerge(ServerSettingsService.layerTest()), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3code-cursor-text-generation-test-", - }), - ), - Layer.provideMerge(NodeServices.layer), -); - -function makeAcpAgentWrapper(dir: string, env: Record): string { - const binDir = path.join(dir, "bin"); - const agentPath = path.join(binDir, "agent"); - mkdirSync(binDir, { recursive: true }); - writeFileSync( - agentPath, - [ - "#!/bin/sh", - ...Object.entries(env).map(([key, value]) => `export ${key}=${shellSingleQuote(value)}`), 
- 'if [ "$1" != "acp" ]; then', - ' printf "%s\\n" "unexpected args: $*" >&2', - " exit 11", - "fi", - `exec bun ${JSON.stringify(mockAgentPath)}`, - "", - ].join("\n"), - "utf8", - ); - chmodSync(agentPath, 0o755); - return agentPath; -} - -function withFakeAcpAgent( - env: Record, - effect: Effect.Effect, -): Effect.Effect { - return Effect.gen(function* () { - const tempDir = mkdtempSync(path.join(os.tmpdir(), "t3code-cursor-text-acp-")); - const agentPath = makeAcpAgentWrapper(tempDir, env); - const serverSettings = yield* ServerSettingsService; - const previousSettings = yield* serverSettings.getSettings; - - yield* serverSettings.updateSettings({ - providers: { - cursor: { - binaryPath: agentPath, - }, - }, - }); - - return yield* effect.pipe( - Effect.ensuring( - serverSettings - .updateSettings({ - providers: { - cursor: { - binaryPath: previousSettings.providers.cursor.binaryPath, - }, - }, - }) - .pipe( - Effect.catch(() => Effect.void), - Effect.ensuring( - Effect.sync(() => { - rmSync(tempDir, { recursive: true, force: true }); - }), - ), - Effect.asVoid, - ), - ), - ); - }); -} - -function waitForFileContent(path: string): Effect.Effect { - return Effect.promise(async () => { - const deadline = Date.now() + 5_000; - for (;;) { - try { - return readFileSync(path, "utf8"); - } catch (error) { - if (Date.now() >= deadline) { - throw error instanceof Error ? 
error : new Error(String(error)); - } - } - await new Promise((resolve) => setTimeout(resolve, 25)); - } - }); -} - -it.layer(CursorTextGenerationTestLayer)("CursorTextGenerationLive", (it) => { - it.effect("uses ACP model config options instead of raw CLI model ids", () => { - const requestLogDir = mkdtempSync(path.join(os.tmpdir(), "t3code-cursor-text-log-")); - const requestLogPath = path.join(requestLogDir, "requests.ndjson"); - - return withFakeAcpAgent( - { - T3_ACP_REQUEST_LOG_PATH: requestLogPath, - T3_ACP_PROMPT_RESPONSE_TEXT: JSON.stringify({ - subject: "Add generated commit message", - body: "- verify cursor acp model config path", - }), - }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/cursor-text-generation", - stagedSummary: "M apps/server/src/git/Layers/CursorTextGeneration.ts", - stagedPatch: - "diff --git a/apps/server/src/git/Layers/CursorTextGeneration.ts b/apps/server/src/git/Layers/CursorTextGeneration.ts", - modelSelection: { - provider: "cursor", - model: "gpt-5.4", - options: { - reasoning: "xhigh", - fastMode: true, - contextWindow: "1m", - }, - }, - }); - - expect(generated.subject).toBe("Add generated commit message"); - expect(generated.body).toBe("- verify cursor acp model config path"); - - const requests = readFileSync(requestLogPath, "utf8") - .trim() - .split("\n") - .filter((line) => line.length > 0) - .map((line) => JSON.parse(line) as { method?: string; params?: Record }); - - expect( - requests.find((request) => request.method === "initialize")?.params?.clientCapabilities, - ).toMatchObject({ - _meta: { - parameterizedModelPicker: true, - }, - }); - expect( - requests.some( - (request) => - request.method === "session/set_config_option" && - request.params?.configId === "model" && - request.params?.value === "gpt-5.4", - ), - ).toBe(true); - expect( - requests.some( - (request) => - 
request.method === "session/set_config_option" && - request.params?.configId === "reasoning" && - request.params?.value === "extra-high", - ), - ).toBe(true); - expect( - requests.some( - (request) => - request.method === "session/set_config_option" && - request.params?.configId === "context" && - request.params?.value === "1m", - ), - ).toBe(true); - expect( - requests.some( - (request) => - request.method === "session/set_config_option" && - request.params?.configId === "fast" && - request.params?.value === "true", - ), - ).toBe(true); - expect( - requests.find((request) => request.method === "session/prompt")?.params?.prompt, - ).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - type: "text", - text: expect.stringContaining("Staged patch:"), - }), - ]), - ); - - rmSync(requestLogDir, { recursive: true, force: true }); - }), - ); - }); - - it.effect("accepts json objects with extra assistant text around them", () => - withFakeAcpAgent( - { - T3_ACP_PROMPT_RESPONSE_TEXT: - 'Sure, here is the JSON:\n```json\n{\n "subject": "Update README dummy comment with attribution and date",\n "body": ""\n}\n```\nDone.', - }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/cursor-noisy-json", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: { - provider: "cursor", - model: "composer-2", - }, - }); - - expect(generated.subject).toBe("Update README dummy comment with attribution and date"); - expect(generated.body).toBe(""); - }), - ), - ); - - it.effect("generates thread titles through Cursor ACP text generation", () => - withFakeAcpAgent( - { - T3_ACP_PROMPT_RESPONSE_TEXT: JSON.stringify({ - title: '"Trim reconnect spinner status after resume."', - }), - }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* 
textGeneration.generateThreadTitle({ - cwd: process.cwd(), - message: "Fix the reconnect spinner after a resumed session.", - modelSelection: { - provider: "cursor", - model: "composer-2", - }, - }); - - expect(generated.title).toBe("Trim reconnect spinner status after resume."); - }), - ), - ); - - it.effect("closes the ACP child process after text generation completes", () => { - const exitLogDir = mkdtempSync(path.join(os.tmpdir(), "t3code-cursor-text-exit-log-")); - const exitLogPath = path.join(exitLogDir, "exit.log"); - - return withFakeAcpAgent( - { - T3_ACP_EXIT_LOG_PATH: exitLogPath, - T3_ACP_PROMPT_RESPONSE_TEXT: JSON.stringify({ - subject: "Close runtime after generation", - body: "", - }), - }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/cursor-runtime-close", - stagedSummary: "M apps/server/src/git/Layers/CursorTextGeneration.ts", - stagedPatch: - "diff --git a/apps/server/src/git/Layers/CursorTextGeneration.ts b/apps/server/src/git/Layers/CursorTextGeneration.ts", - modelSelection: { - provider: "cursor", - model: "composer-2", - }, - }); - - expect(generated.subject).toBe("Close runtime after generation"); - - const exitLog = yield* waitForFileContent(exitLogPath); - expect(exitLog).toContain("exit:0"); - - rmSync(exitLogDir, { recursive: true, force: true }); - }), - ); - }); -}); diff --git a/apps/server/src/git/Layers/GitCore.test.ts b/apps/server/src/git/Layers/GitCore.test.ts deleted file mode 100644 index 665c4b138f9..00000000000 --- a/apps/server/src/git/Layers/GitCore.test.ts +++ /dev/null @@ -1,2333 +0,0 @@ -import { existsSync } from "node:fs"; -import path from "node:path"; - -import * as NodeServices from "@effect/platform-node/NodeServices"; -import { it } from "@effect/vitest"; -import { Effect, FileSystem, Layer, PlatformError, Scope } from "effect"; -import { describe, expect, vi } from "vitest"; 
- -import { GitCoreLive, makeGitCore } from "./GitCore.ts"; -import { GitCore, type GitCoreShape } from "../Services/GitCore.ts"; -import { GitCommandError } from "@t3tools/contracts"; -import { type ProcessRunResult, runProcess } from "../../processRunner.ts"; -import { ServerConfig } from "../../config.ts"; - -// ── Helpers ── - -const ServerConfigLayer = ServerConfig.layerTest(process.cwd(), { prefix: "t3-git-core-test-" }); -const GitCoreTestLayer = GitCoreLive.pipe( - Layer.provide(ServerConfigLayer), - Layer.provide(NodeServices.layer), -); -const TestLayer = Layer.mergeAll(NodeServices.layer, GitCoreTestLayer); - -function makeTmpDir( - prefix = "git-test-", -): Effect.Effect { - return Effect.gen(function* () { - const fileSystem = yield* FileSystem.FileSystem; - return yield* fileSystem.makeTempDirectoryScoped({ prefix }); - }); -} - -function writeTextFile( - filePath: string, - contents: string, -): Effect.Effect { - return Effect.gen(function* () { - const fileSystem = yield* FileSystem.FileSystem; - yield* fileSystem.writeFileString(filePath, contents); - }); -} - -function removePath( - targetPath: string, -): Effect.Effect { - return Effect.gen(function* () { - const fileSystem = yield* FileSystem.FileSystem; - yield* fileSystem.remove(targetPath, { recursive: true, force: true }); - }); -} - -function makeDirectory( - dirPath: string, -): Effect.Effect { - return Effect.gen(function* () { - const fileSystem = yield* FileSystem.FileSystem; - yield* fileSystem.makeDirectory(dirPath, { recursive: true }); - }); -} - -/** Run a raw git command for test setup (not under test). */ -function git( - cwd: string, - args: ReadonlyArray, - env?: NodeJS.ProcessEnv, -): Effect.Effect { - return Effect.gen(function* () { - const gitCore = yield* GitCore; - const result = yield* gitCore.execute({ - operation: "GitCore.test.git", - cwd, - args, - ...(env ? 
{ env } : {}), - timeoutMs: 10_000, - }); - return result.stdout.trim(); - }); -} - -function configureRemote( - cwd: string, - remoteName: string, - remotePath: string, - fetchNamespace: string, -): Effect.Effect { - return Effect.gen(function* () { - yield* git(cwd, ["config", `remote.${remoteName}.url`, remotePath]); - return yield* git(cwd, [ - "config", - "--replace-all", - `remote.${remoteName}.fetch`, - `+refs/heads/*:refs/remotes/${fetchNamespace}/*`, - ]); - }); -} - -function runShellCommand(input: { - command: string; - cwd: string; - timeoutMs?: number; - maxOutputBytes?: number; -}): Effect.Effect { - return Effect.promise(() => { - const shellPath = - process.platform === "win32" - ? (process.env.ComSpec ?? "cmd.exe") - : (process.env.SHELL ?? "/bin/sh"); - - const args = - process.platform === "win32" ? ["/d", "/s", "/c", input.command] : ["-lc", input.command]; - - return runProcess(shellPath, args, { - cwd: input.cwd, - timeoutMs: input.timeoutMs ?? 30_000, - allowNonZeroExit: true, - maxBufferBytes: input.maxOutputBytes ?? 1_000_000, - outputMode: "truncate", - }); - }); -} - -const makeIsolatedGitCore = (executeOverride: GitCoreShape["execute"]) => - makeGitCore({ executeOverride }).pipe( - Effect.provide(Layer.provideMerge(ServerConfigLayer, NodeServices.layer)), - ); - -/** Create a repo with an initial commit so branches work. 
*/ -function initRepoWithCommit( - cwd: string, -): Effect.Effect< - { initialBranch: string }, - GitCommandError | PlatformError.PlatformError, - GitCore | FileSystem.FileSystem -> { - return Effect.gen(function* () { - const core = yield* GitCore; - yield* core.initRepo({ cwd }); - yield* git(cwd, ["config", "user.email", "test@test.com"]); - yield* git(cwd, ["config", "user.name", "Test"]); - yield* writeTextFile(path.join(cwd, "README.md"), "# test\n"); - yield* git(cwd, ["add", "."]); - yield* git(cwd, ["commit", "-m", "initial commit"]); - const initialBranch = yield* git(cwd, ["branch", "--show-current"]); - return { initialBranch }; - }); -} - -function commitWithDate( - cwd: string, - fileName: string, - fileContents: string, - dateIsoString: string, - message: string, -): Effect.Effect< - void, - GitCommandError | PlatformError.PlatformError, - GitCore | FileSystem.FileSystem -> { - return Effect.gen(function* () { - yield* writeTextFile(path.join(cwd, fileName), fileContents); - yield* git(cwd, ["add", fileName]); - yield* git(cwd, ["commit", "-m", message], { - ...process.env, - GIT_AUTHOR_DATE: dateIsoString, - GIT_COMMITTER_DATE: dateIsoString, - }); - }); -} - -function buildLargeText(lineCount = 20_000): string { - return Array.from({ length: lineCount }, (_, index) => `line ${String(index).padStart(5, "0")}`) - .join("\n") - .concat("\n"); -} - -function splitNullSeparatedPaths(input: string): string[] { - return input - .split("\0") - .map((value) => value.trim()) - .filter((value) => value.length > 0); -} - -// ── Tests ── - -it.layer(TestLayer)("git integration", (it) => { - describe("shell process execution", () => { - it.effect("caps captured output when maxOutputBytes is exceeded", () => - Effect.gen(function* () { - const result = yield* runShellCommand({ - command: `node -e "process.stdout.write('x'.repeat(2000))"`, - cwd: process.cwd(), - timeoutMs: 10_000, - maxOutputBytes: 128, - }); - - expect(result.code).toBe(0); - 
expect(result.stdout.length).toBeLessThanOrEqual(128); - expect(result.stdoutTruncated || result.stderrTruncated).toBe(true); - }), - ); - }); - - // ── initGitRepo ── - - describe("initGitRepo", () => { - it.effect("creates a valid git repo", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* (yield* GitCore).initRepo({ cwd: tmp }); - expect(existsSync(path.join(tmp, ".git"))).toBe(true); - }), - ); - - it.effect("listGitBranches reports isRepo: true after init + commit", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(result.isRepo).toBe(true); - expect(result.hasOriginRemote).toBe(false); - expect(result.branches.length).toBeGreaterThanOrEqual(1); - }), - ); - }); - - describe("workspace helpers", () => { - it.effect("filterIgnoredPaths chunks large path lists and preserves kept paths", () => - Effect.gen(function* () { - const cwd = "/virtual/repo"; - const relativePaths = Array.from({ length: 340 }, (_, index) => { - const prefix = index % 3 === 0 ? "ignored" : "kept"; - return `${prefix}/segment-${String(index).padStart(4, "0")}/${"x".repeat(900)}.ts`; - }); - const expectedPaths = relativePaths.filter( - (relativePath) => !relativePath.startsWith("ignored/"), - ); - - const seenChunks: string[][] = []; - const core = yield* makeIsolatedGitCore((input) => { - if ( - input.args.join(" ") !== - "-c core.fsmonitor=false -c core.untrackedCache=false check-ignore --no-index -z --stdin" - ) { - return Effect.fail( - new GitCommandError({ - operation: input.operation, - command: `git ${input.args.join(" ")}`, - cwd: input.cwd, - detail: "unexpected git command in chunking test", - }), - ); - } - - const chunkPaths = splitNullSeparatedPaths(input.stdin ?? 
""); - seenChunks.push(chunkPaths); - const ignoredPaths = chunkPaths.filter((relativePath) => - relativePath.startsWith("ignored/"), - ); - - return Effect.succeed({ - code: ignoredPaths.length > 0 ? 0 : 1, - stdout: ignoredPaths.length > 0 ? `${ignoredPaths.join("\0")}\0` : "", - stderr: "", - stdoutTruncated: false, - stderrTruncated: false, - }); - }); - - const result = yield* core.filterIgnoredPaths(cwd, relativePaths); - - expect(seenChunks.length).toBeGreaterThan(1); - expect(seenChunks.flat()).toEqual(relativePaths); - expect(result).toEqual(expectedPaths); - }), - ); - - it.effect("listWorkspaceFiles disables fsmonitor and untracked cache helpers", () => - Effect.gen(function* () { - const core = yield* makeIsolatedGitCore((input) => { - expect(input.args).toEqual([ - "-c", - "core.fsmonitor=false", - "-c", - "core.untrackedCache=false", - "ls-files", - "--cached", - "--others", - "--exclude-standard", - "-z", - ]); - return Effect.succeed({ - code: 0, - stdout: "src/index.ts\0README.md\0", - stderr: "", - stdoutTruncated: false, - stderrTruncated: false, - }); - }); - - const result = yield* core.listWorkspaceFiles("/virtual/repo"); - expect(result.paths).toEqual(["src/index.ts", "README.md"]); - expect(result.truncated).toBe(false); - }), - ); - }); - - // ── listGitBranches ── - - describe("listGitBranches", () => { - it.effect("returns isRepo: false for non-git directory", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(result.isRepo).toBe(false); - expect(result.hasOriginRemote).toBe(false); - expect(result.branches).toEqual([]); - }), - ); - - it.effect("returns isRepo: false for deleted directories", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const deletedDir = path.join(tmp, "deleted-repo"); - yield* makeDirectory(deletedDir); - yield* removePath(deletedDir); - - const result = yield* (yield* GitCore).listBranches({ cwd: 
deletedDir }); - - expect(result.isRepo).toBe(false); - expect(result.hasOriginRemote).toBe(false); - expect(result.branches).toEqual([]); - }), - ); - - it.effect("returns the current branch with current: true", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - const current = result.branches.find((b) => b.current); - expect(current).toBeDefined(); - expect(current!.current).toBe(true); - }), - ); - - it.effect("does not include detached HEAD pseudo-refs as branches", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* git(tmp, ["checkout", "--detach", "HEAD"]); - - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(result.branches.some((branch) => branch.name.startsWith("("))).toBe(false); - expect(result.branches.some((branch) => branch.current)).toBe(false); - }), - ); - - it.effect("keeps current branch first and sorts the remaining branches by recency", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const initialBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (branch) => branch.current, - )!.name; - - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "older-branch" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "older-branch" }); - yield* commitWithDate( - tmp, - "older.txt", - "older branch change\n", - "Thu, 1 Jan 2037 00:00:00 +0000", - "older branch change", - ); - - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: initialBranch }); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "newer-branch" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "newer-branch" }); - yield* commitWithDate( - tmp, - "newer.txt", - "newer branch change\n", - "Fri, 1 Jan 2038 00:00:00 +0000", - "newer branch change", - ); - - 
// Switch away to show current branch is pinned, then remaining branches are recency-sorted. - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "older-branch" }); - - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(result.branches[0]!.name).toBe("older-branch"); - expect(result.branches[1]!.name).toBe("newer-branch"); - }), - ); - - it.effect("keeps default branch right after current branch", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const remote = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const defaultBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (branch) => branch.current, - )!.name; - - yield* git(remote, ["init", "--bare"]); - yield* git(tmp, ["remote", "add", "origin", remote]); - yield* git(tmp, ["push", "-u", "origin", defaultBranch]); - yield* git(tmp, ["remote", "set-head", "origin", defaultBranch]); - - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "current-branch" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "current-branch" }); - yield* commitWithDate( - tmp, - "current.txt", - "current change\n", - "Thu, 1 Jan 2037 00:00:00 +0000", - "current change", - ); - - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: defaultBranch }); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "newer-branch" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "newer-branch" }); - yield* commitWithDate( - tmp, - "newer.txt", - "newer change\n", - "Fri, 1 Jan 2038 00:00:00 +0000", - "newer change", - ); - - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "current-branch" }); - - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(result.branches[0]!.name).toBe("current-branch"); - expect(result.branches[1]!.name).toBe(defaultBranch); - expect(result.branches[2]!.name).toBe("newer-branch"); - }), - ); - - it.effect("lists multiple branches after creating them", () 
=> - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature-a" }); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature-b" }); - - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - const names = result.branches.map((b) => b.name); - expect(names).toContain("feature-a"); - expect(names).toContain("feature-b"); - }), - ); - - it.effect("paginates branch results and returns paging metadata", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const { initialBranch } = yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature-a" }); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature-b" }); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature-c" }); - - const firstPage = yield* (yield* GitCore).listBranches({ cwd: tmp, limit: 2 }); - expect(firstPage.totalCount).toBe(4); - expect(firstPage.nextCursor).toBe(2); - expect(firstPage.branches.map((branch) => branch.name)).toEqual([ - initialBranch, - "feature-a", - ]); - - const secondPage = yield* (yield* GitCore).listBranches({ - cwd: tmp, - cursor: firstPage.nextCursor ?? 
0, - limit: 2, - }); - expect(secondPage.totalCount).toBe(4); - expect(secondPage.nextCursor).toBeNull(); - expect(secondPage.branches.map((branch) => branch.name)).toEqual([ - "feature-b", - "feature-c", - ]); - }), - ); - - it.effect("parses separate branch names when column.ui is always enabled", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const { initialBranch } = yield* initRepoWithCommit(tmp); - const createdBranchNames = [ - "go-bin", - "copilot/rewrite-cli-in-go", - "copilot/rewrite-cli-in-rust", - ] as const; - for (const branchName of createdBranchNames) { - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: branchName }); - } - yield* git(tmp, ["config", "column.ui", "always"]); - - const rawBranchOutput = yield* git(tmp, ["branch", "--no-color"], { - ...process.env, - COLUMNS: "120", - }); - expect( - rawBranchOutput - .split("\n") - .some( - (line) => - createdBranchNames.filter((branchName) => line.includes(branchName)).length >= 2, - ), - ).toBe(true); - - const realGitCore = yield* GitCore; - const core = yield* makeIsolatedGitCore((input) => - realGitCore.execute( - input.args[0] === "branch" - ? 
{ - ...input, - env: { ...input.env, COLUMNS: "120" }, - } - : input, - ), - ); - - const result = yield* core.listBranches({ cwd: tmp }); - const localBranchNames = result.branches - .filter((branch) => !branch.isRemote) - .map((branch) => branch.name); - - expect(localBranchNames).toHaveLength(4); - expect(localBranchNames).toEqual( - expect.arrayContaining([initialBranch, ...createdBranchNames]), - ); - expect( - localBranchNames.some( - (branchName) => - createdBranchNames.filter((createdBranch) => branchName.includes(createdBranch)) - .length >= 2, - ), - ).toBe(false); - }), - ); - - it.effect("isDefault is false when no remote exists", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(result.branches.every((b) => b.isDefault === false)).toBe(true); - }), - ); - - it.effect("lists local branches first and remote branches last", () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const tmp = yield* makeTmpDir(); - - yield* git(remote, ["init", "--bare"]); - yield* initRepoWithCommit(tmp); - const defaultBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (branch) => branch.current, - )!.name; - - yield* git(tmp, ["remote", "add", "origin", remote]); - yield* git(tmp, ["push", "-u", "origin", defaultBranch]); - - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature/local-only" }); - - const remoteOnlyBranch = "feature/remote-only"; - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: defaultBranch }); - yield* git(tmp, ["checkout", "-b", remoteOnlyBranch]); - yield* git(tmp, ["push", "-u", "origin", remoteOnlyBranch]); - yield* git(tmp, ["checkout", defaultBranch]); - yield* git(tmp, ["branch", "-D", remoteOnlyBranch]); - - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - const firstRemoteIndex = result.branches.findIndex((branch) => 
branch.isRemote); - - expect(result.hasOriginRemote).toBe(true); - expect(firstRemoteIndex).toBeGreaterThan(0); - expect(result.branches.slice(0, firstRemoteIndex).every((branch) => !branch.isRemote)).toBe( - true, - ); - expect(result.branches.slice(firstRemoteIndex).every((branch) => branch.isRemote)).toBe( - true, - ); - expect( - result.branches.some( - (branch) => branch.name === "feature/local-only" && !branch.isRemote, - ), - ).toBe(true); - expect( - result.branches.some( - (branch) => branch.name === "origin/feature/remote-only" && branch.isRemote, - ), - ).toBe(true); - }), - ); - - it.effect("includes remoteName metadata for remotes with slash in the name", () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const tmp = yield* makeTmpDir(); - const remoteName = "my-org/upstream"; - - yield* git(remote, ["init", "--bare"]); - yield* initRepoWithCommit(tmp); - const defaultBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (branch) => branch.current, - )!.name; - - yield* git(tmp, ["remote", "add", remoteName, remote]); - yield* git(tmp, ["push", "-u", remoteName, defaultBranch]); - - const remoteOnlyBranch = "feature/remote-with-remote-name"; - yield* git(tmp, ["checkout", "-b", remoteOnlyBranch]); - yield* git(tmp, ["push", "-u", remoteName, remoteOnlyBranch]); - yield* git(tmp, ["checkout", defaultBranch]); - yield* git(tmp, ["branch", "-D", remoteOnlyBranch]); - - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - const remoteBranch = result.branches.find( - (branch) => branch.name === `${remoteName}/${remoteOnlyBranch}`, - ); - - expect(remoteBranch).toBeDefined(); - expect(remoteBranch?.isRemote).toBe(true); - expect(remoteBranch?.remoteName).toBe(remoteName); - }), - ); - - it.effect( - "filters branch queries before pagination and dedupes origin refs with local matches", - () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const tmp = yield* makeTmpDir(); - - 
yield* git(remote, ["init", "--bare"]); - const { initialBranch } = yield* initRepoWithCommit(tmp); - yield* git(tmp, ["remote", "add", "origin", remote]); - yield* git(tmp, ["push", "-u", "origin", initialBranch]); - - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature/demo" }); - yield* git(tmp, ["push", "-u", "origin", "feature/demo"]); - - yield* git(tmp, ["checkout", "-b", "feature/remote-only"]); - yield* git(tmp, ["push", "-u", "origin", "feature/remote-only"]); - yield* git(tmp, ["checkout", initialBranch]); - yield* git(tmp, ["branch", "-D", "feature/remote-only"]); - - const result = yield* (yield* GitCore).listBranches({ - cwd: tmp, - query: "feature/", - limit: 10, - }); - - expect(result.totalCount).toBe(2); - expect(result.nextCursor).toBeNull(); - expect(result.branches.map((branch) => branch.name)).toEqual([ - "feature/demo", - "origin/feature/remote-only", - ]); - }), - ); - }); - - // ── checkoutGitBranch ── - - describe("checkoutGitBranch", () => { - it.effect("checks out an existing branch", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature" }); - - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "feature" }); - - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - const current = result.branches.find((b) => b.current); - expect(current!.name).toBe("feature"); - }), - ); - - it.effect("refreshes upstream behind count after checkout when remote branch advanced", () => - Effect.gen(function* () { - const context = yield* Effect.context(); - const runPromise = Effect.runPromiseWith(context); - - const remote = yield* makeTmpDir(); - const source = yield* makeTmpDir(); - const clone = yield* makeTmpDir(); - yield* git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const defaultBranch = (yield* (yield* GitCore).listBranches({ cwd: source })).branches.find( - (branch) 
=> branch.current, - )!.name; - yield* git(source, ["remote", "add", "origin", remote]); - yield* git(source, ["push", "-u", "origin", defaultBranch]); - - const featureBranch = "feature-behind"; - yield* (yield* GitCore).createBranch({ cwd: source, branch: featureBranch }); - yield* (yield* GitCore).checkoutBranch({ cwd: source, branch: featureBranch }); - yield* writeTextFile(path.join(source, "feature.txt"), "feature base\n"); - yield* git(source, ["add", "feature.txt"]); - yield* git(source, ["commit", "-m", "feature base"]); - yield* git(source, ["push", "-u", "origin", featureBranch]); - yield* (yield* GitCore).checkoutBranch({ cwd: source, branch: defaultBranch }); - - yield* git(clone, ["clone", remote, "."]); - yield* git(clone, ["config", "user.email", "test@test.com"]); - yield* git(clone, ["config", "user.name", "Test"]); - yield* git(clone, ["checkout", "-b", featureBranch, "--track", `origin/${featureBranch}`]); - yield* writeTextFile(path.join(clone, "feature.txt"), "feature from remote\n"); - yield* git(clone, ["add", "feature.txt"]); - yield* git(clone, ["commit", "-m", "remote feature update"]); - yield* git(clone, ["push", "origin", featureBranch]); - - yield* (yield* GitCore).checkoutBranch({ cwd: source, branch: featureBranch }); - const core = yield* GitCore; - yield* Effect.promise(() => - vi.waitFor( - async () => { - const details = await runPromise(core.statusDetails(source)); - expect(details.branch).toBe(featureBranch); - expect(details.aheadCount).toBe(0); - expect(details.behindCount).toBe(1); - }, - { - timeout: 10_000, - interval: 100, - }, - ), - ); - }), - ); - - it.effect("statusDetails remains successful when upstream refresh fails after checkout", () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const source = yield* makeTmpDir(); - yield* git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const defaultBranch = (yield* (yield* GitCore).listBranches({ cwd: source })).branches.find( 
- (branch) => branch.current, - )!.name; - yield* git(source, ["remote", "add", "origin", remote]); - yield* git(source, ["push", "-u", "origin", defaultBranch]); - - const featureBranch = "feature-refresh-failure"; - yield* git(source, ["branch", featureBranch]); - yield* git(source, ["checkout", featureBranch]); - yield* writeTextFile(path.join(source, "feature.txt"), "feature base\n"); - yield* git(source, ["add", "feature.txt"]); - yield* git(source, ["commit", "-m", "feature base"]); - yield* git(source, ["push", "-u", "origin", featureBranch]); - yield* git(source, ["checkout", defaultBranch]); - - const realGitCore = yield* GitCore; - let refreshFetchAttempts = 0; - const core = yield* makeIsolatedGitCore((input) => { - if (input.args[0] === "--git-dir" && input.args[2] === "fetch") { - refreshFetchAttempts += 1; - return Effect.fail( - new GitCommandError({ - operation: "git.test.refreshFailure", - command: `git ${input.args.join(" ")}`, - cwd: input.cwd, - detail: "simulated fetch timeout", - }), - ); - } - return realGitCore.execute(input); - }); - yield* core.checkoutBranch({ cwd: source, branch: featureBranch }); - const status = yield* core.statusDetails(source); - expect(refreshFetchAttempts).toBe(1); - expect(status.branch).toBe(featureBranch); - expect(status.upstreamRef).toBe(`origin/${featureBranch}`); - expect(yield* git(source, ["branch", "--show-current"])).toBe(featureBranch); - }), - ); - - it.effect("defers upstream refresh until statusDetails is requested", () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const source = yield* makeTmpDir(); - yield* git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const defaultBranch = (yield* (yield* GitCore).listBranches({ cwd: source })).branches.find( - (branch) => branch.current, - )!.name; - yield* git(source, ["remote", "add", "origin", remote]); - yield* git(source, ["push", "-u", "origin", defaultBranch]); - - const featureBranch = 
"feature/scoped-fetch"; - yield* git(source, ["checkout", "-b", featureBranch]); - yield* writeTextFile(path.join(source, "feature.txt"), "feature base\n"); - yield* git(source, ["add", "feature.txt"]); - yield* git(source, ["commit", "-m", "feature base"]); - yield* git(source, ["push", "-u", "origin", featureBranch]); - yield* git(source, ["checkout", defaultBranch]); - - const realGitCore = yield* GitCore; - let refreshFetchAttempts = 0; - const core = yield* makeIsolatedGitCore((input) => { - if (input.args[0] === "--git-dir" && input.args[2] === "fetch") { - refreshFetchAttempts += 1; - return Effect.succeed({ - code: 0, - stdout: "", - stderr: "", - stdoutTruncated: false, - stderrTruncated: false, - }); - } - return realGitCore.execute(input); - }); - yield* core.checkoutBranch({ cwd: source, branch: featureBranch }); - yield* Effect.promise(() => new Promise((resolve) => setTimeout(resolve, 50))); - expect(refreshFetchAttempts).toBe(0); - const status = yield* core.statusDetails(source); - expect(status.branch).toBe(featureBranch); - expect(refreshFetchAttempts).toBe(1); - }), - ); - - it.effect("coalesces upstream refreshes across sibling worktrees on the same remote", () => - Effect.gen(function* () { - const ok = (stdout = "") => - Effect.succeed({ - code: 0, - stdout, - stderr: "", - stdoutTruncated: false, - stderrTruncated: false, - }); - - let fetchCount = 0; - const core = yield* makeIsolatedGitCore((input) => { - if ( - input.args[0] === "rev-parse" && - input.args[1] === "--abbrev-ref" && - input.args[2] === "--symbolic-full-name" && - input.args[3] === "@{upstream}" - ) { - return ok( - input.cwd === "/repo/worktrees/pr-123" ? 
"origin/feature/pr-123\n" : "origin/main\n", - ); - } - if (input.args[0] === "remote") { - return ok("origin\n"); - } - if (input.args[0] === "rev-parse" && input.args[1] === "--git-common-dir") { - return ok("/repo/.git\n"); - } - if (input.args[0] === "--git-dir" && input.args[2] === "fetch") { - fetchCount += 1; - expect(input.cwd).toBe("/repo"); - expect(input.args).toEqual([ - "--git-dir", - "/repo/.git", - "fetch", - "--quiet", - "--no-tags", - "origin", - ]); - return ok(); - } - if (input.operation === "GitCore.statusDetails.status") { - return ok( - input.cwd === "/repo/worktrees/pr-123" - ? "# branch.head feature/pr-123\n# branch.upstream origin/feature/pr-123\n# branch.ab +0 -0\n" - : "# branch.head main\n# branch.upstream origin/main\n# branch.ab +0 -0\n", - ); - } - if ( - input.operation === "GitCore.statusDetails.unstagedNumstat" || - input.operation === "GitCore.statusDetails.stagedNumstat" - ) { - return ok(); - } - if (input.operation === "GitCore.statusDetails.defaultRef") { - return ok("refs/remotes/origin/main\n"); - } - return Effect.fail( - new GitCommandError({ - operation: input.operation, - command: `git ${input.args.join(" ")}`, - cwd: input.cwd, - detail: "Unexpected git command in shared refresh cache test.", - }), - ); - }); - - yield* core.statusDetails("/repo/worktrees/main"); - yield* core.statusDetails("/repo/worktrees/pr-123"); - expect(fetchCount).toBe(1); - }), - ); - - it.effect( - "briefly backs off failed upstream refreshes across sibling worktrees on one remote", - () => - Effect.gen(function* () { - const ok = (stdout = "") => - Effect.succeed({ - code: 0, - stdout, - stderr: "", - stdoutTruncated: false, - stderrTruncated: false, - }); - - let fetchCount = 0; - const core = yield* makeIsolatedGitCore((input) => { - if ( - input.args[0] === "rev-parse" && - input.args[1] === "--abbrev-ref" && - input.args[2] === "--symbolic-full-name" && - input.args[3] === "@{upstream}" - ) { - return ok( - input.cwd === 
"/repo/worktrees/pr-123" - ? "origin/feature/pr-123\n" - : "origin/main\n", - ); - } - if (input.args[0] === "remote") { - return ok("origin\n"); - } - if (input.args[0] === "rev-parse" && input.args[1] === "--git-common-dir") { - return ok("/repo/.git\n"); - } - if (input.args[0] === "--git-dir" && input.args[2] === "fetch") { - fetchCount += 1; - return Effect.fail( - new GitCommandError({ - operation: input.operation, - command: `git ${input.args.join(" ")}`, - cwd: input.cwd, - detail: "simulated fetch timeout", - }), - ); - } - if (input.operation === "GitCore.statusDetails.status") { - return ok( - input.cwd === "/repo/worktrees/pr-123" - ? "# branch.head feature/pr-123\n# branch.upstream origin/feature/pr-123\n# branch.ab +0 -0\n" - : "# branch.head main\n# branch.upstream origin/main\n# branch.ab +0 -0\n", - ); - } - if ( - input.operation === "GitCore.statusDetails.unstagedNumstat" || - input.operation === "GitCore.statusDetails.stagedNumstat" - ) { - return ok(); - } - if (input.operation === "GitCore.statusDetails.defaultRef") { - return ok("refs/remotes/origin/main\n"); - } - return Effect.fail( - new GitCommandError({ - operation: input.operation, - command: `git ${input.args.join(" ")}`, - cwd: input.cwd, - detail: "Unexpected git command in refresh failure cooldown test.", - }), - ); - }); - - yield* core.statusDetails("/repo/worktrees/main"); - yield* core.statusDetails("/repo/worktrees/pr-123"); - expect(fetchCount).toBe(1); - }), - ); - - it.effect("throws when branch does not exist", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const result = yield* Effect.result( - (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "nonexistent" }), - ); - expect(result._tag).toBe("Failure"); - }), - ); - - it.effect("does not silently checkout a local branch when a remote ref no longer exists", () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const source = yield* 
makeTmpDir(); - yield* git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const defaultBranch = (yield* (yield* GitCore).listBranches({ cwd: source })).branches.find( - (branch) => branch.current, - )!.name; - yield* git(source, ["remote", "add", "origin", remote]); - yield* git(source, ["push", "-u", "origin", defaultBranch]); - - yield* (yield* GitCore).createBranch({ cwd: source, branch: "feature" }); - - const checkoutResult = yield* Effect.result( - (yield* GitCore).checkoutBranch({ cwd: source, branch: "origin/feature" }), - ); - expect(checkoutResult._tag).toBe("Failure"); - expect(yield* git(source, ["branch", "--show-current"])).toBe(defaultBranch); - }), - ); - - it.effect("checks out a remote tracking branch when remote name contains slashes", () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const prefixRemote = yield* makeTmpDir(); - const source = yield* makeTmpDir(); - const prefixFetchNamespace = "prefix-my-org"; - const prefixRemoteName = "my-org"; - const remoteName = "my-org/upstream"; - const featureBranch = "feature"; - yield* git(remote, ["init", "--bare"]); - yield* git(prefixRemote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const defaultBranch = (yield* (yield* GitCore).listBranches({ cwd: source })).branches.find( - (branch) => branch.current, - )!.name; - yield* configureRemote(source, prefixRemoteName, prefixRemote, prefixFetchNamespace); - yield* configureRemote(source, remoteName, remote, remoteName); - yield* git(source, ["push", "-u", remoteName, defaultBranch]); - - yield* git(source, ["checkout", "-b", featureBranch]); - yield* writeTextFile(path.join(source, "feature.txt"), "feature content\n"); - yield* git(source, ["add", "feature.txt"]); - yield* git(source, ["commit", "-m", "feature commit"]); - yield* git(source, ["push", "-u", remoteName, featureBranch]); - yield* git(source, ["checkout", defaultBranch]); - yield* git(source, ["branch", "-D", featureBranch]); 
- - const checkoutResult = yield* (yield* GitCore).checkoutBranch({ - cwd: source, - branch: `${remoteName}/${featureBranch}`, - }); - - expect(checkoutResult.branch).toBe("upstream/feature"); - expect(yield* git(source, ["branch", "--show-current"])).toBe("upstream/feature"); - const realGitCore = yield* GitCore; - let fetchArgs: readonly string[] | null = null; - const core = yield* makeIsolatedGitCore((input) => { - if (input.args[0] === "--git-dir" && input.args[2] === "fetch") { - fetchArgs = [...input.args]; - return Effect.succeed({ - code: 0, - stdout: "", - stderr: "", - stdoutTruncated: false, - stderrTruncated: false, - }); - } - return realGitCore.execute(input); - }); - - const status = yield* core.statusDetails(source); - expect(status.branch).toBe("upstream/feature"); - expect(status.upstreamRef).toBe(`${remoteName}/${featureBranch}`); - expect(fetchArgs).toEqual([ - "--git-dir", - path.join(source, ".git"), - "fetch", - "--quiet", - "--no-tags", - remoteName, - ]); - }), - ); - - it.effect( - "falls back to detached checkout when --track would conflict with an existing local branch", - () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const source = yield* makeTmpDir(); - yield* git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const defaultBranch = (yield* (yield* GitCore).listBranches({ - cwd: source, - })).branches.find((branch) => branch.current)!.name; - yield* git(source, ["remote", "add", "origin", remote]); - yield* git(source, ["push", "-u", "origin", defaultBranch]); - - // Keep local branch but remove tracking so `--track origin/` - // would attempt to create an already-existing local branch. 
- yield* git(source, ["branch", "--unset-upstream"]); - - yield* (yield* GitCore).checkoutBranch({ - cwd: source, - branch: `origin/${defaultBranch}`, - }); - - const core = yield* GitCore; - const status = yield* core.statusDetails(source); - expect(status.branch).toBeNull(); - }), - ); - - it.effect("throws when checkout would overwrite uncommitted changes", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "other" }); - - // Create a conflicting change: modify README on current branch - yield* writeTextFile(path.join(tmp, "README.md"), "modified\n"); - yield* git(tmp, ["add", "README.md"]); - - // First, checkout other branch cleanly - yield* git(tmp, ["stash"]); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "other" }); - yield* writeTextFile(path.join(tmp, "README.md"), "other content\n"); - yield* git(tmp, ["add", "."]); - yield* git(tmp, ["commit", "-m", "other change"]); - - // Go back to default branch - const defaultBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (b) => !b.current, - )!.name; - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: defaultBranch }); - - // Make uncommitted changes to the same file - yield* writeTextFile(path.join(tmp, "README.md"), "conflicting local\n"); - - // Checkout should fail due to uncommitted changes - const result = yield* Effect.result( - (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "other" }), - ); - expect(result._tag).toBe("Failure"); - }), - ); - }); - - // ── createGitBranch ── - - describe("createGitBranch", () => { - it.effect("creates a new branch visible in listGitBranches", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "new-feature" }); - - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - 
expect(result.branches.some((b) => b.name === "new-feature")).toBe(true); - }), - ); - - it.effect("throws when branch already exists", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "dupe" }); - const result = yield* Effect.result( - (yield* GitCore).createBranch({ cwd: tmp, branch: "dupe" }), - ); - expect(result._tag).toBe("Failure"); - }), - ); - }); - - // ── renameGitBranch ── - - describe("renameGitBranch", () => { - it.effect("renames the current branch", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature/old-name" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "feature/old-name" }); - - const renamed = yield* (yield* GitCore).renameBranch({ - cwd: tmp, - oldBranch: "feature/old-name", - newBranch: "feature/new-name", - }); - - expect(renamed.branch).toBe("feature/new-name"); - - const branches = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(branches.branches.some((branch) => branch.name === "feature/old-name")).toBe(false); - const current = branches.branches.find((branch) => branch.current); - expect(current?.name).toBe("feature/new-name"); - }), - ); - - it.effect("returns success without git invocation when old/new names match", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const current = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (b) => b.current, - )!; - - const renamed = yield* (yield* GitCore).renameBranch({ - cwd: tmp, - oldBranch: current.name, - newBranch: current.name, - }); - - expect(renamed.branch).toBe(current.name); - }), - ); - - it.effect("appends numeric suffix when target branch already exists", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* 
initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "t3code/feat/session" }); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "t3code/tmp-working" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "t3code/tmp-working" }); - - const renamed = yield* (yield* GitCore).renameBranch({ - cwd: tmp, - oldBranch: "t3code/tmp-working", - newBranch: "t3code/feat/session", - }); - - expect(renamed.branch).toBe("t3code/feat/session-1"); - const branches = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(branches.branches.some((branch) => branch.name === "t3code/feat/session")).toBe( - true, - ); - expect(branches.branches.some((branch) => branch.name === "t3code/feat/session-1")).toBe( - true, - ); - const current = branches.branches.find((branch) => branch.current); - expect(current?.name).toBe("t3code/feat/session-1"); - }), - ); - - it.effect("increments suffix until it finds an available branch name", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "t3code/feat/session" }); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "t3code/feat/session-1" }); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "t3code/tmp-working" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "t3code/tmp-working" }); - - const renamed = yield* (yield* GitCore).renameBranch({ - cwd: tmp, - oldBranch: "t3code/tmp-working", - newBranch: "t3code/feat/session", - }); - - expect(renamed.branch).toBe("t3code/feat/session-2"); - }), - ); - - it.effect("uses '--' separator for branch rename arguments", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature/old-name" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "feature/old-name" }); - - const realGitCore = 
yield* GitCore; - let renameArgs: ReadonlyArray | null = null; - const core = yield* makeIsolatedGitCore((input) => { - if (input.args[0] === "branch" && input.args[1] === "-m") { - renameArgs = [...input.args]; - } - return realGitCore.execute(input); - }); - - const renamed = yield* core.renameBranch({ - cwd: tmp, - oldBranch: "feature/old-name", - newBranch: "feature/new-name", - }); - - expect(renamed.branch).toBe("feature/new-name"); - expect(renameArgs).toEqual(["branch", "-m", "--", "feature/old-name", "feature/new-name"]); - }), - ); - }); - - // ── createGitWorktree + removeGitWorktree ── - - describe("createGitWorktree", () => { - it.effect("creates a worktree with a new branch from the base branch", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - - const wtPath = path.join(tmp, "worktree-out"); - const currentBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (b) => b.current, - )!.name; - - const result = yield* (yield* GitCore).createWorktree({ - cwd: tmp, - branch: currentBranch, - newBranch: "wt-branch", - path: wtPath, - }); - - expect(result.worktree.path).toBe(wtPath); - expect(result.worktree.branch).toBe("wt-branch"); - expect(existsSync(wtPath)).toBe(true); - expect(existsSync(path.join(wtPath, "README.md"))).toBe(true); - - // Clean up worktree before tmp dir disposal - yield* (yield* GitCore).removeWorktree({ cwd: tmp, path: wtPath }); - }), - ); - - it.effect("worktree has the new branch checked out", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - - const wtPath = path.join(tmp, "wt-check-dir"); - const currentBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (b) => b.current, - )!.name; - - yield* (yield* GitCore).createWorktree({ - cwd: tmp, - branch: currentBranch, - newBranch: "wt-check", - path: wtPath, - }); - - // Verify the worktree is on the new branch - const 
branchOutput = yield* git(wtPath, ["branch", "--show-current"]); - expect(branchOutput).toBe("wt-check"); - - yield* (yield* GitCore).removeWorktree({ cwd: tmp, path: wtPath }); - }), - ); - - it.effect("creates a worktree for an existing branch when newBranch is omitted", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature/existing-worktree" }); - - const wtPath = path.join(tmp, "wt-existing"); - const result = yield* (yield* GitCore).createWorktree({ - cwd: tmp, - branch: "feature/existing-worktree", - path: wtPath, - }); - - expect(result.worktree.path).toBe(wtPath); - expect(result.worktree.branch).toBe("feature/existing-worktree"); - const branchOutput = yield* git(wtPath, ["branch", "--show-current"]); - expect(branchOutput).toBe("feature/existing-worktree"); - - yield* (yield* GitCore).removeWorktree({ cwd: tmp, path: wtPath }); - }), - ); - - it.effect("throws when new branch name already exists", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "existing" }); - - const wtPath = path.join(tmp, "wt-conflict"); - const currentBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (b) => b.current, - )!.name; - - const result = yield* Effect.result( - (yield* GitCore).createWorktree({ - cwd: tmp, - branch: currentBranch, - newBranch: "existing", - path: wtPath, - }), - ); - expect(result._tag).toBe("Failure"); - }), - ); - - it.effect("listGitBranches from worktree cwd reports worktree branch as current", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - - const wtPath = path.join(tmp, "wt-list-dir"); - const mainBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (b) => b.current, - )!.name; - - yield* (yield* 
GitCore).createWorktree({ - cwd: tmp, - branch: mainBranch, - newBranch: "wt-list", - path: wtPath, - }); - - // listGitBranches from the worktree should show wt-list as current - const wtBranches = yield* (yield* GitCore).listBranches({ cwd: wtPath }); - expect(wtBranches.isRepo).toBe(true); - const wtCurrent = wtBranches.branches.find((b) => b.current); - expect(wtCurrent!.name).toBe("wt-list"); - - // Main repo should still show the original branch as current - const mainBranches = yield* (yield* GitCore).listBranches({ cwd: tmp }); - const mainCurrent = mainBranches.branches.find((b) => b.current); - expect(mainCurrent!.name).toBe(mainBranch); - - yield* (yield* GitCore).removeWorktree({ cwd: tmp, path: wtPath }); - }), - ); - - it.effect("removeGitWorktree cleans up the worktree", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - - const wtPath = path.join(tmp, "wt-remove-dir"); - const currentBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (b) => b.current, - )!.name; - - yield* (yield* GitCore).createWorktree({ - cwd: tmp, - branch: currentBranch, - newBranch: "wt-remove", - path: wtPath, - }); - expect(existsSync(wtPath)).toBe(true); - - yield* (yield* GitCore).removeWorktree({ cwd: tmp, path: wtPath }); - expect(existsSync(wtPath)).toBe(false); - }), - ); - - it.effect("removeGitWorktree force removes a dirty worktree", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - - const wtPath = path.join(tmp, "wt-dirty-dir"); - const currentBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (b) => b.current, - )!.name; - - yield* (yield* GitCore).createWorktree({ - cwd: tmp, - branch: currentBranch, - newBranch: "wt-dirty", - path: wtPath, - }); - expect(existsSync(wtPath)).toBe(true); - - yield* writeTextFile(path.join(wtPath, "README.md"), "dirty change\n"); - - const failedRemove = yield* 
Effect.result( - (yield* GitCore).removeWorktree({ cwd: tmp, path: wtPath }), - ); - expect(failedRemove._tag).toBe("Failure"); - expect(existsSync(wtPath)).toBe(true); - - yield* (yield* GitCore).removeWorktree({ cwd: tmp, path: wtPath, force: true }); - expect(existsSync(wtPath)).toBe(false); - }), - ); - }); - - // ── Full flow: local branch checkout ── - - describe("full flow: local branch checkout", () => { - it.effect("init → commit → create branch → checkout → verify current", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature-login" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "feature-login" }); - - const result = yield* (yield* GitCore).listBranches({ cwd: tmp }); - const current = result.branches.find((b) => b.current); - expect(current!.name).toBe("feature-login"); - }), - ); - }); - - // ── Full flow: worktree creation from base branch ── - - describe("full flow: worktree creation", () => { - it.effect("creates worktree with new branch from current branch", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - - const currentBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (b) => b.current, - )!.name; - - const wtPath = path.join(tmp, "my-worktree"); - const result = yield* (yield* GitCore).createWorktree({ - cwd: tmp, - branch: currentBranch, - newBranch: "feature-wt", - path: wtPath, - }); - - // Worktree exists - expect(existsSync(result.worktree.path)).toBe(true); - - // Main repo still on original branch - const mainBranches = yield* (yield* GitCore).listBranches({ cwd: tmp }); - const mainCurrent = mainBranches.branches.find((b) => b.current); - expect(mainCurrent!.name).toBe(currentBranch); - - // Worktree is on the new branch - const wtBranch = yield* git(wtPath, ["branch", "--show-current"]); - 
expect(wtBranch).toBe("feature-wt"); - - yield* (yield* GitCore).removeWorktree({ cwd: tmp, path: wtPath }); - }), - ); - }); - - describe("fetchPullRequestBranch", () => { - it.effect("fetches a GitHub pull request ref into a local branch without checkout", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const { initialBranch } = yield* initRepoWithCommit(tmp); - const remoteDir = yield* makeTmpDir("git-remote-"); - yield* git(remoteDir, ["init", "--bare"]); - yield* git(tmp, ["remote", "add", "origin", remoteDir]); - yield* git(tmp, ["push", "-u", "origin", initialBranch]); - yield* git(tmp, ["checkout", "-b", "feature/pr-fetch"]); - yield* writeTextFile(path.join(tmp, "pr-fetch.txt"), "fetch me\n"); - yield* git(tmp, ["add", "pr-fetch.txt"]); - yield* git(tmp, ["commit", "-m", "Add PR fetch branch"]); - yield* git(tmp, ["push", "-u", "origin", "feature/pr-fetch"]); - yield* git(tmp, ["push", "origin", "HEAD:refs/pull/55/head"]); - yield* git(tmp, ["checkout", initialBranch]); - - yield* (yield* GitCore).fetchPullRequestBranch({ - cwd: tmp, - prNumber: 55, - branch: "feature/pr-fetch", - }); - - const localBranches = yield* git(tmp, ["branch", "--list", "feature/pr-fetch"]); - expect(localBranches).toContain("feature/pr-fetch"); - const currentBranch = yield* git(tmp, ["branch", "--show-current"]); - expect(currentBranch).toBe(initialBranch); - }), - ); - }); - - // ── Full flow: thread switching simulation ── - - describe("full flow: thread switching (checkout toggling)", () => { - it.effect("checkout a → checkout b → checkout a → current matches", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "branch-a" }); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "branch-b" }); - - // Simulate switching to thread A's branch - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "branch-a" }); - let branches = yield* 
(yield* GitCore).listBranches({ cwd: tmp }); - expect(branches.branches.find((b) => b.current)!.name).toBe("branch-a"); - - // Simulate switching to thread B's branch - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "branch-b" }); - branches = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(branches.branches.find((b) => b.current)!.name).toBe("branch-b"); - - // Switch back to thread A - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "branch-a" }); - branches = yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(branches.branches.find((b) => b.current)!.name).toBe("branch-a"); - }), - ); - }); - - // ── Full flow: checkout conflict ── - - describe("full flow: checkout conflict", () => { - it.effect("uncommitted changes prevent checkout to a diverged branch", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "diverged" }); - - // Make diverged branch have different file content - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "diverged" }); - yield* writeTextFile(path.join(tmp, "README.md"), "diverged content\n"); - yield* git(tmp, ["add", "."]); - yield* git(tmp, ["commit", "-m", "diverge"]); - - // Actually, let's just get back to the initial branch explicitly - const allBranches = yield* (yield* GitCore).listBranches({ cwd: tmp }); - const initialBranch = allBranches.branches.find((b) => b.name !== "diverged")!.name; - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: initialBranch }); - - // Make local uncommitted changes to the same file - yield* writeTextFile(path.join(tmp, "README.md"), "local uncommitted\n"); - - // Attempt checkout should fail - const failedCheckout = yield* Effect.result( - (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "diverged" }), - ); - expect(failedCheckout._tag).toBe("Failure"); - - // Current branch should still be the initial one - const result = 
yield* (yield* GitCore).listBranches({ cwd: tmp }); - expect(result.branches.find((b) => b.current)!.name).toBe(initialBranch); - }), - ); - }); - - describe("GitCore", () => { - it.effect("supports branch lifecycle operations through the service API", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const core = yield* GitCore; - - yield* core.initRepo({ cwd: tmp }); - yield* git(tmp, ["config", "user.email", "test@test.com"]); - yield* git(tmp, ["config", "user.name", "Test"]); - yield* writeTextFile(path.join(tmp, "README.md"), "# test\n"); - yield* git(tmp, ["add", "."]); - yield* git(tmp, ["commit", "-m", "initial commit"]); - - yield* core.createBranch({ cwd: tmp, branch: "feature/service-api" }); - yield* core.checkoutBranch({ cwd: tmp, branch: "feature/service-api" }); - const branches = yield* core.listBranches({ cwd: tmp }); - - expect(branches.isRepo).toBe(true); - expect( - branches.branches.find((branch: { current: boolean; name: string }) => branch.current) - ?.name, - ).toBe("feature/service-api"); - }), - ); - - it.effect( - "reuses an existing remote when the target URL only differs by a trailing slash after .git", - () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - - yield* git(tmp, ["remote", "add", "origin", "git@github.com:pingdotgg/t3code.git"]); - - const remoteName = yield* core.ensureRemote({ - cwd: tmp, - preferredName: "origin", - url: "git@github.com:pingdotgg/t3code.git/", - }); - - expect(remoteName).toBe("origin"); - expect((yield* git(tmp, ["remote"])).split("\n").filter(Boolean)).toEqual(["origin"]); - }), - ); - - it.effect("reports status details and dirty state", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - - const clean = yield* core.status({ cwd: tmp }); - expect(clean.hasWorkingTreeChanges).toBe(false); - 
expect(clean.branch).toBeTruthy(); - - yield* writeTextFile(path.join(tmp, "README.md"), "updated\n"); - const dirty = yield* core.statusDetails(tmp); - expect(dirty.hasWorkingTreeChanges).toBe(true); - }), - ); - - it.effect("returns a non-repo status for deleted directories", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const deletedDir = path.join(tmp, "deleted-repo"); - yield* makeDirectory(deletedDir); - yield* removePath(deletedDir); - const core = yield* GitCore; - - const status = yield* core.statusDetails(deletedDir); - const localStatus = yield* core.statusDetailsLocal(deletedDir); - - expect(status).toEqual({ - isRepo: false, - hasOriginRemote: false, - isDefaultBranch: false, - branch: null, - upstreamRef: null, - hasWorkingTreeChanges: false, - workingTree: { - files: [], - insertions: 0, - deletions: 0, - }, - hasUpstream: false, - aheadCount: 0, - behindCount: 0, - }); - expect(localStatus).toEqual(status); - }), - ); - - it.effect("computes ahead count against base branch when no upstream is configured", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - - yield* core.createBranch({ cwd: tmp, branch: "feature/no-upstream-ahead" }); - yield* core.checkoutBranch({ cwd: tmp, branch: "feature/no-upstream-ahead" }); - yield* writeTextFile(path.join(tmp, "feature.txt"), "ahead of base\n"); - yield* git(tmp, ["add", "feature.txt"]); - yield* git(tmp, ["commit", "-m", "feature commit"]); - - const details = yield* core.statusDetails(tmp); - expect(details.branch).toBe("feature/no-upstream-ahead"); - expect(details.hasUpstream).toBe(false); - expect(details.aheadCount).toBe(1); - expect(details.behindCount).toBe(0); - }), - ); - - it.effect( - "computes ahead count against origin/default when local default branch is missing", - () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const source = yield* makeTmpDir(); - yield* 
git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const initialBranch = (yield* (yield* GitCore).listBranches({ - cwd: source, - })).branches.find((branch) => branch.current)!.name; - yield* git(source, ["remote", "add", "origin", remote]); - yield* git(source, ["push", "-u", "origin", initialBranch]); - yield* git(source, ["checkout", "-b", "feature/remote-base-only"]); - yield* writeTextFile( - path.join(source, "feature.txt"), - `ahead of origin/${initialBranch}\n`, - ); - yield* git(source, ["add", "feature.txt"]); - yield* git(source, ["commit", "-m", "feature commit"]); - yield* git(source, ["branch", "-D", initialBranch]); - - const core = yield* GitCore; - const details = yield* core.statusDetails(source); - expect(details.branch).toBe("feature/remote-base-only"); - expect(details.hasUpstream).toBe(false); - expect(details.aheadCount).toBe(1); - expect(details.behindCount).toBe(0); - }), - ); - - it.effect( - "computes ahead count against a non-origin remote-prefixed gh-merge-base candidate", - () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const source = yield* makeTmpDir(); - const remoteName = "fork-seed"; - yield* git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const initialBranch = (yield* (yield* GitCore).listBranches({ - cwd: source, - })).branches.find((branch) => branch.current)!.name; - yield* git(source, ["remote", "add", remoteName, remote]); - yield* git(source, ["push", "-u", remoteName, initialBranch]); - yield* git(source, ["checkout", "-b", "feature/non-origin-merge-base"]); - yield* git(source, [ - "config", - "branch.feature/non-origin-merge-base.gh-merge-base", - `${remoteName}/${initialBranch}`, - ]); - yield* writeTextFile( - path.join(source, "feature.txt"), - `ahead of ${remoteName}/${initialBranch}\n`, - ); - yield* git(source, ["add", "feature.txt"]); - yield* git(source, ["commit", "-m", "feature commit"]); - yield* git(source, ["branch", "-D", 
initialBranch]); - - const core = yield* GitCore; - const details = yield* core.statusDetails(source); - expect(details.branch).toBe("feature/non-origin-merge-base"); - expect(details.hasUpstream).toBe(false); - expect(details.aheadCount).toBe(1); - expect(details.behindCount).toBe(0); - }), - ); - - it.effect("skips push when no upstream is configured and branch is not ahead of base", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - - yield* core.createBranch({ cwd: tmp, branch: "feature/no-upstream-no-ahead" }); - yield* core.checkoutBranch({ cwd: tmp, branch: "feature/no-upstream-no-ahead" }); - - const pushed = yield* core.pushCurrentBranch(tmp, null); - expect(pushed.status).toBe("skipped_up_to_date"); - expect(pushed.branch).toBe("feature/no-upstream-no-ahead"); - expect(pushed.setUpstream).toBeUndefined(); - }), - ); - - it.effect("pushes with upstream setup when no comparable base branch exists", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const remote = yield* makeTmpDir(); - yield* git(tmp, ["init", "--initial-branch=trunk"]); - yield* git(tmp, ["config", "user.email", "test@test.com"]); - yield* git(tmp, ["config", "user.name", "Test"]); - yield* writeTextFile(path.join(tmp, "README.md"), "hello\n"); - yield* git(tmp, ["add", "README.md"]); - yield* git(tmp, ["commit", "-m", "initial"]); - yield* git(remote, ["init", "--bare"]); - yield* git(tmp, ["remote", "add", "origin", remote]); - yield* git(tmp, ["checkout", "-b", "feature/no-base"]); - - const core = yield* GitCore; - const pushed = yield* core.pushCurrentBranch(tmp, null); - expect(pushed.status).toBe("pushed"); - expect(pushed.setUpstream).toBe(true); - expect(pushed.upstreamBranch).toBe("origin/feature/no-base"); - expect(yield* git(tmp, ["rev-parse", "--abbrev-ref", "@{upstream}"])).toBe( - "origin/feature/no-base", - ); - }), - ); - - it.effect("pushes with upstream setup to the 
only configured non-origin remote", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const remote = yield* makeTmpDir(); - yield* git(tmp, ["init", "--initial-branch=main"]); - yield* git(tmp, ["config", "user.email", "test@test.com"]); - yield* git(tmp, ["config", "user.name", "Test"]); - yield* writeTextFile(path.join(tmp, "README.md"), "hello\n"); - yield* git(tmp, ["add", "README.md"]); - yield* git(tmp, ["commit", "-m", "initial"]); - yield* git(remote, ["init", "--bare"]); - yield* git(tmp, ["remote", "add", "fork", remote]); - yield* git(tmp, ["checkout", "-b", "feature/fork-only"]); - - const core = yield* GitCore; - const pushed = yield* core.pushCurrentBranch(tmp, null); - expect(pushed.status).toBe("pushed"); - expect(pushed.setUpstream).toBe(true); - expect(pushed.upstreamBranch).toBe("fork/feature/fork-only"); - expect(yield* git(tmp, ["rev-parse", "--abbrev-ref", "@{upstream}"])).toBe( - "fork/feature/fork-only", - ); - }), - ); - - it.effect( - "pushes with upstream setup when comparable base exists but remote branch is missing", - () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const remote = yield* makeTmpDir(); - yield* git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(tmp); - const initialBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (branch) => branch.current, - )!.name; - yield* git(tmp, ["remote", "add", "origin", remote]); - yield* git(tmp, ["push", "-u", "origin", initialBranch]); - - yield* writeTextFile(path.join(tmp, "default-ahead.txt"), "ahead on default\n"); - yield* git(tmp, ["add", "default-ahead.txt"]); - yield* git(tmp, ["commit", "-m", "default ahead"]); - - const featureBranch = "feature/publish-no-upstream"; - yield* git(tmp, ["checkout", "-b", featureBranch]); - - const core = yield* GitCore; - const pushed = yield* core.pushCurrentBranch(tmp, null); - expect(pushed.status).toBe("pushed"); - expect(pushed.setUpstream).toBe(true); - 
expect(pushed.upstreamBranch).toBe(`origin/${featureBranch}`); - expect(yield* git(tmp, ["rev-parse", "--abbrev-ref", "@{upstream}"])).toBe( - `origin/${featureBranch}`, - ); - expect(yield* git(tmp, ["ls-remote", "--heads", "origin", featureBranch])).toContain( - featureBranch, - ); - }), - ); - - it.effect("prefers branch pushRemote over origin when setting upstream", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const origin = yield* makeTmpDir(); - const fork = yield* makeTmpDir(); - yield* git(origin, ["init", "--bare"]); - yield* git(fork, ["init", "--bare"]); - - yield* initRepoWithCommit(tmp); - const initialBranch = (yield* (yield* GitCore).listBranches({ cwd: tmp })).branches.find( - (branch) => branch.current, - )!.name; - yield* git(tmp, ["remote", "add", "origin", origin]); - yield* git(tmp, ["remote", "add", "fork", fork]); - yield* git(tmp, ["push", "-u", "origin", initialBranch]); - - const featureBranch = "feature/push-remote"; - yield* git(tmp, ["checkout", "-b", featureBranch]); - yield* git(tmp, ["config", `branch.${featureBranch}.pushRemote`, "fork"]); - yield* writeTextFile(path.join(tmp, "feature.txt"), "push to fork\n"); - yield* git(tmp, ["add", "feature.txt"]); - yield* git(tmp, ["commit", "-m", "feature commit"]); - - const core = yield* GitCore; - const pushed = yield* core.pushCurrentBranch(tmp, null); - expect(pushed.status).toBe("pushed"); - expect(pushed.setUpstream).toBe(true); - expect(pushed.upstreamBranch).toBe(`fork/${featureBranch}`); - expect(yield* git(tmp, ["rev-parse", "--abbrev-ref", "@{upstream}"])).toBe( - `fork/${featureBranch}`, - ); - expect(yield* git(tmp, ["ls-remote", "--heads", "fork", featureBranch])).toContain( - featureBranch, - ); - }), - ); - - it.effect( - "pushes renamed PR worktree branches to their tracked upstream branch even when push.default is current", - () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const fork = yield* makeTmpDir(); - yield* git(fork, 
["init", "--bare"]); - - const { initialBranch } = yield* initRepoWithCommit(tmp); - yield* git(tmp, ["remote", "add", "jasonLaster", fork]); - yield* git(tmp, ["checkout", "-b", "statemachine"]); - yield* writeTextFile(path.join(tmp, "fork.txt"), "fork branch\n"); - yield* git(tmp, ["add", "fork.txt"]); - yield* git(tmp, ["commit", "-m", "fork branch"]); - yield* git(tmp, ["push", "-u", "jasonLaster", "statemachine"]); - yield* git(tmp, ["checkout", initialBranch]); - yield* git(tmp, ["branch", "-D", "statemachine"]); - yield* git(tmp, [ - "checkout", - "-b", - "t3code/pr-488/statemachine", - "--track", - "jasonLaster/statemachine", - ]); - yield* git(tmp, ["config", "push.default", "current"]); - yield* writeTextFile(path.join(tmp, "fork.txt"), "updated fork branch\n"); - yield* git(tmp, ["add", "fork.txt"]); - yield* git(tmp, ["commit", "-m", "update reviewed PR branch"]); - - const core = yield* GitCore; - const pushed = yield* core.pushCurrentBranch(tmp, null); - - expect(pushed.status).toBe("pushed"); - expect(pushed.setUpstream).toBe(false); - expect(pushed.upstreamBranch).toBe("jasonLaster/statemachine"); - expect(yield* git(tmp, ["rev-parse", "--abbrev-ref", "@{upstream}"])).toBe( - "jasonLaster/statemachine", - ); - expect( - yield* git(tmp, ["ls-remote", "--heads", "jasonLaster", "statemachine"]), - ).toContain("statemachine"); - expect( - yield* git(tmp, ["ls-remote", "--heads", "jasonLaster", "t3code/pr-488/statemachine"]), - ).toBe(""); - }), - ); - - it.effect("pushes to the tracked upstream when the remote name contains slashes", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const remote = yield* makeTmpDir(); - const prefixRemote = yield* makeTmpDir(); - const prefixFetchNamespace = "prefix-my-org"; - const prefixRemoteName = "my-org"; - const remoteName = "my-org/upstream"; - const featureBranch = "feature/slash-remote-push"; - yield* git(remote, ["init", "--bare"]); - yield* git(prefixRemote, ["init", "--bare"]); - - 
const { initialBranch } = yield* initRepoWithCommit(tmp); - yield* configureRemote(tmp, prefixRemoteName, prefixRemote, prefixFetchNamespace); - yield* configureRemote(tmp, remoteName, remote, remoteName); - yield* git(tmp, ["push", "-u", remoteName, initialBranch]); - - yield* git(tmp, ["checkout", "-b", featureBranch]); - yield* writeTextFile(path.join(tmp, "feature.txt"), "first revision\n"); - yield* git(tmp, ["add", "feature.txt"]); - yield* git(tmp, ["commit", "-m", "feature base"]); - yield* git(tmp, ["push", "-u", remoteName, featureBranch]); - - yield* writeTextFile(path.join(tmp, "feature.txt"), "second revision\n"); - yield* git(tmp, ["add", "feature.txt"]); - yield* git(tmp, ["commit", "-m", "feature update"]); - - const core = yield* GitCore; - const pushed = yield* core.pushCurrentBranch(tmp, null); - expect(pushed.status).toBe("pushed"); - expect(pushed.setUpstream).toBe(false); - expect(pushed.upstreamBranch).toBe(`${remoteName}/${featureBranch}`); - expect(yield* git(tmp, ["rev-parse", "--abbrev-ref", "@{upstream}"])).toBe( - `${remoteName}/${featureBranch}`, - ); - expect(yield* git(tmp, ["ls-remote", "--heads", remoteName, featureBranch])).toContain( - featureBranch, - ); - }), - ); - - it.effect("includes command context when worktree removal fails", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - const missingWorktreePath = path.join(tmp, "missing-worktree"); - - const removeResult = yield* Effect.result( - core.removeWorktree({ cwd: tmp, path: missingWorktreePath }), - ); - expect(removeResult._tag).toBe("Failure"); - if (removeResult._tag !== "Failure") { - return; - } - const message = removeResult.failure.message; - expect(message).toContain("git worktree remove"); - expect(message).toContain(`cwd: ${tmp}`); - expect(message).toContain(missingWorktreePath); - }), - ); - - it.effect( - "refreshes upstream before statusDetails so behind count reflects 
remote updates", - () => - Effect.gen(function* () { - const remote = yield* makeTmpDir(); - const source = yield* makeTmpDir(); - const clone = yield* makeTmpDir(); - yield* git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const initialBranch = (yield* (yield* GitCore).listBranches({ - cwd: source, - })).branches.find((branch) => branch.current)!.name; - yield* git(source, ["remote", "add", "origin", remote]); - yield* git(source, ["push", "-u", "origin", initialBranch]); - - yield* git(clone, ["clone", remote, "."]); - yield* git(clone, ["config", "user.email", "test@test.com"]); - yield* git(clone, ["config", "user.name", "Test"]); - yield* git(clone, [ - "checkout", - "-B", - initialBranch, - "--track", - `origin/${initialBranch}`, - ]); - yield* writeTextFile(path.join(clone, "CHANGELOG.md"), "remote change\n"); - yield* git(clone, ["add", "CHANGELOG.md"]); - yield* git(clone, ["commit", "-m", "remote update"]); - yield* git(clone, ["push", "origin", initialBranch]); - - const core = yield* GitCore; - const details = yield* core.statusDetails(source); - expect(details.branch).toBe(initialBranch); - expect(details.aheadCount).toBe(0); - expect(details.behindCount).toBe(1); - }), - ); - - it.effect("prepares commit context by auto-staging and creates commit", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - - yield* writeTextFile(path.join(tmp, "README.md"), "new content\n"); - const context = yield* core.prepareCommitContext(tmp); - expect(context).not.toBeNull(); - expect(context!.stagedSummary.length).toBeGreaterThan(0); - expect(context!.stagedPatch.length).toBeGreaterThan(0); - - const created = yield* core.commit(tmp, "Add README update", "- include updated content"); - expect(created.commitSha.length).toBeGreaterThan(0); - expect(yield* git(tmp, ["log", "-1", "--pretty=%s"])).toBe("Add README update"); - }), - ); - - 
it.effect("prepareCommitContext stages only selected files when filePaths provided", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - - yield* writeTextFile(path.join(tmp, "a.txt"), "file a\n"); - yield* writeTextFile(path.join(tmp, "b.txt"), "file b\n"); - - const context = yield* core.prepareCommitContext(tmp, ["a.txt"]); - expect(context).not.toBeNull(); - expect(context!.stagedSummary).toContain("a.txt"); - expect(context!.stagedSummary).not.toContain("b.txt"); - - yield* core.commit(tmp, "Add only a.txt", ""); - - // b.txt should still be untracked after commit - const statusAfter = yield* git(tmp, ["status", "--porcelain"]); - expect(statusAfter).toContain("b.txt"); - expect(statusAfter).not.toContain("a.txt"); - }), - ); - - it.effect("prepareCommitContext stages everything when filePaths is undefined", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - - yield* writeTextFile(path.join(tmp, "a.txt"), "file a\n"); - yield* writeTextFile(path.join(tmp, "b.txt"), "file b\n"); - - const context = yield* core.prepareCommitContext(tmp); - expect(context).not.toBeNull(); - expect(context!.stagedSummary).toContain("a.txt"); - expect(context!.stagedSummary).toContain("b.txt"); - }), - ); - - it.effect("prepareCommitContext truncates oversized staged patches instead of failing", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - - yield* writeTextFile(path.join(tmp, "README.md"), buildLargeText()); - - const context = yield* core.prepareCommitContext(tmp); - expect(context).not.toBeNull(); - expect(context!.stagedSummary).toContain("README.md"); - expect(context!.stagedPatch).toContain("[truncated]"); - }), - ); - - it.effect("readRangeContext truncates oversized diff patches instead of failing", () => - 
Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const { initialBranch } = yield* initRepoWithCommit(tmp); - const core = yield* GitCore; - - yield* core.createBranch({ cwd: tmp, branch: "feature/large-range-context" }); - yield* core.checkoutBranch({ cwd: tmp, branch: "feature/large-range-context" }); - yield* writeTextFile(path.join(tmp, "large.txt"), buildLargeText()); - yield* git(tmp, ["add", "large.txt"]); - yield* git(tmp, ["commit", "-m", "Add large range context"]); - - const rangeContext = yield* core.readRangeContext(tmp, initialBranch); - expect(rangeContext.commitSummary).toContain("Add large range context"); - expect(rangeContext.diffSummary).toContain("large.txt"); - expect(rangeContext.diffPatch).toContain("[truncated]"); - }), - ); - - it.effect("pushes with upstream setup and then skips when up to date", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const remote = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* git(remote, ["init", "--bare"]); - yield* git(tmp, ["remote", "add", "origin", remote]); - yield* (yield* GitCore).createBranch({ cwd: tmp, branch: "feature/core-push" }); - yield* (yield* GitCore).checkoutBranch({ cwd: tmp, branch: "feature/core-push" }); - - yield* writeTextFile(path.join(tmp, "feature.txt"), "push me\n"); - const core = yield* GitCore; - const context = yield* core.prepareCommitContext(tmp); - expect(context).not.toBeNull(); - yield* core.commit(tmp, "Add feature file", ""); - - const pushed = yield* core.pushCurrentBranch(tmp, null); - expect(pushed.status).toBe("pushed"); - expect(pushed.setUpstream).toBe(true); - expect(yield* git(tmp, ["rev-parse", "--abbrev-ref", "@{upstream}"])).toBe( - "origin/feature/core-push", - ); - - const skipped = yield* core.pushCurrentBranch(tmp, null); - expect(skipped.status).toBe("skipped_up_to_date"); - }), - ); - - it.effect("pulls behind branch and then reports up-to-date", () => - Effect.gen(function* () { - const remote = 
yield* makeTmpDir(); - const source = yield* makeTmpDir(); - const clone = yield* makeTmpDir(); - yield* git(remote, ["init", "--bare"]); - - yield* initRepoWithCommit(source); - const initialBranch = (yield* (yield* GitCore).listBranches({ cwd: source })).branches.find( - (branch) => branch.current, - )!.name; - yield* git(source, ["remote", "add", "origin", remote]); - yield* git(source, ["push", "-u", "origin", initialBranch]); - - yield* git(clone, ["clone", remote, "."]); - yield* git(clone, ["config", "user.email", "test@test.com"]); - yield* git(clone, ["config", "user.name", "Test"]); - yield* writeTextFile(path.join(clone, "CHANGELOG.md"), "remote change\n"); - yield* git(clone, ["add", "CHANGELOG.md"]); - yield* git(clone, ["commit", "-m", "remote update"]); - yield* git(clone, ["push", "origin", initialBranch]); - - const core = yield* GitCore; - const pulled = yield* core.pullCurrentBranch(source); - expect(pulled.status).toBe("pulled"); - expect((yield* core.statusDetails(source)).behindCount).toBe(0); - - const skipped = yield* core.pullCurrentBranch(source); - expect(skipped.status).toBe("skipped_up_to_date"); - }), - ); - - it.effect("top-level pullGitBranch rejects when no upstream exists", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const result = yield* Effect.result((yield* GitCore).pullCurrentBranch(tmp)); - expect(result._tag).toBe("Failure"); - if (result._tag === "Failure") { - expect(result.failure.message.toLowerCase()).toContain("no upstream"); - } - }), - ); - - it.effect("lists branches when recency lookup fails", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - const realGitCore = yield* GitCore; - let didFailRecency = false; - const core = yield* makeIsolatedGitCore((input) => { - if (!didFailRecency && input.args[0] === "for-each-ref") { - didFailRecency = true; - return Effect.fail( - new GitCommandError({ - 
operation: "git.test.listBranchesRecency", - command: `git ${input.args.join(" ")}`, - cwd: input.cwd, - detail: "timeout", - }), - ); - } - return realGitCore.execute(input); - }); - - const result = yield* core.listBranches({ cwd: tmp }); - - expect(result.isRepo).toBe(true); - expect(result.branches.length).toBeGreaterThan(0); - expect(result.branches[0]?.current).toBe(true); - expect(didFailRecency).toBe(true); - }), - ); - - it.effect("falls back to empty remote branch data when remote lookups fail", () => - Effect.gen(function* () { - const tmp = yield* makeTmpDir(); - const remote = yield* makeTmpDir(); - yield* initRepoWithCommit(tmp); - yield* git(remote, ["init", "--bare"]); - yield* git(tmp, ["remote", "add", "origin", remote]); - - const realGitCore = yield* GitCore; - let didFailRemoteBranches = false; - let didFailRemoteNames = false; - const core = yield* makeIsolatedGitCore((input) => { - if (input.args.join(" ") === "branch --no-color --no-column --remotes") { - didFailRemoteBranches = true; - return Effect.fail( - new GitCommandError({ - operation: "git.test.listBranchesRemoteBranches", - command: `git ${input.args.join(" ")}`, - cwd: input.cwd, - detail: "remote unavailable", - }), - ); - } - if (input.args.join(" ") === "remote") { - didFailRemoteNames = true; - return Effect.fail( - new GitCommandError({ - operation: "git.test.listBranchesRemoteNames", - command: `git ${input.args.join(" ")}`, - cwd: input.cwd, - detail: "remote unavailable", - }), - ); - } - return realGitCore.execute(input); - }); - - const result = yield* core.listBranches({ cwd: tmp }); - - expect(result.isRepo).toBe(true); - expect(result.branches.length).toBeGreaterThan(0); - expect(result.branches.every((branch) => !branch.isRemote)).toBe(true); - expect(didFailRemoteBranches).toBe(true); - expect(didFailRemoteNames).toBe(true); - }), - ); - }); -}); diff --git a/apps/server/src/git/Layers/GitHubCli.test.ts b/apps/server/src/git/Layers/GitHubCli.test.ts deleted file mode 
100644 index 5a7b9cb8b1d..00000000000 --- a/apps/server/src/git/Layers/GitHubCli.test.ts +++ /dev/null @@ -1,227 +0,0 @@ -import { assert, it } from "@effect/vitest"; -import { Effect } from "effect"; -import { afterEach, expect, vi } from "vitest"; - -vi.mock("../../processRunner", () => ({ - runProcess: vi.fn(), -})); - -import { runProcess } from "../../processRunner.ts"; -import { GitHubCli } from "../Services/GitHubCli.ts"; -import { GitHubCliLive } from "./GitHubCli.ts"; - -const mockedRunProcess = vi.mocked(runProcess); -const layer = it.layer(GitHubCliLive); - -afterEach(() => { - mockedRunProcess.mockReset(); -}); - -layer("GitHubCliLive", (it) => { - it.effect("parses pull request view output", () => - Effect.gen(function* () { - mockedRunProcess.mockResolvedValueOnce({ - stdout: JSON.stringify({ - number: 42, - title: "Add PR thread creation", - url: "https://github.com/pingdotgg/codething-mvp/pull/42", - baseRefName: "main", - headRefName: "feature/pr-threads", - state: "OPEN", - mergedAt: null, - isCrossRepository: true, - headRepository: { - nameWithOwner: "octocat/codething-mvp", - }, - headRepositoryOwner: { - login: "octocat", - }, - }), - stderr: "", - code: 0, - signal: null, - timedOut: false, - }); - - const result = yield* Effect.gen(function* () { - const gh = yield* GitHubCli; - return yield* gh.getPullRequest({ - cwd: "/repo", - reference: "#42", - }); - }); - - assert.deepStrictEqual(result, { - number: 42, - title: "Add PR thread creation", - url: "https://github.com/pingdotgg/codething-mvp/pull/42", - baseRefName: "main", - headRefName: "feature/pr-threads", - state: "open", - isCrossRepository: true, - headRepositoryNameWithOwner: "octocat/codething-mvp", - headRepositoryOwnerLogin: "octocat", - }); - expect(mockedRunProcess).toHaveBeenCalledWith( - "gh", - [ - "pr", - "view", - "#42", - "--json", - "number,title,url,baseRefName,headRefName,state,mergedAt,isCrossRepository,headRepository,headRepositoryOwner", - ], - 
expect.objectContaining({ cwd: "/repo" }), - ); - }), - ); - - it.effect("trims pull request fields decoded from gh json", () => - Effect.gen(function* () { - mockedRunProcess.mockResolvedValueOnce({ - stdout: JSON.stringify({ - number: 42, - title: " Add PR thread creation \n", - url: " https://github.com/pingdotgg/codething-mvp/pull/42 ", - baseRefName: " main ", - headRefName: "\tfeature/pr-threads\t", - state: "OPEN", - mergedAt: null, - isCrossRepository: true, - headRepository: { - nameWithOwner: " octocat/codething-mvp ", - }, - headRepositoryOwner: { - login: " octocat ", - }, - }), - stderr: "", - code: 0, - signal: null, - timedOut: false, - }); - - const result = yield* Effect.gen(function* () { - const gh = yield* GitHubCli; - return yield* gh.getPullRequest({ - cwd: "/repo", - reference: "#42", - }); - }); - - assert.deepStrictEqual(result, { - number: 42, - title: "Add PR thread creation", - url: "https://github.com/pingdotgg/codething-mvp/pull/42", - baseRefName: "main", - headRefName: "feature/pr-threads", - state: "open", - isCrossRepository: true, - headRepositoryNameWithOwner: "octocat/codething-mvp", - headRepositoryOwnerLogin: "octocat", - }); - }), - ); - - it.effect("skips invalid entries when parsing pr lists", () => - Effect.gen(function* () { - mockedRunProcess.mockResolvedValueOnce({ - stdout: JSON.stringify([ - { - number: 0, - title: "invalid", - url: "https://github.com/pingdotgg/codething-mvp/pull/0", - baseRefName: "main", - headRefName: "feature/invalid", - }, - { - number: 43, - title: " Valid PR ", - url: " https://github.com/pingdotgg/codething-mvp/pull/43 ", - baseRefName: " main ", - headRefName: " feature/pr-list ", - headRepository: { - nameWithOwner: " ", - }, - headRepositoryOwner: { - login: " ", - }, - }, - ]), - stderr: "", - code: 0, - signal: null, - timedOut: false, - }); - - const result = yield* Effect.gen(function* () { - const gh = yield* GitHubCli; - return yield* gh.listOpenPullRequests({ - cwd: "/repo", - 
headSelector: "feature/pr-list", - }); - }); - - assert.deepStrictEqual(result, [ - { - number: 43, - title: "Valid PR", - url: "https://github.com/pingdotgg/codething-mvp/pull/43", - baseRefName: "main", - headRefName: "feature/pr-list", - state: "open", - }, - ]); - }), - ); - - it.effect("reads repository clone URLs", () => - Effect.gen(function* () { - mockedRunProcess.mockResolvedValueOnce({ - stdout: JSON.stringify({ - nameWithOwner: "octocat/codething-mvp", - url: "https://github.com/octocat/codething-mvp", - sshUrl: "git@github.com:octocat/codething-mvp.git", - }), - stderr: "", - code: 0, - signal: null, - timedOut: false, - }); - - const result = yield* Effect.gen(function* () { - const gh = yield* GitHubCli; - return yield* gh.getRepositoryCloneUrls({ - cwd: "/repo", - repository: "octocat/codething-mvp", - }); - }); - - assert.deepStrictEqual(result, { - nameWithOwner: "octocat/codething-mvp", - url: "https://github.com/octocat/codething-mvp", - sshUrl: "git@github.com:octocat/codething-mvp.git", - }); - }), - ); - - it.effect("surfaces a friendly error when the pull request is not found", () => - Effect.gen(function* () { - mockedRunProcess.mockRejectedValueOnce( - new Error( - "GraphQL: Could not resolve to a PullRequest with the number of 4888. 
(repository.pullRequest)", - ), - ); - - const error = yield* Effect.gen(function* () { - const gh = yield* GitHubCli; - return yield* gh.getPullRequest({ - cwd: "/repo", - reference: "4888", - }); - }).pipe(Effect.flip); - - assert.equal(error.message.includes("Pull request not found"), true); - }), - ); -}); diff --git a/apps/server/src/git/Layers/GitHubCli.ts b/apps/server/src/git/Layers/GitHubCli.ts deleted file mode 100644 index e67e4966d9d..00000000000 --- a/apps/server/src/git/Layers/GitHubCli.ts +++ /dev/null @@ -1,241 +0,0 @@ -import { Effect, Layer, Result, Schema, SchemaIssue } from "effect"; -import { TrimmedNonEmptyString } from "@t3tools/contracts"; - -import { runProcess } from "../../processRunner.ts"; -import { GitHubCliError } from "@t3tools/contracts"; -import { - GitHubCli, - type GitHubRepositoryCloneUrls, - type GitHubCliShape, -} from "../Services/GitHubCli.ts"; -import { - decodeGitHubPullRequestJson, - decodeGitHubPullRequestListJson, - formatGitHubJsonDecodeError, -} from "../githubPullRequests.ts"; - -const DEFAULT_TIMEOUT_MS = 30_000; - -function normalizeGitHubCliError(operation: "execute" | "stdout", error: unknown): GitHubCliError { - if (error instanceof Error) { - if (error.message.includes("Command not found: gh")) { - return new GitHubCliError({ - operation, - detail: "GitHub CLI (`gh`) is required but not available on PATH.", - cause: error, - }); - } - - const lower = error.message.toLowerCase(); - if ( - lower.includes("authentication failed") || - lower.includes("not logged in") || - lower.includes("gh auth login") || - lower.includes("no oauth token") - ) { - return new GitHubCliError({ - operation, - detail: "GitHub CLI is not authenticated. 
Run `gh auth login` and retry.", - cause: error, - }); - } - - if ( - lower.includes("could not resolve to a pullrequest") || - lower.includes("repository.pullrequest") || - lower.includes("no pull requests found for branch") || - lower.includes("pull request not found") - ) { - return new GitHubCliError({ - operation, - detail: "Pull request not found. Check the PR number or URL and try again.", - cause: error, - }); - } - - return new GitHubCliError({ - operation, - detail: `GitHub CLI command failed: ${error.message}`, - cause: error, - }); - } - - return new GitHubCliError({ - operation, - detail: "GitHub CLI command failed.", - cause: error, - }); -} - -const RawGitHubRepositoryCloneUrlsSchema = Schema.Struct({ - nameWithOwner: TrimmedNonEmptyString, - url: TrimmedNonEmptyString, - sshUrl: TrimmedNonEmptyString, -}); - -function normalizeRepositoryCloneUrls( - raw: Schema.Schema.Type, -): GitHubRepositoryCloneUrls { - return { - nameWithOwner: raw.nameWithOwner, - url: raw.url, - sshUrl: raw.sshUrl, - }; -} - -function decodeGitHubJson( - raw: string, - schema: S, - operation: "listOpenPullRequests" | "getPullRequest" | "getRepositoryCloneUrls", - invalidDetail: string, -): Effect.Effect { - return Schema.decodeEffect(Schema.fromJsonString(schema))(raw).pipe( - Effect.mapError( - (error) => - new GitHubCliError({ - operation, - detail: `${invalidDetail}: ${SchemaIssue.makeFormatterDefault()(error.issue)}`, - cause: error, - }), - ), - ); -} - -const makeGitHubCli = Effect.sync(() => { - const execute: GitHubCliShape["execute"] = (input) => - Effect.tryPromise({ - try: () => - runProcess("gh", input.args, { - cwd: input.cwd, - timeoutMs: input.timeoutMs ?? 
DEFAULT_TIMEOUT_MS, - }), - catch: (error) => normalizeGitHubCliError("execute", error), - }); - - const service = { - execute, - listOpenPullRequests: (input) => - execute({ - cwd: input.cwd, - args: [ - "pr", - "list", - "--head", - input.headSelector, - "--state", - "open", - "--limit", - String(input.limit ?? 1), - "--json", - "number,title,url,baseRefName,headRefName,state,mergedAt,isCrossRepository,headRepository,headRepositoryOwner", - ], - }).pipe( - Effect.map((result) => result.stdout.trim()), - Effect.flatMap((raw) => - raw.length === 0 - ? Effect.succeed([]) - : Effect.sync(() => decodeGitHubPullRequestListJson(raw)).pipe( - Effect.flatMap((decoded) => { - if (!Result.isSuccess(decoded)) { - return Effect.fail( - new GitHubCliError({ - operation: "listOpenPullRequests", - detail: `GitHub CLI returned invalid PR list JSON: ${formatGitHubJsonDecodeError(decoded.failure)}`, - cause: decoded.failure, - }), - ); - } - - return Effect.succeed( - decoded.success.map(({ updatedAt: _updatedAt, ...summary }) => summary), - ); - }), - ), - ), - ), - getPullRequest: (input) => - execute({ - cwd: input.cwd, - args: [ - "pr", - "view", - input.reference, - "--json", - "number,title,url,baseRefName,headRefName,state,mergedAt,isCrossRepository,headRepository,headRepositoryOwner", - ], - }).pipe( - Effect.map((result) => result.stdout.trim()), - Effect.flatMap((raw) => - Effect.sync(() => decodeGitHubPullRequestJson(raw)).pipe( - Effect.flatMap((decoded) => { - if (!Result.isSuccess(decoded)) { - return Effect.fail( - new GitHubCliError({ - operation: "getPullRequest", - detail: `GitHub CLI returned invalid pull request JSON: ${formatGitHubJsonDecodeError(decoded.failure)}`, - cause: decoded.failure, - }), - ); - } - - return Effect.succeed( - (({ updatedAt: _updatedAt, ...summary }) => summary)(decoded.success), - ); - }), - ), - ), - ), - getRepositoryCloneUrls: (input) => - execute({ - cwd: input.cwd, - args: ["repo", "view", input.repository, "--json", 
"nameWithOwner,url,sshUrl"], - }).pipe( - Effect.map((result) => result.stdout.trim()), - Effect.flatMap((raw) => - decodeGitHubJson( - raw, - RawGitHubRepositoryCloneUrlsSchema, - "getRepositoryCloneUrls", - "GitHub CLI returned invalid repository JSON.", - ), - ), - Effect.map(normalizeRepositoryCloneUrls), - ), - createPullRequest: (input) => - execute({ - cwd: input.cwd, - args: [ - "pr", - "create", - "--base", - input.baseBranch, - "--head", - input.headSelector, - "--title", - input.title, - "--body-file", - input.bodyFile, - ...(input.repository ? ["--repo", input.repository] : []), - ], - }).pipe(Effect.asVoid), - getDefaultBranch: (input) => - execute({ - cwd: input.cwd, - args: ["repo", "view", "--json", "defaultBranchRef", "--jq", ".defaultBranchRef.name"], - }).pipe( - Effect.map((value) => { - const trimmed = value.stdout.trim(); - return trimmed.length > 0 ? trimmed : null; - }), - ), - checkoutPullRequest: (input) => - execute({ - cwd: input.cwd, - args: ["pr", "checkout", input.reference, ...(input.force ? 
["--force"] : [])], - }).pipe(Effect.asVoid), - } satisfies GitHubCliShape; - - return service; -}); - -export const GitHubCliLive = Layer.effect(GitHubCli, makeGitHubCli); diff --git a/apps/server/src/git/Layers/GitStatusBroadcaster.ts b/apps/server/src/git/Layers/GitStatusBroadcaster.ts deleted file mode 100644 index 3ad7d095d8d..00000000000 --- a/apps/server/src/git/Layers/GitStatusBroadcaster.ts +++ /dev/null @@ -1,311 +0,0 @@ -import { realpathSync } from "node:fs"; - -import { - Duration, - Effect, - Exit, - Fiber, - Layer, - PubSub, - Ref, - Scope, - Stream, - SynchronizedRef, -} from "effect"; -import type { - GitStatusInput, - GitStatusLocalResult, - GitStatusRemoteResult, - GitStatusStreamEvent, -} from "@t3tools/contracts"; -import { mergeGitStatusParts } from "@t3tools/shared/git"; - -import { - GitStatusBroadcaster, - type GitStatusBroadcasterShape, -} from "../Services/GitStatusBroadcaster.ts"; -import { GitManager } from "../Services/GitManager.ts"; - -const GIT_STATUS_REFRESH_INTERVAL = Duration.seconds(30); - -interface GitStatusChange { - readonly cwd: string; - readonly event: GitStatusStreamEvent; -} - -interface CachedValue { - readonly fingerprint: string; - readonly value: T; -} - -interface CachedGitStatus { - readonly local: CachedValue | null; - readonly remote: CachedValue | null; -} - -interface ActiveRemotePoller { - readonly fiber: Fiber.Fiber; - readonly subscriberCount: number; -} - -function normalizeCwd(cwd: string): string { - try { - return realpathSync.native(cwd); - } catch { - return cwd; - } -} - -function fingerprintStatusPart(status: unknown): string { - return JSON.stringify(status); -} - -export const GitStatusBroadcasterLive = Layer.effect( - GitStatusBroadcaster, - Effect.gen(function* () { - const gitManager = yield* GitManager; - const changesPubSub = yield* Effect.acquireRelease( - PubSub.unbounded(), - (pubsub) => PubSub.shutdown(pubsub), - ); - const broadcasterScope = yield* Effect.acquireRelease(Scope.make(), 
(scope) => - Scope.close(scope, Exit.void), - ); - const cacheRef = yield* Ref.make(new Map()); - const pollersRef = yield* SynchronizedRef.make(new Map()); - - const getCachedStatus = Effect.fn("getCachedStatus")(function* (cwd: string) { - return yield* Ref.get(cacheRef).pipe(Effect.map((cache) => cache.get(cwd) ?? null)); - }); - - const updateCachedLocalStatus = Effect.fn("updateCachedLocalStatus")(function* ( - cwd: string, - local: GitStatusLocalResult, - options?: { publish?: boolean }, - ) { - const nextLocal = { - fingerprint: fingerprintStatusPart(local), - value: local, - } satisfies CachedValue; - const shouldPublish = yield* Ref.modify(cacheRef, (cache) => { - const previous = cache.get(cwd) ?? { local: null, remote: null }; - const nextCache = new Map(cache); - nextCache.set(cwd, { - ...previous, - local: nextLocal, - }); - return [previous.local?.fingerprint !== nextLocal.fingerprint, nextCache] as const; - }); - - if (options?.publish && shouldPublish) { - yield* PubSub.publish(changesPubSub, { - cwd, - event: { - _tag: "localUpdated", - local, - }, - }); - } - - return local; - }); - - const updateCachedRemoteStatus = Effect.fn("updateCachedRemoteStatus")(function* ( - cwd: string, - remote: GitStatusRemoteResult | null, - options?: { publish?: boolean }, - ) { - const nextRemote = { - fingerprint: fingerprintStatusPart(remote), - value: remote, - } satisfies CachedValue; - const shouldPublish = yield* Ref.modify(cacheRef, (cache) => { - const previous = cache.get(cwd) ?? 
{ local: null, remote: null }; - const nextCache = new Map(cache); - nextCache.set(cwd, { - ...previous, - remote: nextRemote, - }); - return [previous.remote?.fingerprint !== nextRemote.fingerprint, nextCache] as const; - }); - - if (options?.publish && shouldPublish) { - yield* PubSub.publish(changesPubSub, { - cwd, - event: { - _tag: "remoteUpdated", - remote, - }, - }); - } - - return remote; - }); - - const loadLocalStatus = Effect.fn("loadLocalStatus")(function* (cwd: string) { - const local = yield* gitManager.localStatus({ cwd }); - return yield* updateCachedLocalStatus(cwd, local); - }); - - const loadRemoteStatus = Effect.fn("loadRemoteStatus")(function* (cwd: string) { - const remote = yield* gitManager.remoteStatus({ cwd }); - return yield* updateCachedRemoteStatus(cwd, remote); - }); - - const getOrLoadLocalStatus = Effect.fn("getOrLoadLocalStatus")(function* (cwd: string) { - const cached = yield* getCachedStatus(cwd); - if (cached?.local) { - return cached.local.value; - } - return yield* loadLocalStatus(cwd); - }); - - const getOrLoadRemoteStatus = Effect.fn("getOrLoadRemoteStatus")(function* (cwd: string) { - const cached = yield* getCachedStatus(cwd); - if (cached?.remote) { - return cached.remote.value; - } - return yield* loadRemoteStatus(cwd); - }); - - const getStatus: GitStatusBroadcasterShape["getStatus"] = Effect.fn("getStatus")(function* ( - input: GitStatusInput, - ) { - const normalizedCwd = normalizeCwd(input.cwd); - const [local, remote] = yield* Effect.all([ - getOrLoadLocalStatus(normalizedCwd), - getOrLoadRemoteStatus(normalizedCwd), - ]); - return mergeGitStatusParts(local, remote); - }); - - const refreshLocalStatus: GitStatusBroadcasterShape["refreshLocalStatus"] = Effect.fn( - "refreshLocalStatus", - )(function* (cwd) { - const normalizedCwd = normalizeCwd(cwd); - yield* gitManager.invalidateLocalStatus(normalizedCwd); - const local = yield* gitManager.localStatus({ cwd: normalizedCwd }); - return yield* 
updateCachedLocalStatus(normalizedCwd, local, { publish: true }); - }); - - const refreshRemoteStatus = Effect.fn("refreshRemoteStatus")(function* (cwd: string) { - yield* gitManager.invalidateRemoteStatus(cwd); - const remote = yield* gitManager.remoteStatus({ cwd }); - return yield* updateCachedRemoteStatus(cwd, remote, { publish: true }); - }); - - const refreshStatus: GitStatusBroadcasterShape["refreshStatus"] = Effect.fn("refreshStatus")( - function* (cwd) { - const normalizedCwd = normalizeCwd(cwd); - const [local, remote] = yield* Effect.all([ - refreshLocalStatus(normalizedCwd), - refreshRemoteStatus(normalizedCwd), - ]); - return mergeGitStatusParts(local, remote); - }, - ); - - const makeRemoteRefreshLoop = (cwd: string) => { - const logRefreshFailure = (error: Error) => - Effect.logWarning("git remote status refresh failed", { - cwd, - detail: error.message, - }); - - return refreshRemoteStatus(cwd).pipe( - Effect.catch(logRefreshFailure), - Effect.andThen( - Effect.forever( - Effect.sleep(GIT_STATUS_REFRESH_INTERVAL).pipe( - Effect.andThen(refreshRemoteStatus(cwd).pipe(Effect.catch(logRefreshFailure))), - ), - ), - ), - ); - }; - - const retainRemotePoller = Effect.fn("retainRemotePoller")(function* (cwd: string) { - yield* SynchronizedRef.modifyEffect(pollersRef, (activePollers) => { - const existing = activePollers.get(cwd); - if (existing) { - const nextPollers = new Map(activePollers); - nextPollers.set(cwd, { - ...existing, - subscriberCount: existing.subscriberCount + 1, - }); - return Effect.succeed([undefined, nextPollers] as const); - } - - return makeRemoteRefreshLoop(cwd).pipe( - Effect.forkIn(broadcasterScope), - Effect.map((fiber) => { - const nextPollers = new Map(activePollers); - nextPollers.set(cwd, { - fiber, - subscriberCount: 1, - }); - return [undefined, nextPollers] as const; - }), - ); - }); - }); - - const releaseRemotePoller = Effect.fn("releaseRemotePoller")(function* (cwd: string) { - const pollerToInterrupt = yield* 
SynchronizedRef.modify(pollersRef, (activePollers) => { - const existing = activePollers.get(cwd); - if (!existing) { - return [null, activePollers] as const; - } - - if (existing.subscriberCount > 1) { - const nextPollers = new Map(activePollers); - nextPollers.set(cwd, { - ...existing, - subscriberCount: existing.subscriberCount - 1, - }); - return [null, nextPollers] as const; - } - - const nextPollers = new Map(activePollers); - nextPollers.delete(cwd); - return [existing.fiber, nextPollers] as const; - }); - - if (pollerToInterrupt) { - yield* Fiber.interrupt(pollerToInterrupt).pipe(Effect.ignore); - } - }); - - const streamStatus: GitStatusBroadcasterShape["streamStatus"] = (input) => - Stream.unwrap( - Effect.gen(function* () { - const normalizedCwd = normalizeCwd(input.cwd); - const subscription = yield* PubSub.subscribe(changesPubSub); - const initialLocal = yield* getOrLoadLocalStatus(normalizedCwd); - const initialRemote = (yield* getCachedStatus(normalizedCwd))?.remote?.value ?? 
null; - yield* retainRemotePoller(normalizedCwd); - - const release = releaseRemotePoller(normalizedCwd).pipe(Effect.ignore, Effect.asVoid); - - return Stream.concat( - Stream.make({ - _tag: "snapshot" as const, - local: initialLocal, - remote: initialRemote, - }), - Stream.fromSubscription(subscription).pipe( - Stream.filter((event) => event.cwd === normalizedCwd), - Stream.map((event) => event.event), - ), - ).pipe(Stream.ensuring(release)); - }), - ); - - return { - getStatus, - refreshLocalStatus, - refreshStatus, - streamStatus, - } satisfies GitStatusBroadcasterShape; - }), -); diff --git a/apps/server/src/git/Layers/OpenCodeTextGeneration.test.ts b/apps/server/src/git/Layers/OpenCodeTextGeneration.test.ts deleted file mode 100644 index 28ee0a3e6fe..00000000000 --- a/apps/server/src/git/Layers/OpenCodeTextGeneration.test.ts +++ /dev/null @@ -1,340 +0,0 @@ -import * as NodeServices from "@effect/platform-node/NodeServices"; -import { it } from "@effect/vitest"; -import { Duration, Effect, Layer } from "effect"; -import { TestClock } from "effect/testing"; -import { NetService } from "@t3tools/shared/Net"; -import { beforeEach, expect } from "vitest"; - -import { ServerConfig } from "../../config.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -import { - OpenCodeRuntime, - OpenCodeRuntimeError, - type OpenCodeRuntimeShape, -} from "../../provider/opencodeRuntime.ts"; -import { TextGeneration } from "../Services/TextGeneration.ts"; -import { OpenCodeTextGenerationLive } from "./OpenCodeTextGeneration.ts"; - -const runtimeMock = { - state: { - startCalls: [] as string[], - promptUrls: [] as string[], - authHeaders: [] as Array, - closeCalls: [] as string[], - promptResult: undefined as - | { data?: { info?: { error?: unknown }; parts?: Array<{ type: string; text?: string }> } } - | undefined, - }, - reset() { - this.state.startCalls.length = 0; - this.state.promptUrls.length = 0; - this.state.authHeaders.length = 0; - 
this.state.closeCalls.length = 0; - this.state.promptResult = undefined; - }, -}; - -const OpenCodeRuntimeTestDouble: OpenCodeRuntimeShape = { - startOpenCodeServerProcess: ({ binaryPath }) => - Effect.gen(function* () { - const index = runtimeMock.state.startCalls.length + 1; - const url = `http://127.0.0.1:${4_300 + index}`; - runtimeMock.state.startCalls.push(binaryPath); - // The production runtime binds server lifetime to the caller's scope. - // Mirror that here so the closeCalls probe observes scope close. - yield* Effect.addFinalizer(() => - Effect.sync(() => { - runtimeMock.state.closeCalls.push(url); - }), - ); - return { - url, - exitCode: Effect.never, - }; - }), - connectToOpenCodeServer: ({ serverUrl }) => - Effect.succeed({ - url: serverUrl ?? "http://127.0.0.1:4301", - exitCode: null, - external: Boolean(serverUrl), - }), - runOpenCodeCommand: () => Effect.succeed({ stdout: "", stderr: "", code: 0 }), - createOpenCodeSdkClient: ({ baseUrl, serverPassword }) => - ({ - session: { - create: async () => ({ data: { id: `${baseUrl}/session` } }), - prompt: async () => { - runtimeMock.state.promptUrls.push(baseUrl); - runtimeMock.state.authHeaders.push( - serverPassword ? `Basic ${btoa(`opencode:${serverPassword}`)}` : null, - ); - return ( - runtimeMock.state.promptResult ?? 
{ - data: { - parts: [ - { - type: "text", - text: JSON.stringify({ - subject: "Improve OpenCode reuse", - body: "Reuse one server for the full action.", - }), - }, - ], - }, - } - ); - }, - }, - }) as unknown as ReturnType, - loadOpenCodeInventory: () => - Effect.fail( - new OpenCodeRuntimeError({ - operation: "loadOpenCodeInventory", - detail: "OpenCodeRuntimeTestDouble.loadOpenCodeInventory not used in this test", - cause: null, - }), - ), -}; - -const DEFAULT_TEST_MODEL_SELECTION = { - provider: "opencode" as const, - model: "openai/gpt-5", -}; - -const OPENCODE_TEXT_GENERATION_IDLE_TTL_MS = 30_000; - -const OpenCodeTextGenerationTestLayer = OpenCodeTextGenerationLive.pipe( - Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), - Layer.provideMerge( - ServerSettingsService.layerTest({ - providers: { - opencode: { - binaryPath: "fake-opencode", - }, - }, - }), - ), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3code-opencode-text-generation-test-", - }), - ), - Layer.provideMerge(NetService.layer), - Layer.provideMerge(NodeServices.layer), -); - -const OpenCodeTextGenerationExistingServerTestLayer = OpenCodeTextGenerationLive.pipe( - Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), - Layer.provideMerge( - ServerSettingsService.layerTest({ - providers: { - opencode: { - binaryPath: "fake-opencode", - serverUrl: "http://127.0.0.1:9999", - serverPassword: "secret-password", - }, - }, - }), - ), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3code-opencode-text-generation-existing-server-test-", - }), - ), - Layer.provideMerge(NetService.layer), - Layer.provideMerge(NodeServices.layer), -); - -beforeEach(() => { - runtimeMock.reset(); -}); - -const advanceIdleClock = Effect.gen(function* () { - yield* Effect.yieldNow; - yield* TestClock.adjust(Duration.millis(OPENCODE_TEXT_GENERATION_IDLE_TTL_MS + 1)); - yield* Effect.yieldNow; -}); - 
-it.layer(OpenCodeTextGenerationTestLayer)("OpenCodeTextGenerationLive", (it) => { - it.effect("reuses a warm server across back-to-back requests and closes it after idling", () => - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/opencode-reuse", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); - yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/opencode-reuse", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); - - expect(runtimeMock.state.startCalls).toEqual(["fake-opencode"]); - expect(runtimeMock.state.promptUrls).toEqual([ - "http://127.0.0.1:4301", - "http://127.0.0.1:4301", - ]); - expect(runtimeMock.state.closeCalls).toEqual([]); - - yield* advanceIdleClock; - - expect(runtimeMock.state.closeCalls).toEqual(["http://127.0.0.1:4301"]); - }).pipe(Effect.provide(TestClock.layer())), - ); - - it.effect("starts a new server after the warm server idles out", () => - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/opencode-reuse", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); - - yield* advanceIdleClock; - - yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/opencode-reuse", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); - - expect(runtimeMock.state.startCalls).toEqual(["fake-opencode", "fake-opencode"]); - expect(runtimeMock.state.promptUrls).toEqual([ - "http://127.0.0.1:4301", - "http://127.0.0.1:4302", 
- ]); - expect(runtimeMock.state.closeCalls).toEqual(["http://127.0.0.1:4301"]); - }).pipe(Effect.provide(TestClock.layer())), - ); - - it.effect("returns a typed empty-output error when OpenCode returns no text parts", () => - Effect.gen(function* () { - runtimeMock.state.promptResult = { data: {} }; - const textGeneration = yield* TextGeneration; - - const error = yield* textGeneration - .generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/opencode-reuse", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }) - .pipe(Effect.flip); - - expect(error.message).toContain("OpenCode returned empty output."); - }), - ); - - it.effect("parses JSON returned as plain text output", () => - Effect.gen(function* () { - runtimeMock.state.promptResult = { - data: { - parts: [ - { - type: "text", - text: 'Here is the result:\n{"subject":"Tighten OpenCode parsing","body":"Handle JSON text output locally."}', - }, - ], - }, - }; - const textGeneration = yield* TextGeneration; - - const result = yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/opencode-reuse", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); - - expect(result).toEqual({ - subject: "Tighten OpenCode parsing", - body: "Handle JSON text output locally.", - }); - }), - ); - - it.effect("surfaces the upstream OpenCode structured-output error message", () => - Effect.gen(function* () { - runtimeMock.state.promptResult = { - data: { - info: { - error: { - name: "StructuredOutputError", - data: { - message: "Model did not produce structured output", - retries: 2, - }, - }, - }, - }, - }; - const textGeneration = yield* TextGeneration; - - const error = yield* textGeneration - .generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/opencode-reuse", - stagedSummary: "M README.md", - stagedPatch: 
"diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }) - .pipe(Effect.flip); - - expect(error.message).toContain("Model did not produce structured output"); - }), - ); -}); - -it.layer(OpenCodeTextGenerationExistingServerTestLayer)( - "OpenCodeTextGenerationLive with configured server URL", - (it) => { - it.effect("reuses a configured OpenCode server URL without spawning or applying idle TTL", () => - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/opencode-reuse", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); - yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/opencode-reuse", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); - - expect(runtimeMock.state.startCalls).toEqual([]); - expect(runtimeMock.state.promptUrls).toEqual([ - "http://127.0.0.1:9999", - "http://127.0.0.1:9999", - ]); - expect(runtimeMock.state.authHeaders).toEqual([ - `Basic ${btoa("opencode:secret-password")}`, - `Basic ${btoa("opencode:secret-password")}`, - ]); - - yield* advanceIdleClock; - - expect(runtimeMock.state.closeCalls).toEqual([]); - }).pipe(Effect.provide(TestClock.layer())), - ); - }, -); diff --git a/apps/server/src/git/Layers/RoutingTextGeneration.ts b/apps/server/src/git/Layers/RoutingTextGeneration.ts deleted file mode 100644 index a4d8cc494f1..00000000000 --- a/apps/server/src/git/Layers/RoutingTextGeneration.ts +++ /dev/null @@ -1,151 +0,0 @@ -/** - * RoutingTextGeneration – Dispatches text generation requests to the - * appropriate CLI implementation based on the provider in each request input. 
- * - * Currently supported providers with dedicated layers: - * - `"claudeAgent"` → Claude CLI layer - * - `"copilot"` → Copilot text-generation layer (partial – falls back to - * codex for branch names / thread titles) - * - `"codex"` → Codex CLI layer (also the default fallback) - * - `"cursor"` → Cursor text-generation layer (ACP-based) - * - `"opencode"` → OpenCode text-generation layer (SDK-based) - * - * Providers without a dedicated CLI text-generation layer (geminiCli, amp, - * kilo) fall back to Codex. - * - * @module RoutingTextGeneration - */ -import { Effect, Layer, Context } from "effect"; - -import type { ProviderKind } from "@t3tools/contracts"; -import { TextGeneration, type TextGenerationShape } from "../Services/TextGeneration.ts"; -import { - CopilotTextGeneration, - type CopilotTextGenerationShape, -} from "../Services/CopilotTextGeneration.ts"; -import { CodexTextGenerationLive } from "./CodexTextGeneration.ts"; -import { ClaudeTextGenerationLive } from "./ClaudeTextGeneration.ts"; -import { makeCopilotTextGenerationLive } from "./CopilotTextGeneration.ts"; -import { CursorTextGenerationLive } from "./CursorTextGeneration.ts"; -import { OpenCodeTextGenerationLive } from "./OpenCodeTextGeneration.ts"; - -// --------------------------------------------------------------------------- -// Supported git text-generation providers. Providers not in this set fall -// back to codex (the most broadly compatible CLI implementation). 
-// --------------------------------------------------------------------------- - -const GIT_TEXT_GEN_PROVIDERS = new Set([ - "codex", - "claudeAgent", - "copilot", - "cursor", - "opencode", -]); - -class CodexTextGen extends Context.Service()( - "t3/git/Layers/RoutingTextGeneration/CodexTextGen", -) {} - -class ClaudeTextGen extends Context.Service()( - "t3/git/Layers/RoutingTextGeneration/ClaudeTextGen", -) {} - -class CopilotTextGen extends Context.Service()( - "t3/git/Layers/RoutingTextGeneration/CopilotTextGen", -) {} - -class CursorTextGen extends Context.Service()( - "t3/git/Layers/RoutingTextGeneration/CursorTextGen", -) {} - -class OpenCodeTextGen extends Context.Service()( - "t3/git/Layers/RoutingTextGeneration/OpenCodeTextGen", -) {} - -// --------------------------------------------------------------------------- -// Routing implementation -// --------------------------------------------------------------------------- - -const makeRoutingTextGeneration = Effect.gen(function* () { - const codex = yield* CodexTextGen; - const claude = yield* ClaudeTextGen; - const copilot = yield* CopilotTextGen; - const cursor = yield* CursorTextGen; - const openCode = yield* OpenCodeTextGen; - - const route = (provider?: ProviderKind): TextGenerationShape => { - if (!provider || !GIT_TEXT_GEN_PROVIDERS.has(provider)) return codex; - if (provider === "claudeAgent") return claude; - if (provider === "cursor") return cursor; - if (provider === "opencode") return openCode; - if (provider === "copilot") { - return { - generateCommitMessage: copilot.generateCommitMessage, - generatePrContent: copilot.generatePrContent, - // Copilot text generation doesn't support these yet; fall back to codex. 
- generateBranchName: codex.generateBranchName, - generateThreadTitle: codex.generateThreadTitle, - }; - } - return codex; - }; - - return { - generateCommitMessage: (input) => - route(input.modelSelection.provider).generateCommitMessage(input), - generatePrContent: (input) => route(input.modelSelection.provider).generatePrContent(input), - generateBranchName: (input) => route(input.modelSelection.provider).generateBranchName(input), - generateThreadTitle: (input) => route(input.modelSelection.provider).generateThreadTitle(input), - } satisfies TextGenerationShape; -}); - -const InternalCodexLayer = Layer.effect( - CodexTextGen, - Effect.gen(function* () { - const svc = yield* TextGeneration; - return svc; - }), -).pipe(Layer.provide(CodexTextGenerationLive)); - -const InternalClaudeLayer = Layer.effect( - ClaudeTextGen, - Effect.gen(function* () { - const svc = yield* TextGeneration; - return svc; - }), -).pipe(Layer.provide(ClaudeTextGenerationLive)); - -const InternalCopilotLayer = Layer.effect( - CopilotTextGen, - Effect.gen(function* () { - const svc = yield* CopilotTextGeneration; - return svc; - }), -).pipe(Layer.provide(makeCopilotTextGenerationLive())); - -const InternalCursorLayer = Layer.effect( - CursorTextGen, - Effect.gen(function* () { - const svc = yield* TextGeneration; - return svc; - }), -).pipe(Layer.provide(CursorTextGenerationLive)); - -const InternalOpenCodeLayer = Layer.effect( - OpenCodeTextGen, - Effect.gen(function* () { - const svc = yield* TextGeneration; - return svc; - }), -).pipe(Layer.provide(OpenCodeTextGenerationLive)); - -export const RoutingTextGenerationLive = Layer.effect( - TextGeneration, - makeRoutingTextGeneration, -).pipe( - Layer.provide(InternalCodexLayer), - Layer.provide(InternalClaudeLayer), - Layer.provide(InternalCopilotLayer), - Layer.provide(InternalCursorLayer), - Layer.provide(InternalOpenCodeLayer), -); diff --git a/apps/server/src/git/Layers/SessionTextGeneration.test.ts 
b/apps/server/src/git/Layers/SessionTextGeneration.test.ts deleted file mode 100644 index b5dd2924486..00000000000 --- a/apps/server/src/git/Layers/SessionTextGeneration.test.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { describe, expect, it } from "vitest"; - -import { SessionTextGenerationLive } from "./SessionTextGeneration.ts"; - -describe("SessionTextGeneration", () => { - it("exports a valid Layer", () => { - expect(SessionTextGenerationLive).toBeDefined(); - }); -}); diff --git a/apps/server/src/git/Layers/SessionTextGeneration.ts b/apps/server/src/git/Layers/SessionTextGeneration.ts deleted file mode 100644 index 66c0dda9bf6..00000000000 --- a/apps/server/src/git/Layers/SessionTextGeneration.ts +++ /dev/null @@ -1,492 +0,0 @@ -import { randomUUID } from "node:crypto"; - -import type { ProviderRuntimeEvent, ThreadId } from "@t3tools/contracts"; -import { Effect, Layer, Option, Queue, Schema, SchemaIssue, Stream } from "effect"; -import { sanitizeBranchFragment, sanitizeFeatureBranchName } from "@t3tools/shared/git"; -import { resolveModelSlugForProvider } from "@t3tools/shared/model"; - -import { ProviderService } from "../../provider/Services/ProviderService.ts"; -import { TextGenerationError } from "@t3tools/contracts"; -import { - type BranchNameGenerationInput, - type BranchNameGenerationResult, - type CommitMessageGenerationResult, - type PrContentGenerationResult, - type ThreadTitleGenerationInput, - type ThreadTitleGenerationResult, -} from "../Services/TextGeneration.ts"; -import { - SessionTextGeneration, - type SessionTextGenerationShape, -} from "../Services/SessionTextGeneration.ts"; - -const PROVIDER_TEXT_GENERATION_TIMEOUT_MS = 180_000; - -function limitSection(value: string, maxChars: number): string { - if (value.length <= maxChars) return value; - return `${value.slice(0, maxChars)}\n\n[truncated]`; -} - -function sanitizePrTitle(raw: string): string { - const singleLine = raw.trim().split(/\r?\n/g)[0]?.trim() ?? 
""; - return singleLine.length > 0 ? singleLine : "Update project changes"; -} - -function sanitizeThreadTitle(raw: string): string { - const singleLine = raw.trim().split(/\r?\n/g)[0]?.trim() ?? ""; - return singleLine.length > 0 ? singleLine : "New thread"; -} - -function extractJsonObject(raw: string): string { - const trimmed = raw.trim(); - if (trimmed.startsWith("```")) { - const fenced = trimmed.replace(/^```(?:json)?\s*/i, "").replace(/\s*```$/i, ""); - return fenced.trim(); - } - const start = trimmed.indexOf("{"); - const end = trimmed.lastIndexOf("}"); - if (start !== -1 && end !== -1 && end > start) { - return trimmed.slice(start, end + 1); - } - return trimmed; -} - -function toThreadId(value: string): ThreadId { - return value as ThreadId; -} - -function normalizeProviderTextGenerationError( - operation: - | "generateCommitMessage" - | "generatePrContent" - | "generateBranchName" - | "generateThreadTitle", - error: unknown, - fallback: string, -): TextGenerationError { - if (Schema.is(TextGenerationError)(error)) { - return error; - } - - if (error instanceof Error) { - return new TextGenerationError({ - operation, - detail: `${fallback}: ${error.message}`, - cause: error, - }); - } - - return new TextGenerationError({ - operation, - detail: fallback, - cause: error, - }); -} - -function decodeJsonResponse( - operation: - | "generateCommitMessage" - | "generatePrContent" - | "generateBranchName" - | "generateThreadTitle", - raw: string, - schema: S, -): Effect.Effect { - return Effect.gen(function* () { - const jsonText = extractJsonObject(raw); - if (jsonText.length === 0) { - return yield* new TextGenerationError({ - operation, - detail: "Provider returned an empty response.", - }); - } - - const parsed = yield* Effect.try({ - try: () => JSON.parse(jsonText) as unknown, - catch: (cause) => - normalizeProviderTextGenerationError(operation, cause, "Provider returned invalid JSON"), - }); - - return yield* 
Schema.decodeUnknownEffect(schema)(parsed).pipe( - Effect.mapError( - (cause) => - new TextGenerationError({ - operation, - detail: `Provider returned an unexpected payload: ${SchemaIssue.makeFormatterDefault()(cause.issue)}`, - cause, - }), - ), - ); - }); -} - -function assistantMessageFromEvent(event: ProviderRuntimeEvent): string | null { - if ( - event.type !== "item.completed" || - event.payload.itemType !== "assistant_message" || - typeof event.payload.detail !== "string" - ) { - return null; - } - const trimmed = event.payload.detail.trim(); - return trimmed.length > 0 ? trimmed : null; -} - -const makeSessionTextGeneration = Effect.gen(function* () { - const providerService = yield* ProviderService; - - const runProviderJson = ({ - operation, - cwd, - provider, - model, - prompt, - attachments, - schema, - }: { - operation: - | "generateCommitMessage" - | "generatePrContent" - | "generateBranchName" - | "generateThreadTitle"; - cwd: string; - provider: BranchNameGenerationInput["provider"]; - model: BranchNameGenerationInput["model"]; - prompt: string; - attachments?: BranchNameGenerationInput["attachments"]; - schema: S; - }): Effect.Effect => - Effect.gen(function* () { - const resolvedProvider = provider ?? 
"codex"; - const resolvedModel = resolveModelSlugForProvider(resolvedProvider, model); - const threadId = toThreadId(`git-textgen-${operation}-${randomUUID()}`); - const eventQueue = yield* Queue.unbounded(); - - yield* Stream.runForEach(providerService.streamEvents, (event) => { - if (event.threadId !== threadId) { - return Effect.void; - } - return Queue.offer(eventQueue, event).pipe(Effect.asVoid); - }).pipe(Effect.forkScoped); - - const cleanup = providerService.stopSession({ threadId }).pipe( - Effect.tapError((e) => Effect.logWarning("Failed to stop text generation session", e)), - Effect.orElseSucceed(() => undefined), - Effect.asVoid, - ); - - return yield* Effect.gen(function* () { - yield* providerService.startSession(threadId, { - threadId, - provider: resolvedProvider, - cwd, - ...(resolvedModel - ? { modelSelection: { provider: resolvedProvider, model: resolvedModel } as never } - : {}), - runtimeMode: "approval-required", - }); - - const turn = yield* providerService.sendTurn({ - threadId, - input: prompt, - ...(resolvedModel - ? { modelSelection: { provider: resolvedProvider, model: resolvedModel } as never } - : {}), - ...(attachments && attachments.length > 0 ? 
{ attachments } : {}), - interactionMode: "default", - }); - - let assistantText = ""; - let fallbackAssistantMessage: string | null = null; - - while (true) { - const event = yield* Queue.take(eventQueue); - if (event.turnId !== undefined && event.turnId !== turn.turnId) { - continue; - } - - if (event.type === "content.delta" && event.payload.streamKind === "assistant_text") { - assistantText += event.payload.delta; - continue; - } - - const assistantMessage = assistantMessageFromEvent(event); - if (assistantMessage && fallbackAssistantMessage === null) { - fallbackAssistantMessage = assistantMessage; - continue; - } - - if (event.type === "request.opened") { - return yield* new TextGenerationError({ - operation, - detail: `The ${resolvedProvider} provider requested '${event.payload.requestType}' while generating git text. Git text generation must run without tools or approvals.`, - }); - } - - if (event.type === "user-input.requested") { - return yield* new TextGenerationError({ - operation, - detail: `The ${resolvedProvider} provider requested interactive input while generating git text.`, - }); - } - - if (event.type === "runtime.error") { - return yield* new TextGenerationError({ - operation, - detail: `${resolvedProvider} provider runtime error: ${event.payload.message}`, - }); - } - - if (event.type === "session.exited") { - return yield* new TextGenerationError({ - operation, - detail: `${resolvedProvider} provider session exited unexpectedly during text generation.`, - }); - } - - if (event.type === "turn.completed") { - if (event.payload.state !== "completed") { - return yield* new TextGenerationError({ - operation, - detail: - event.payload.errorMessage ?? 
- `${resolvedProvider} provider turn ended with state '${event.payload.state}'.`, - }); - } - - const responseText = assistantText.trim() || fallbackAssistantMessage?.trim() || ""; - return yield* decodeJsonResponse(operation, responseText, schema); - } - } - }).pipe( - Effect.timeoutOption(PROVIDER_TEXT_GENERATION_TIMEOUT_MS), - Effect.flatMap( - Option.match({ - onNone: () => - Effect.fail( - new TextGenerationError({ - operation, - detail: `${resolvedProvider} provider request timed out.`, - }), - ), - onSome: (result) => Effect.succeed(result), - }), - ), - Effect.ensuring(cleanup), - Effect.scoped, - ); - }).pipe( - Effect.mapError((cause) => - normalizeProviderTextGenerationError( - operation, - cause, - "Provider git text generation failed", - ), - ), - ); - - const generateCommitMessage: SessionTextGenerationShape["generateCommitMessage"] = (input) => { - const wantsBranch = input.includeBranch === true; - const prompt = [ - "You write concise git commit messages.", - "Answer using only valid JSON. Do not use tools, do not ask for approvals, and do not add markdown fences or prose.", - wantsBranch - ? 'Return a JSON object with keys: "subject", "body", "branch".' - : 'Return a JSON object with keys: "subject", "body".', - "Rules:", - "- subject must be imperative, <= 72 chars, and have no trailing period", - "- body can be an empty string or short bullet points", - ...(wantsBranch - ? ["- branch must be a short semantic git branch fragment for this change"] - : []), - "- capture the primary user-visible or developer-visible change", - "", - `Branch: ${input.branch ?? "(detached)"}`, - "", - "Staged files:", - limitSection(input.stagedSummary, 6_000), - "", - "Staged patch:", - limitSection(input.stagedPatch, 40_000), - ].join("\n"); - - const schema = wantsBranch - ? 
Schema.Struct({ - subject: Schema.String, - body: Schema.String, - branch: Schema.String, - }) - : Schema.Struct({ - subject: Schema.String, - body: Schema.String, - }); - - return runProviderJson({ - operation: "generateCommitMessage", - cwd: input.cwd, - provider: input.provider, - model: input.model, - prompt, - schema, - }).pipe( - Effect.map( - (generated) => - ({ - subject: generated.subject, - body: generated.body.trim(), - ...("branch" in generated && typeof generated.branch === "string" - ? { branch: sanitizeFeatureBranchName(generated.branch) } - : {}), - }) satisfies CommitMessageGenerationResult, - ), - ); - }; - - const generatePrContent: SessionTextGenerationShape["generatePrContent"] = (input) => { - const prompt = [ - "You write GitHub pull request content.", - "Answer using only valid JSON. Do not use tools, do not ask for approvals, and do not add markdown fences or prose.", - 'Return a JSON object with keys: "title", "body".', - "Rules:", - "- title should be concise and specific", - "- body must be markdown and include headings '## Summary' and '## Testing'", - "- under Summary, provide short bullet points", - "- under Testing, include bullet points with concrete checks or 'Not run' where appropriate", - "", - `Base branch: ${input.baseBranch}`, - `Head branch: ${input.headBranch}`, - "", - "Commits:", - limitSection(input.commitSummary, 12_000), - "", - "Diff stat:", - limitSection(input.diffSummary, 12_000), - "", - "Diff patch:", - limitSection(input.diffPatch, 40_000), - ].join("\n"); - - return runProviderJson({ - operation: "generatePrContent", - cwd: input.cwd, - provider: input.provider, - model: input.model, - prompt, - schema: Schema.Struct({ - title: Schema.String, - body: Schema.String, - }), - }).pipe( - Effect.map( - (generated) => - ({ - title: sanitizePrTitle(generated.title), - body: generated.body.trim(), - }) satisfies PrContentGenerationResult, - ), - ); - }; - - const generateBranchName: 
SessionTextGenerationShape["generateBranchName"] = (input) => { - const attachmentLines = (input.attachments ?? []).map( - (attachment) => - `- ${attachment.name} (${attachment.mimeType}, ${attachment.sizeBytes} bytes)`, - ); - const promptSections = [ - "You generate concise git branch names.", - "Answer using only valid JSON. Do not use tools, do not ask for approvals, and do not add markdown fences or prose.", - 'Return a JSON object with key: "branch".', - "Rules:", - "- Branch should describe the requested work from the user message.", - "- Keep it short and specific (2-6 words).", - "- Use plain words only, no issue prefixes and no punctuation-heavy text.", - "- If images are attached, use them as primary context for visual/UI issues.", - "", - "User message:", - limitSection(input.message, 8_000), - ]; - if (attachmentLines.length > 0) { - promptSections.push( - "", - "Attachment metadata:", - limitSection(attachmentLines.join("\n"), 4_000), - ); - } - - return runProviderJson({ - operation: "generateBranchName", - cwd: input.cwd, - provider: input.provider, - model: input.model, - prompt: promptSections.join("\n"), - attachments: input.attachments, - schema: Schema.Struct({ - branch: Schema.String, - }), - }).pipe( - Effect.map( - (generated) => - ({ - branch: sanitizeBranchFragment(generated.branch), - }) satisfies BranchNameGenerationResult, - ), - ); - }; - - const generateThreadTitle: SessionTextGenerationShape["generateThreadTitle"] = (input) => { - const attachmentLines = (input.attachments ?? []).map( - (attachment) => - `- ${attachment.name} (${attachment.mimeType}, ${attachment.sizeBytes} bytes)`, - ); - const promptSections = [ - "You generate concise thread titles.", - "Answer using only valid JSON. 
Do not use tools, do not ask for approvals, and do not add markdown fences or prose.", - 'Return a JSON object with key: "title".', - "Rules:", - "- Keep the title short and specific.", - "- Use the user's request as the main signal.", - "- If images are attached, use them as primary context for visual/UI issues.", - "", - "User message:", - limitSection(input.message, 8_000), - ]; - if (attachmentLines.length > 0) { - promptSections.push( - "", - "Attachment metadata:", - limitSection(attachmentLines.join("\n"), 4_000), - ); - } - - return runProviderJson({ - operation: "generateThreadTitle", - cwd: input.cwd, - provider: input.modelSelection.provider, - model: input.modelSelection.model, - prompt: promptSections.join("\n"), - attachments: input.attachments, - schema: Schema.Struct({ - title: Schema.String, - }), - }).pipe( - Effect.map( - (generated) => - ({ - title: sanitizeThreadTitle(generated.title), - }) satisfies ThreadTitleGenerationResult, - ), - ); - }; - - return { - generateCommitMessage, - generatePrContent, - generateBranchName, - generateThreadTitle, - } satisfies SessionTextGenerationShape; -}); - -export const SessionTextGenerationLive = Layer.effect( - SessionTextGeneration, - makeSessionTextGeneration, -); diff --git a/apps/server/src/git/Services/CopilotTextGeneration.ts b/apps/server/src/git/Services/CopilotTextGeneration.ts deleted file mode 100644 index 45e2f117da1..00000000000 --- a/apps/server/src/git/Services/CopilotTextGeneration.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Context } from "effect"; -import type { Effect } from "effect"; - -import type { TextGenerationError } from "@t3tools/contracts"; -import type { - CommitMessageGenerationInput, - CommitMessageGenerationResult, - PrContentGenerationInput, - PrContentGenerationResult, -} from "./TextGeneration.ts"; - -export interface CopilotTextGenerationShape { - readonly generateCommitMessage: ( - input: CommitMessageGenerationInput, - ) => Effect.Effect; - readonly generatePrContent: 
( - input: PrContentGenerationInput, - ) => Effect.Effect; -} - -export class CopilotTextGeneration extends Context.Service< - CopilotTextGeneration, - CopilotTextGenerationShape ->()("t3/git/Services/CopilotTextGeneration") {} diff --git a/apps/server/src/git/Services/GitCore.ts b/apps/server/src/git/Services/GitCore.ts deleted file mode 100644 index 9f3bc0b9b91..00000000000 --- a/apps/server/src/git/Services/GitCore.ts +++ /dev/null @@ -1,313 +0,0 @@ -/** - * GitCore - Effect service contract for low-level Git operations. - * - * Wraps core repository primitives used by higher-level orchestration - * services and WebSocket routes. - * - * @module GitCore - */ -import { Context } from "effect"; -import type { Effect } from "effect"; -import type { - GitCheckoutInput, - GitCheckoutResult, - GitCreateBranchInput, - GitCreateBranchResult, - GitCreateWorktreeInput, - GitCreateWorktreeResult, - GitInitInput, - GitListBranchesInput, - GitListBranchesResult, - GitPullResult, - GitRemoveWorktreeInput, - GitStatusInput, - GitStatusResult, -} from "@t3tools/contracts"; - -import type { GitCommandError } from "@t3tools/contracts"; - -export interface ExecuteGitInput { - readonly operation: string; - readonly cwd: string; - readonly args: ReadonlyArray; - readonly stdin?: string; - readonly env?: NodeJS.ProcessEnv; - readonly allowNonZeroExit?: boolean; - readonly timeoutMs?: number; - readonly maxOutputBytes?: number; - readonly truncateOutputAtMaxBytes?: boolean; - readonly progress?: ExecuteGitProgress; -} - -export interface ExecuteGitResult { - readonly code: number; - readonly stdout: string; - readonly stderr: string; - readonly stdoutTruncated: boolean; - readonly stderrTruncated: boolean; -} - -export interface GitStatusDetails extends Omit { - upstreamRef: string | null; -} - -export interface GitPreparedCommitContext { - stagedSummary: string; - stagedPatch: string; -} - -export interface ExecuteGitProgress { - readonly onStdoutLine?: (line: string) => 
Effect.Effect; - readonly onStderrLine?: (line: string) => Effect.Effect; - readonly onHookStarted?: (hookName: string) => Effect.Effect; - readonly onHookFinished?: (input: { - hookName: string; - exitCode: number | null; - durationMs: number | null; - }) => Effect.Effect; -} - -export interface GitCommitProgress { - readonly onOutputLine?: (input: { - stream: "stdout" | "stderr"; - text: string; - }) => Effect.Effect; - readonly onHookStarted?: (hookName: string) => Effect.Effect; - readonly onHookFinished?: (input: { - hookName: string; - exitCode: number | null; - durationMs: number | null; - }) => Effect.Effect; -} - -export interface GitCommitOptions { - readonly timeoutMs?: number; - readonly progress?: GitCommitProgress; -} - -export interface GitPushResult { - status: "pushed" | "skipped_up_to_date"; - branch: string; - upstreamBranch?: string | undefined; - setUpstream?: boolean | undefined; -} - -export interface GitRangeContext { - commitSummary: string; - diffSummary: string; - diffPatch: string; -} - -export interface GitListWorkspaceFilesResult { - readonly paths: ReadonlyArray; - readonly truncated: boolean; -} - -export interface GitRenameBranchInput { - cwd: string; - oldBranch: string; - newBranch: string; -} - -export interface GitRenameBranchResult { - branch: string; -} - -export interface GitFetchPullRequestBranchInput { - cwd: string; - prNumber: number; - branch: string; -} - -export interface GitEnsureRemoteInput { - cwd: string; - preferredName: string; - url: string; -} - -export interface GitFetchRemoteBranchInput { - cwd: string; - remoteName: string; - remoteBranch: string; - localBranch: string; -} - -export interface GitSetBranchUpstreamInput { - cwd: string; - branch: string; - remoteName: string; - remoteBranch: string; -} - -/** - * GitCoreShape - Service API for low-level Git repository interactions. - */ -export interface GitCoreShape { - /** - * Execute a raw Git command. 
- */ - readonly execute: (input: ExecuteGitInput) => Effect.Effect; - - /** - * Read Git status for a repository. - */ - readonly status: (input: GitStatusInput) => Effect.Effect; - - /** - * Read detailed working tree / branch status for a repository. - */ - readonly statusDetails: (cwd: string) => Effect.Effect; - - /** - * Read detailed working tree / branch status without refreshing remote tracking refs. - */ - readonly statusDetailsLocal: (cwd: string) => Effect.Effect; - - /** - * Build staged change context for commit generation. - */ - readonly prepareCommitContext: ( - cwd: string, - filePaths?: readonly string[], - ) => Effect.Effect; - - /** - * Create a commit with provided subject/body. - */ - readonly commit: ( - cwd: string, - subject: string, - body: string, - options?: GitCommitOptions, - ) => Effect.Effect<{ commitSha: string }, GitCommandError>; - - /** - * Push current branch, setting upstream if needed. - */ - readonly pushCurrentBranch: ( - cwd: string, - fallbackBranch: string | null, - ) => Effect.Effect; - - /** - * Collect commit/diff context between base branch and current HEAD. - */ - readonly readRangeContext: ( - cwd: string, - baseBranch: string, - ) => Effect.Effect; - - /** - * Read a Git config value from the local repository. - */ - readonly readConfigValue: ( - cwd: string, - key: string, - ) => Effect.Effect; - - /** - * Determine whether the provided cwd is inside a git work tree. - */ - readonly isInsideWorkTree: (cwd: string) => Effect.Effect; - - /** - * List tracked and untracked workspace file paths relative to cwd. - */ - readonly listWorkspaceFiles: ( - cwd: string, - ) => Effect.Effect; - - /** - * Remove gitignored paths from a relative path list. - */ - readonly filterIgnoredPaths: ( - cwd: string, - relativePaths: ReadonlyArray, - ) => Effect.Effect, GitCommandError>; - - /** - * List local + remote branches and branch metadata. 
- */ - readonly listBranches: ( - input: GitListBranchesInput, - ) => Effect.Effect; - - /** - * Pull current branch from upstream using fast-forward only. - */ - readonly pullCurrentBranch: (cwd: string) => Effect.Effect; - - /** - * Create a worktree and branch from a base branch. - */ - readonly createWorktree: ( - input: GitCreateWorktreeInput, - ) => Effect.Effect; - - /** - * Materialize a GitHub pull request head as a local branch without switching checkout. - */ - readonly fetchPullRequestBranch: ( - input: GitFetchPullRequestBranchInput, - ) => Effect.Effect; - - /** - * Ensure a named remote exists for the provided URL, returning the reused or created remote name. - */ - readonly ensureRemote: (input: GitEnsureRemoteInput) => Effect.Effect; - - /** - * Fetch a remote branch into a local branch without checkout. - */ - readonly fetchRemoteBranch: ( - input: GitFetchRemoteBranchInput, - ) => Effect.Effect; - - /** - * Set the upstream tracking branch for a local branch. - */ - readonly setBranchUpstream: ( - input: GitSetBranchUpstreamInput, - ) => Effect.Effect; - - /** - * Remove an existing worktree. - */ - readonly removeWorktree: (input: GitRemoveWorktreeInput) => Effect.Effect; - - /** - * Rename an existing local branch. - */ - readonly renameBranch: ( - input: GitRenameBranchInput, - ) => Effect.Effect; - - /** - * Create a local branch. - */ - readonly createBranch: ( - input: GitCreateBranchInput, - ) => Effect.Effect; - - /** - * Checkout an existing branch and refresh its upstream metadata in background. - */ - readonly checkoutBranch: ( - input: GitCheckoutInput, - ) => Effect.Effect; - - /** - * Initialize a repository in the provided directory. - */ - readonly initRepo: (input: GitInitInput) => Effect.Effect; - - /** - * List local branch names (short format). - */ - readonly listLocalBranchNames: (cwd: string) => Effect.Effect; -} - -/** - * GitCore - Service tag for low-level Git repository operations. 
- */ -export class GitCore extends Context.Service()("t3/git/Services/GitCore") {} diff --git a/apps/server/src/git/Services/GitHubCli.ts b/apps/server/src/git/Services/GitHubCli.ts deleted file mode 100644 index 53138926629..00000000000 --- a/apps/server/src/git/Services/GitHubCli.ts +++ /dev/null @@ -1,104 +0,0 @@ -/** - * GitHubCli - Effect service contract for `gh` process interactions. - * - * Provides thin command execution helpers used by Git workflow orchestration. - * - * @module GitHubCli - */ -import { Context } from "effect"; -import type { Effect } from "effect"; - -import type { ProcessRunResult } from "../../processRunner.ts"; -import type { GitHubCliError } from "@t3tools/contracts"; - -export interface GitHubPullRequestSummary { - readonly number: number; - readonly title: string; - readonly url: string; - readonly baseRefName: string; - readonly headRefName: string; - readonly state?: "open" | "closed" | "merged"; - readonly isCrossRepository?: boolean; - readonly headRepositoryNameWithOwner?: string | null; - readonly headRepositoryOwnerLogin?: string | null; -} - -export interface GitHubRepositoryCloneUrls { - readonly nameWithOwner: string; - readonly url: string; - readonly sshUrl: string; -} - -/** - * GitHubCliShape - Service API for executing GitHub CLI commands. - */ -export interface GitHubCliShape { - /** - * Execute a GitHub CLI command and return full process output. - */ - readonly execute: (input: { - readonly cwd: string; - readonly args: ReadonlyArray; - readonly timeoutMs?: number; - }) => Effect.Effect; - - /** - * List open pull requests for a head branch. - */ - readonly listOpenPullRequests: (input: { - readonly cwd: string; - readonly headSelector: string; - readonly limit?: number; - }) => Effect.Effect, GitHubCliError>; - - /** - * Resolve a pull request by URL, number, or branch-ish identifier. 
- */ - readonly getPullRequest: (input: { - readonly cwd: string; - readonly reference: string; - }) => Effect.Effect; - - /** - * Resolve clone URLs for a GitHub repository. - */ - readonly getRepositoryCloneUrls: (input: { - readonly cwd: string; - readonly repository: string; - }) => Effect.Effect; - - /** - * Create a pull request from branch context and body file. - */ - readonly createPullRequest: (input: { - readonly cwd: string; - readonly baseBranch: string; - readonly headSelector: string; - readonly title: string; - readonly bodyFile: string; - readonly repository?: string; - }) => Effect.Effect; - - /** - * Resolve repository default branch through GitHub metadata. - */ - readonly getDefaultBranch: (input: { - readonly cwd: string; - }) => Effect.Effect; - - /** - * Checkout a pull request into the current repository worktree. - */ - readonly checkoutPullRequest: (input: { - readonly cwd: string; - readonly reference: string; - readonly force?: boolean; - }) => Effect.Effect; -} - -/** - * GitHubCli - Service tag for GitHub CLI process execution. - */ -export class GitHubCli extends Context.Service()( - "t3/git/Services/GitHubCli", -) {} diff --git a/apps/server/src/git/Services/GitManager.ts b/apps/server/src/git/Services/GitManager.ts deleted file mode 100644 index 29c762195e5..00000000000 --- a/apps/server/src/git/Services/GitManager.ts +++ /dev/null @@ -1,104 +0,0 @@ -/** - * GitManager - Effect service contract for stacked Git workflows. - * - * Orchestrates status inspection and commit/push/PR flows by composing - * lower-level Git and external tool services. 
- * - * @module GitManager - */ -import { - GitActionProgressEvent, - GitPreparePullRequestThreadInput, - GitPreparePullRequestThreadResult, - GitPullRequestRefInput, - GitResolvePullRequestResult, - GitRunStackedActionInput, - GitRunStackedActionResult, - GitStatusLocalResult, - GitStatusRemoteResult, - GitStatusInput, - GitStatusResult, -} from "@t3tools/contracts"; -import { Context } from "effect"; -import type { Effect } from "effect"; -import type { GitManagerServiceError } from "@t3tools/contracts"; - -export interface GitActionProgressReporter { - readonly publish: (event: GitActionProgressEvent) => Effect.Effect; -} - -export interface GitRunStackedActionOptions { - readonly actionId?: string; - readonly progressReporter?: GitActionProgressReporter; -} - -/** - * GitManagerShape - Service API for high-level Git workflow actions. - */ -export interface GitManagerShape { - /** - * Read current repository Git status plus open PR metadata when available. - */ - readonly status: ( - input: GitStatusInput, - ) => Effect.Effect; - - /** - * Read local repository status without remote hosting enrichment. - */ - readonly localStatus: ( - input: GitStatusInput, - ) => Effect.Effect; - - /** - * Read remote tracking / PR status for a repository. - */ - readonly remoteStatus: ( - input: GitStatusInput, - ) => Effect.Effect; - - /** - * Clear any cached local status snapshot for a repository. - */ - readonly invalidateLocalStatus: (cwd: string) => Effect.Effect; - - /** - * Clear any cached remote status snapshot for a repository. - */ - readonly invalidateRemoteStatus: (cwd: string) => Effect.Effect; - - /** - * Clear any cached status snapshot for a repository so the next read is fresh. - */ - readonly invalidateStatus: (cwd: string) => Effect.Effect; - - /** - * Resolve a pull request by URL/number against the current repository. 
- */ - readonly resolvePullRequest: ( - input: GitPullRequestRefInput, - ) => Effect.Effect; - - /** - * Prepare a new thread workspace from a pull request in local or worktree mode. - */ - readonly preparePullRequestThread: ( - input: GitPreparePullRequestThreadInput, - ) => Effect.Effect; - - /** - * Run a Git action (`commit`, `push`, `create_pr`, `commit_push`, `commit_push_pr`). - * When `featureBranch` is set, creates and checks out a feature branch first. - */ - readonly runStackedAction: ( - input: GitRunStackedActionInput, - options?: GitRunStackedActionOptions, - ) => Effect.Effect; -} - -/** - * GitManager - Service tag for stacked Git workflow orchestration. - */ -export class GitManager extends Context.Service()( - "t3/git/Services/GitManager", -) {} diff --git a/apps/server/src/git/Services/GitStatusBroadcaster.ts b/apps/server/src/git/Services/GitStatusBroadcaster.ts deleted file mode 100644 index 647f8408242..00000000000 --- a/apps/server/src/git/Services/GitStatusBroadcaster.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { Context } from "effect"; -import type { Effect, Stream } from "effect"; -import type { - GitManagerServiceError, - GitStatusInput, - GitStatusLocalResult, - GitStatusResult, - GitStatusStreamEvent, -} from "@t3tools/contracts"; - -export interface GitStatusBroadcasterShape { - readonly getStatus: ( - input: GitStatusInput, - ) => Effect.Effect; - readonly refreshLocalStatus: ( - cwd: string, - ) => Effect.Effect; - readonly refreshStatus: (cwd: string) => Effect.Effect; - readonly streamStatus: ( - input: GitStatusInput, - ) => Stream.Stream; -} - -export class GitStatusBroadcaster extends Context.Service< - GitStatusBroadcaster, - GitStatusBroadcasterShape ->()("t3/git/Services/GitStatusBroadcaster") {} diff --git a/apps/server/src/git/Services/SessionTextGeneration.ts b/apps/server/src/git/Services/SessionTextGeneration.ts deleted file mode 100644 index 351f3fa3603..00000000000 --- 
a/apps/server/src/git/Services/SessionTextGeneration.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Context } from "effect"; - -import type { TextGenerationShape } from "./TextGeneration.ts"; - -export interface SessionTextGenerationShape extends TextGenerationShape {} - -export class SessionTextGeneration extends Context.Service< - SessionTextGeneration, - SessionTextGenerationShape ->()("t3/git/Services/SessionTextGeneration") {} diff --git a/apps/server/src/git/Utils.ts b/apps/server/src/git/Utils.ts index 15015e8cda5..6faf3e99c77 100644 --- a/apps/server/src/git/Utils.ts +++ b/apps/server/src/git/Utils.ts @@ -1,171 +1,6 @@ -/** - * Shared utilities for text generation layers (Codex, Claude, etc.). - * - * @module textGenerationUtils - */ -import { Schema } from "effect"; - -import { TextGenerationError } from "@t3tools/contracts"; - import { existsSync } from "node:fs"; import { join } from "node:path"; export function isGitRepository(cwd: string): boolean { return existsSync(join(cwd, ".git")); } - -/** Convert an Effect Schema to a flat JSON Schema object, inlining `$defs` when present. */ -export function toJsonSchemaObject(schema: Schema.Top): unknown { - const document = Schema.toJsonSchemaDocument(schema); - if (document.definitions && Object.keys(document.definitions).length > 0) { - return { ...document.schema, $defs: document.definitions }; - } - return document.schema; -} - -/** Truncate a text section to `maxChars`, appending a `[truncated]` marker when needed. 
*/ -export function limitSection(value: string, maxChars: number): string { - if (value.length <= maxChars) return value; - const truncated = value.slice(0, maxChars); - return `${truncated}\n\n[truncated]`; -} - -export function extractJsonObject(raw: string): string { - const trimmed = raw.trim(); - if (trimmed.length === 0) { - return trimmed; - } - - const start = trimmed.indexOf("{"); - if (start < 0) { - return trimmed; - } - - let depth = 0; - let inString = false; - let escaping = false; - for (let index = start; index < trimmed.length; index += 1) { - const char = trimmed[index]; - if (inString) { - if (escaping) { - escaping = false; - } else if (char === "\\") { - escaping = true; - } else if (char === '"') { - inString = false; - } - continue; - } - - if (char === '"') { - inString = true; - continue; - } - - if (char === "{") { - depth += 1; - continue; - } - - if (char === "}") { - depth -= 1; - if (depth === 0) { - return trimmed.slice(start, index + 1); - } - } - } - - return trimmed.slice(start); -} - -/** Normalise a raw commit subject to imperative-mood, ≤72 chars, no trailing period. */ -export function sanitizeCommitSubject(raw: string): string { - const singleLine = raw.trim().split(/\r?\n/g)[0]?.trim() ?? ""; - const withoutTrailingPeriod = singleLine.replace(/[.]+$/g, "").trim(); - if (withoutTrailingPeriod.length === 0) { - return "Update project files"; - } - - if (withoutTrailingPeriod.length <= 72) { - return withoutTrailingPeriod; - } - return withoutTrailingPeriod.slice(0, 72).trimEnd(); -} - -/** Normalise a raw PR title to a single line with a sensible fallback. */ -export function sanitizePrTitle(raw: string): string { - const singleLine = raw.trim().split(/\r?\n/g)[0]?.trim() ?? ""; - if (singleLine.length > 0) { - return singleLine; - } - return "Update project changes"; -} - -/** Normalise a raw thread title to a compact single-line sidebar-safe label. 
*/ -export function sanitizeThreadTitle(raw: string): string { - const normalized = raw - .trim() - .split(/\r?\n/g)[0] - ?.trim() - .replace(/^['"`]+|['"`]+$/g, "") - .trim() - .replace(/\s+/g, " "); - - if (!normalized || normalized.trim().length === 0) { - return "New thread"; - } - - if (normalized.length <= 50) { - return normalized; - } - - return `${normalized.slice(0, 47).trimEnd()}...`; -} - -/** CLI name to human-readable label, e.g. "codex" → "Codex CLI (`codex`)" */ -function cliLabel(cliName: string): string { - const capitalized = cliName.charAt(0).toUpperCase() + cliName.slice(1); - return `${capitalized} CLI (\`${cliName}\`)`; -} - -/** - * Normalize an unknown error from a CLI text generation process into a - * typed `TextGenerationError`. Parameterized by CLI name so both Codex - * and Claude (and future providers) can share the same logic. - */ -export function normalizeCliError( - cliName: string, - operation: string, - error: unknown, - fallback: string, -): TextGenerationError { - if (Schema.is(TextGenerationError)(error)) { - return error; - } - - if (error instanceof Error) { - const lower = error.message.toLowerCase(); - if ( - error.message.includes(`Command not found: ${cliName}`) || - lower.includes(`spawn ${cliName}`) || - lower.includes("enoent") - ) { - return new TextGenerationError({ - operation, - detail: `${cliLabel(cliName)} is required but not available on PATH.`, - cause: error, - }); - } - return new TextGenerationError({ - operation, - detail: `${fallback}: ${error.message}`, - cause: error, - }); - } - - return new TextGenerationError({ - operation, - detail: fallback, - cause: error, - }); -} diff --git a/apps/server/src/keybindings.ts b/apps/server/src/keybindings.ts index 9a29378a8a2..eec0f23e553 100644 --- a/apps/server/src/keybindings.ts +++ b/apps/server/src/keybindings.ts @@ -13,14 +13,10 @@ import { KeybindingShortcut, KeybindingWhenNode, MAX_KEYBINDINGS_COUNT, - MODEL_PICKER_JUMP_KEYBINDING_COMMANDS, - 
MAX_WHEN_EXPRESSION_DEPTH, ResolvedKeybindingRule, ResolvedKeybindingsConfig, - THREAD_JUMP_KEYBINDING_COMMANDS, type ServerConfigIssue, } from "@t3tools/contracts"; -import type { Mutable } from "effect/Types"; import { Array, Cache, @@ -48,278 +44,20 @@ import * as Semaphore from "effect/Semaphore"; import { ServerConfig } from "./config.ts"; import { writeFileStringAtomically } from "./atomicWrite.ts"; import { fromLenientJson } from "@t3tools/shared/schemaJson"; - -type WhenToken = - | { type: "identifier"; value: string } - | { type: "not" } - | { type: "and" } - | { type: "or" } - | { type: "lparen" } - | { type: "rparen" }; - -export const DEFAULT_KEYBINDINGS: ReadonlyArray = [ - { key: "mod+j", command: "terminal.toggle" }, - { key: "mod+d", command: "terminal.split", when: "terminalFocus" }, - { key: "mod+n", command: "terminal.new", when: "terminalFocus" }, - { key: "mod+w", command: "terminal.close", when: "terminalFocus" }, - { key: "mod+d", command: "diff.toggle", when: "!terminalFocus" }, - { key: "mod+k", command: "commandPalette.toggle", when: "!terminalFocus" }, - { key: "mod+n", command: "chat.new", when: "!terminalFocus" }, - { key: "mod+shift+o", command: "chat.new", when: "!terminalFocus" }, - { key: "mod+shift+n", command: "chat.newLocal", when: "!terminalFocus" }, - { key: "mod+shift+m", command: "modelPicker.toggle", when: "!terminalFocus" }, - { key: "mod+o", command: "editor.openFavorite" }, - { key: "mod+shift+[", command: "thread.previous" }, - { key: "mod+shift+]", command: "thread.next" }, - ...THREAD_JUMP_KEYBINDING_COMMANDS.map((command, index) => ({ - key: `mod+${index + 1}`, - command, - })), - ...MODEL_PICKER_JUMP_KEYBINDING_COMMANDS.map((command, index) => ({ - key: `mod+${index + 1}`, - command, - when: "modelPickerOpen", - })), -]; - -function normalizeKeyToken(token: string): string { - if (token === "space") return " "; - if (token === "esc") return "escape"; - return token; -} - -/** @internal - Exported for testing */ 
-export function parseKeybindingShortcut(value: string): KeybindingShortcut | null { - const rawTokens = value - .toLowerCase() - .split("+") - .map((token) => token.trim()); - const tokens = [...rawTokens]; - let trailingEmptyCount = 0; - while (tokens[tokens.length - 1] === "") { - trailingEmptyCount += 1; - tokens.pop(); - } - if (trailingEmptyCount > 0) { - tokens.push("+"); - } - if (tokens.some((token) => token.length === 0)) { - return null; - } - if (tokens.length === 0) return null; - - let key: string | null = null; - let metaKey = false; - let ctrlKey = false; - let shiftKey = false; - let altKey = false; - let modKey = false; - - for (const token of tokens) { - switch (token) { - case "cmd": - case "meta": - metaKey = true; - break; - case "ctrl": - case "control": - ctrlKey = true; - break; - case "shift": - shiftKey = true; - break; - case "alt": - case "option": - altKey = true; - break; - case "mod": - modKey = true; - break; - default: { - if (key !== null) return null; - key = normalizeKeyToken(token); - } - } - } - - if (key === null) return null; - return { - key, - metaKey, - ctrlKey, - shiftKey, - altKey, - modKey, - }; -} - -function tokenizeWhenExpression(expression: string): WhenToken[] | null { - const tokens: WhenToken[] = []; - let index = 0; - - while (index < expression.length) { - const current = expression[index]; - if (!current) break; - - if (/\s/.test(current)) { - index += 1; - continue; - } - if (expression.startsWith("&&", index)) { - tokens.push({ type: "and" }); - index += 2; - continue; - } - if (expression.startsWith("||", index)) { - tokens.push({ type: "or" }); - index += 2; - continue; - } - if (current === "!") { - tokens.push({ type: "not" }); - index += 1; - continue; - } - if (current === "(") { - tokens.push({ type: "lparen" }); - index += 1; - continue; - } - if (current === ")") { - tokens.push({ type: "rparen" }); - index += 1; - continue; - } - - const identifier = 
/^[A-Za-z_][A-Za-z0-9_.-]*/.exec(expression.slice(index)); - if (!identifier) { - return null; - } - tokens.push({ type: "identifier", value: identifier[0] }); - index += identifier[0].length; - } - - return tokens; -} - -function parseKeybindingWhenExpression(expression: string): KeybindingWhenNode | null { - const tokens = tokenizeWhenExpression(expression); - if (!tokens || tokens.length === 0) return null; - let index = 0; - - const parsePrimary = (depth: number): KeybindingWhenNode | null => { - if (depth > MAX_WHEN_EXPRESSION_DEPTH) { - return null; - } - const token = tokens[index]; - if (!token) return null; - - if (token.type === "identifier") { - index += 1; - return { type: "identifier", name: token.value }; - } - - if (token.type === "lparen") { - index += 1; - const expressionNode = parseOr(depth + 1); - const closeToken = tokens[index]; - if (!expressionNode || !closeToken || closeToken.type !== "rparen") { - return null; - } - index += 1; - return expressionNode; - } - - return null; - }; - - const parseUnary = (depth: number): KeybindingWhenNode | null => { - let notCount = 0; - while (tokens[index]?.type === "not") { - index += 1; - notCount += 1; - if (notCount > MAX_WHEN_EXPRESSION_DEPTH) { - return null; - } - } - - let node = parsePrimary(depth); - if (!node) return null; - - while (notCount > 0) { - node = { type: "not", node }; - notCount -= 1; - } - - return node; - }; - - const parseAnd = (depth: number): KeybindingWhenNode | null => { - let left = parseUnary(depth); - if (!left) return null; - - while (tokens[index]?.type === "and") { - index += 1; - const right = parseUnary(depth); - if (!right) return null; - left = { type: "and", left, right }; - } - - return left; - }; - - const parseOr = (depth: number): KeybindingWhenNode | null => { - let left = parseAnd(depth); - if (!left) return null; - - while (tokens[index]?.type === "or") { - index += 1; - const right = parseAnd(depth); - if (!right) return null; - left = { type: "or", left, 
right }; - } - - return left; - }; - - const ast = parseOr(0); - if (!ast || index !== tokens.length) return null; - return ast; -} - -/** @internal - Exported for testing */ -export function compileResolvedKeybindingRule(rule: KeybindingRule): ResolvedKeybindingRule | null { - const shortcut = parseKeybindingShortcut(rule.key); - if (!shortcut) return null; - - if (rule.when !== undefined) { - const whenAst = parseKeybindingWhenExpression(rule.when); - if (!whenAst) return null; - return { - command: rule.command, - shortcut, - whenAst, - }; - } - - return { - command: rule.command, - shortcut, - }; -} - -export function compileResolvedKeybindingsConfig( - config: KeybindingsConfig, -): ResolvedKeybindingsConfig { - const compiled: Mutable = []; - for (const rule of config) { - const result = Schema.decodeExit(ResolvedKeybindingFromConfig)(rule); - if (result._tag === "Success") { - compiled.push(result.value); - } - } - return compiled; -} +import { + DEFAULT_KEYBINDINGS, + DEFAULT_RESOLVED_KEYBINDINGS, + compileResolvedKeybindingRule, + compileResolvedKeybindingsConfig, + parseKeybindingShortcut, +} from "@t3tools/shared/keybindings"; + +export { + DEFAULT_KEYBINDINGS, + compileResolvedKeybindingRule, + compileResolvedKeybindingsConfig, + parseKeybindingShortcut, +}; export const ResolvedKeybindingFromConfig = KeybindingRule.pipe( Schema.decodeTo( @@ -412,8 +150,6 @@ function encodeWhenAst(node: KeybindingWhenNode): string { } } -const DEFAULT_RESOLVED_KEYBINDINGS = compileResolvedKeybindingsConfig(DEFAULT_KEYBINDINGS); - const RawKeybindingsEntries = fromLenientJson(Schema.Array(Schema.Unknown)); const KeybindingsConfigJson = Schema.fromJsonString(KeybindingsConfig); const PrettyJsonString = SchemaGetter.parseJson().compose( diff --git a/apps/server/src/kiloServerManager.test.ts b/apps/server/src/kiloServerManager.test.ts index a863e7b9e28..aff45303f72 100644 --- a/apps/server/src/kiloServerManager.test.ts +++ b/apps/server/src/kiloServerManager.test.ts @@ -7,8 
+7,22 @@ import { type KiloClient, type KiloProviderSession, type KiloSessionContext, + type SharedServerState, } from "./kilo/types.ts"; +vi.mock("./kilo/serverLifecycle.ts", async () => { + const actual = + await vi.importActual( + "./kilo/serverLifecycle.ts", + ); + return { + ...actual, + ensureServer: vi.fn(), + }; +}); + +import * as ServerLifecycle from "./kilo/serverLifecycle.ts"; + class TestKiloServerManager extends KiloServerManager { seedSession(context: KiloSessionContext) { (this as unknown as { sessions: Map }).sessions.set( @@ -139,3 +153,58 @@ describe("KiloServerManager.respondToRequest", () => { expect(context.activeTurnId).toBe(TurnId.make("turn-kilo")); }); }); + +describe("KiloServerManager.getOrStartServer", () => { + it("kills the spawned server child on stopAll (manager-owned lifecycle)", async () => { + // Regression: KiloTextGeneration previously called the standalone + // `ensureServer` helper directly and stored the returned server in a + // local closure variable, so the manager's `stopAll()` finalizer never + // killed the spawned child. Routing through `getOrStartServer` keeps the + // child owned by the manager. 
+ const childKill = vi.fn(); + const fakeServer = { + baseUrl: "http://127.0.0.1:1", + child: { kill: childKill, killed: false } as unknown, + authHeader: undefined, + } as unknown as SharedServerState; + + const ensureServerMock = vi.mocked(ServerLifecycle.ensureServer); + ensureServerMock.mockReset(); + ensureServerMock.mockResolvedValue({ state: fakeServer, serverPromise: undefined }); + + const manager = new KiloServerManager(); + const server = await manager.getOrStartServer({ binaryPath: "kilo" }); + expect(server).toBe(fakeServer); + + manager.stopAll(); + expect(childKill).toHaveBeenCalledTimes(1); + }); + + it("serializes concurrent starts so callers share one server", async () => { + let started = 0; + const ensureServerMock = vi.mocked(ServerLifecycle.ensureServer); + ensureServerMock.mockReset(); + ensureServerMock.mockImplementation(async () => { + started += 1; + // Simulate an async start — both callers should see the same pending + // promise and end up with the same SharedServerState. + await new Promise((resolve) => setTimeout(resolve, 5)); + const fakeServer = { + baseUrl: `http://127.0.0.1:${started}`, + child: { kill: vi.fn(), killed: false } as unknown, + authHeader: undefined, + } as unknown as SharedServerState; + return { state: fakeServer, serverPromise: undefined }; + }); + + const manager = new KiloServerManager(); + const [a, b] = await Promise.all([ + manager.getOrStartServer({ binaryPath: "kilo" }), + manager.getOrStartServer({ binaryPath: "kilo" }), + ]); + expect(started).toBe(1); + expect(a).toBe(b); + + manager.stopAll(); + }); +}); diff --git a/apps/server/src/kiloServerManager.ts b/apps/server/src/kiloServerManager.ts index 728c5c048cb..d1537a3ad3c 100644 --- a/apps/server/src/kiloServerManager.ts +++ b/apps/server/src/kiloServerManager.ts @@ -552,6 +552,16 @@ export class KiloServerManager extends EventEmitter { this.serverPromise = undefined; } + /** + * Returns the manager's shared Kilo server, starting it if needed. 
The child + * process is owned by this manager — `stopAll()` will kill it. Use this from + * callers (e.g. text generation) that need the server but don't want to + * register a full session on the manager. + */ + async getOrStartServer(options?: KiloProviderOptions): Promise { + return this.ensureServer(options); + } + private requireSession(threadId: ThreadId): KiloSessionContext { const context = this.sessions.get(threadId); if (!context) { diff --git a/apps/server/src/observability/Metrics.test.ts b/apps/server/src/observability/Metrics.test.ts index 4604f43b63a..b5eeedaaa43 100644 --- a/apps/server/src/observability/Metrics.test.ts +++ b/apps/server/src/observability/Metrics.test.ts @@ -1,4 +1,5 @@ import { assert, describe, it } from "@effect/vitest"; +import { ProviderDriverKind } from "@t3tools/contracts"; import { Effect, Metric } from "effect"; import { withMetrics } from "./Metrics.ts"; @@ -75,10 +76,11 @@ describe("withMetrics", () => { Effect.gen(function* () { const counter = Metric.counter("with_metrics_lazy_total"); const timer = Metric.timer("with_metrics_lazy_duration"); - let provider = "unknown"; + let provider = ProviderDriverKind.make("unknown"); + const lazyInittedProvider = ProviderDriverKind.make("codex"); yield* Effect.sync(() => { - provider = "codex"; + provider = lazyInittedProvider; }).pipe( withMetrics({ counter, @@ -93,7 +95,7 @@ describe("withMetrics", () => { const snapshots = yield* Metric.snapshot; assert.equal( hasMetricSnapshot(snapshots, "with_metrics_lazy_total", { - provider: "codex", + provider: lazyInittedProvider, operation: "lazy", outcome: "success", }), @@ -101,7 +103,7 @@ describe("withMetrics", () => { ); assert.equal( hasMetricSnapshot(snapshots, "with_metrics_lazy_duration", { - provider: "codex", + provider: lazyInittedProvider, operation: "lazy", }), true, diff --git a/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts b/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts index 
954c4d4726c..ad5fb59bd1e 100644 --- a/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts +++ b/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts @@ -3,7 +3,12 @@ import os from "node:os"; import path from "node:path"; import { execFileSync } from "node:child_process"; -import type { ProviderKind, ProviderRuntimeEvent, ProviderSession } from "@t3tools/contracts"; +import { + ProviderDriverKind, + ProviderRuntimeEvent, + ProviderSession, + ProviderInstanceId, +} from "@t3tools/contracts"; import { CommandId, DEFAULT_PROVIDER_INTERACTION_MODE, @@ -19,8 +24,9 @@ import { afterEach, describe, expect, it, vi } from "vitest"; import { CheckpointStoreLive } from "../../checkpointing/Layers/CheckpointStore.ts"; import { CheckpointStore } from "../../checkpointing/Services/CheckpointStore.ts"; -import { GitCoreLive } from "../../git/Layers/GitCore.ts"; -import { GitStatusBroadcaster } from "../../git/Services/GitStatusBroadcaster.ts"; +import * as VcsDriverRegistry from "../../vcs/VcsDriverRegistry.ts"; +import * as VcsProcess from "../../vcs/VcsProcess.ts"; +import { VcsStatusBroadcaster } from "../../vcs/VcsStatusBroadcaster.ts"; import { RepositoryIdentityResolverLive } from "../../project/Layers/RepositoryIdentityResolver.ts"; import { CheckpointReactorLive } from "./CheckpointReactor.ts"; import { OrchestrationEngineLive } from "./OrchestrationEngine.ts"; @@ -50,7 +56,7 @@ const asTurnId = (value: string): TurnId => TurnId.make(value); type LegacyProviderRuntimeEvent = { readonly type: string; readonly eventId: EventId; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly createdAt: string; readonly threadId: ThreadId; readonly turnId?: string | undefined; @@ -64,7 +70,7 @@ function createProviderServiceHarness( cwd: string, hasSession = true, sessionCwd = cwd, - providerName: ProviderSession["provider"] = "codex", + providerName: ProviderSession["provider"] = ProviderDriverKind.make("codex"), ) { const now = new 
Date().toISOString(); const runtimeEventPubSub = Effect.runSync(PubSub.unbounded()); @@ -96,7 +102,18 @@ function createProviderServiceHarness( respondToUserInput: () => unsupported(), stopSession: () => unsupported(), listSessions, - getCapabilities: () => Effect.succeed({ sessionModelSwitch: "in-session" } as any), + getCapabilities: () => Effect.succeed({ sessionModelSwitch: "in-session" }), + getInstanceInfo: (instanceId) => + Effect.succeed({ + instanceId, + driverKind: ProviderDriverKind.make(providerName), + displayName: undefined, + enabled: true, + continuationIdentity: { + driverKind: ProviderDriverKind.make(providerName), + continuationKey: `${providerName}:instance:${instanceId}`, + }, + }), rollbackConversation, get streamEvents() { return Stream.fromPubSub(runtimeEventPubSub); @@ -243,7 +260,7 @@ describe("CheckpointReactor", () => { readonly projectWorkspaceRoot?: string; readonly threadWorktreePath?: string | null; readonly providerSessionCwd?: string; - readonly providerName?: ProviderKind; + readonly providerName?: ProviderDriverKind; readonly gitStatusRefreshCalls?: Array; }) { const cwd = createGitRepository(); @@ -252,7 +269,7 @@ describe("CheckpointReactor", () => { cwd, options?.hasSession ?? true, options?.providerSessionCwd ?? cwd, - options?.providerName ?? "codex", + options?.providerName ?? 
ProviderDriverKind.make("codex"), ); const orchestrationLayer = OrchestrationEngineLive.pipe( Layer.provide(OrchestrationProjectionSnapshotQueryLive), @@ -266,7 +283,7 @@ describe("CheckpointReactor", () => { const ServerConfigLayer = ServerConfig.layerTest(process.cwd(), { prefix: "t3-checkpoint-reactor-test-", }); - const gitStatusBroadcasterLayer = Layer.succeed(GitStatusBroadcaster, { + const vcsStatusBroadcasterLayer = Layer.succeed(VcsStatusBroadcaster, { getStatus: () => Effect.die("getStatus should not be called in this test"), refreshLocalStatus: (cwd: string) => Effect.sync(() => { @@ -274,9 +291,9 @@ describe("CheckpointReactor", () => { }).pipe( Effect.as({ isRepo: true, - hasOriginRemote: false, - isDefaultBranch: true, - branch: "main", + hasPrimaryRemote: false, + isDefaultRef: true, + refName: "main", hasWorkingTreeChanges: false, workingTree: { files: [], insertions: 0, deletions: 0 }, }), @@ -289,11 +306,16 @@ describe("CheckpointReactor", () => { Layer.provideMerge(orchestrationLayer), Layer.provideMerge(RuntimeReceiptBusLive), Layer.provideMerge(Layer.succeed(ProviderService, provider.service)), - Layer.provideMerge(gitStatusBroadcasterLayer), - Layer.provideMerge(CheckpointStoreLive), - Layer.provideMerge(WorkspaceEntriesLive.pipe(Layer.provide(WorkspacePathsLive))), + Layer.provideMerge(vcsStatusBroadcasterLayer), + Layer.provideMerge(CheckpointStoreLive.pipe(Layer.provide(VcsDriverRegistry.layer))), + Layer.provideMerge( + WorkspaceEntriesLive.pipe( + Layer.provide(WorkspacePathsLive), + Layer.provideMerge(VcsDriverRegistry.layer), + ), + ), Layer.provideMerge(WorkspacePathsLive), - Layer.provideMerge(GitCoreLive), + Layer.provideMerge(VcsProcess.layer), Layer.provideMerge(ServerConfigLayer), Layer.provideMerge(NodeServices.layer), ); @@ -315,7 +337,7 @@ describe("CheckpointReactor", () => { title: "Test Project", workspaceRoot: options?.projectWorkspaceRoot ?? 
cwd, defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -329,7 +351,7 @@ describe("CheckpointReactor", () => { projectId: asProjectId("project-1"), title: "Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -396,7 +418,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.started", eventId: EventId.make("evt-turn-started-1"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -411,7 +433,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.completed", eventId: EventId.make("evt-turn-completed-1"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -457,7 +479,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.completed", eventId: EventId.make("evt-turn-completed-refresh-local-status"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), turnId: asTurnId("turn-refresh-local-status"), @@ -494,7 +516,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.started", eventId: EventId.make("evt-turn-started-main"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -510,7 +532,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.completed", eventId: EventId.make("evt-turn-completed-aux"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -526,7 +548,7 @@ describe("CheckpointReactor", () => 
{ harness.provider.emit({ type: "turn.completed", eventId: EventId.make("evt-turn-completed-main"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -544,7 +566,7 @@ describe("CheckpointReactor", () => { it("captures pre-turn and completion checkpoints for claude runtime events", async () => { const harness = await createHarness({ seedFilesystemCheckpoints: false, - providerName: "claudeAgent", + providerName: ProviderDriverKind.make("claudeAgent"), }); const createdAt = new Date().toISOString(); @@ -569,7 +591,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.started", eventId: EventId.make("evt-turn-started-claude-1"), - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), turnId: asTurnId("turn-claude-1"), @@ -583,7 +605,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.completed", eventId: EventId.make("evt-turn-completed-claude-1"), - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), turnId: asTurnId("turn-claude-1"), @@ -627,7 +649,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.completed", eventId: EventId.make("evt-turn-completed-missing-baseline"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -716,7 +738,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.completed", eventId: EventId.make("evt-turn-completed-missing-provider-cwd"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -762,7 +784,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: 
"checkpoint.captured", eventId: EventId.make("evt-checkpoint-captured-3"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -812,7 +834,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.completed", eventId: EventId.make("evt-runtime-capture-failure"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -823,7 +845,7 @@ describe("CheckpointReactor", () => { harness.provider.emit({ type: "turn.started", eventId: EventId.make("evt-turn-started-after-runtime-failure"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: ThreadId.make("thread-1"), @@ -918,7 +940,7 @@ describe("CheckpointReactor", () => { }); it("executes provider revert and emits thread.reverted for claude sessions", async () => { - const harness = await createHarness({ providerName: "claudeAgent" }); + const harness = await createHarness({ providerName: ProviderDriverKind.make("claudeAgent") }); const createdAt = new Date().toISOString(); await Effect.runPromise( diff --git a/apps/server/src/orchestration/Layers/CheckpointReactor.ts b/apps/server/src/orchestration/Layers/CheckpointReactor.ts index 71445b4671d..e534f6851ae 100644 --- a/apps/server/src/orchestration/Layers/CheckpointReactor.ts +++ b/apps/server/src/orchestration/Layers/CheckpointReactor.ts @@ -24,7 +24,7 @@ import { RuntimeReceiptBus } from "../Services/RuntimeReceiptBus.ts"; import type { CheckpointStoreError } from "../../checkpointing/Errors.ts"; import type { OrchestrationDispatchError } from "../Errors.ts"; import { isGitRepository } from "../../git/Utils.ts"; -import { GitStatusBroadcaster } from "../../git/Services/GitStatusBroadcaster.ts"; +import { VcsStatusBroadcaster } from "../../vcs/VcsStatusBroadcaster.ts"; import { WorkspaceEntries } from 
"../../workspace/Services/WorkspaceEntries.ts"; type ReactorInput = @@ -70,7 +70,7 @@ const make = Effect.gen(function* () { const checkpointStore = yield* CheckpointStore; const receiptBus = yield* RuntimeReceiptBus; const workspaceEntries = yield* WorkspaceEntries; - const gitStatusBroadcaster = yield* GitStatusBroadcaster; + const vcsStatusBroadcaster = yield* VcsStatusBroadcaster; const appendRevertFailureActivity = (input: { readonly threadId: ThreadId; @@ -237,6 +237,7 @@ const make = Effect.gen(function* () { fromCheckpointRef, toCheckpointRef: targetCheckpointRef, fallbackFromToHead: false, + ignoreWhitespace: false, }) .pipe( Effect.map((diff) => @@ -508,7 +509,7 @@ const make = Effect.gen(function* () { return; } - yield* gitStatusBroadcaster.refreshLocalStatus(sessionRuntime.value.cwd).pipe( + yield* vcsStatusBroadcaster.refreshLocalStatus(sessionRuntime.value.cwd).pipe( Effect.catch((error) => Effect.logWarning("failed to refresh local git status after turn completion", { threadId: event.threadId, diff --git a/apps/server/src/orchestration/Layers/OrchestrationEngine.test.ts b/apps/server/src/orchestration/Layers/OrchestrationEngine.test.ts index b61664f1619..0af5b099a64 100644 --- a/apps/server/src/orchestration/Layers/OrchestrationEngine.test.ts +++ b/apps/server/src/orchestration/Layers/OrchestrationEngine.test.ts @@ -7,6 +7,7 @@ import { ThreadId, TurnId, type OrchestrationEvent, + ProviderInstanceId, } from "@t3tools/contracts"; import { Effect, Layer, ManagedRuntime, Metric, Option, Queue, Stream } from "effect"; import { describe, expect, it } from "vitest"; @@ -104,7 +105,7 @@ describe("OrchestrationEngine", () => { title: "Bootstrap Project", workspaceRoot: "/tmp/project-bootstrap", defaultModelSelection: { - provider: "codex" as const, + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, scripts: [], @@ -119,7 +120,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-bootstrap"), title: "Bootstrap 
Thread", modelSelection: { - provider: "codex" as const, + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -198,7 +199,7 @@ describe("OrchestrationEngine", () => { title: "Project 1", workspaceRoot: "/tmp/project-1", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -212,7 +213,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-1"), title: "Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -258,7 +259,7 @@ describe("OrchestrationEngine", () => { title: "Project Archive", workspaceRoot: "/tmp/project-archive", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -272,7 +273,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-archive"), title: "Archive me", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -325,7 +326,7 @@ describe("OrchestrationEngine", () => { title: "Replay Project", workspaceRoot: "/tmp/project-replay", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -339,7 +340,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-replay"), title: "replay", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -383,7 +384,7 @@ describe("OrchestrationEngine", () => { title: "Stream Project", workspaceRoot: "/tmp/project-stream", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), 
model: "gpt-5-codex", }, createdAt, @@ -407,7 +408,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-stream"), title: "domain-stream", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -444,7 +445,7 @@ describe("OrchestrationEngine", () => { title: "Ack Project", workspaceRoot: "/tmp/project-ack", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -459,7 +460,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-ack"), title: "Ack Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -496,7 +497,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-missing"), title: "Missing Project Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -533,7 +534,7 @@ describe("OrchestrationEngine", () => { title: "Turn Diff Project", workspaceRoot: "/tmp/project-turn-diff", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -547,7 +548,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-turn-diff"), title: "Turn diff thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -652,7 +653,7 @@ describe("OrchestrationEngine", () => { title: "Flaky Project", workspaceRoot: "/tmp/project-flaky", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -668,7 +669,7 @@ 
describe("OrchestrationEngine", () => { projectId: asProjectId("project-flaky"), title: "flaky-fail", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -688,7 +689,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-flaky"), title: "flaky-ok", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -747,7 +748,7 @@ describe("OrchestrationEngine", () => { title: "Atomic Project", workspaceRoot: "/tmp/project-atomic", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -761,7 +762,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-atomic"), title: "atomic", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -890,7 +891,7 @@ describe("OrchestrationEngine", () => { title: "Sync Project", workspaceRoot: "/tmp/project-sync", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -904,7 +905,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-sync"), title: "sync-before", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -975,7 +976,7 @@ describe("OrchestrationEngine", () => { title: "Duplicate Project", workspaceRoot: "/tmp/project-duplicate", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -990,7 +991,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-duplicate"), title: 
"duplicate", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -1010,7 +1011,7 @@ describe("OrchestrationEngine", () => { projectId: asProjectId("project-duplicate"), title: "duplicate", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, diff --git a/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts b/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts index 7a08765647f..7f364c717a7 100644 --- a/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts +++ b/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts @@ -7,6 +7,7 @@ import { ProjectId, ThreadId, TurnId, + ProviderInstanceId, } from "@t3tools/contracts"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { assert, it } from "@effect/vitest"; @@ -92,7 +93,7 @@ it.layer(BaseTestLayer)("OrchestrationProjectionPipeline", (it) => { projectId: ProjectId.make("project-1"), title: "Thread 1", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -364,7 +365,7 @@ it.layer(BaseTestLayer)("OrchestrationProjectionPipeline", (it) => { projectId: ProjectId.make("project-clear-attachments"), title: "Thread Clear Attachments", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -492,7 +493,7 @@ it.layer( projectId: ProjectId.make("project-overwrite"), title: "Thread Overwrite", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -640,7 +641,7 @@ it.layer( projectId: ProjectId.make("project-rollback"), title: "Thread Rollback", modelSelection: { - provider: "codex", 
+ instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -769,7 +770,7 @@ it.layer( projectId: ProjectId.make("project-revert-files"), title: "Thread Revert Files", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -977,7 +978,7 @@ it.layer(Layer.fresh(makeProjectionPipelinePrefixedTestLayer("t3-projection-atta projectId: ProjectId.make("project-delete-files"), title: "Thread Delete Files", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -1140,7 +1141,7 @@ it.layer(BaseTestLayer)("OrchestrationProjectionPipeline", (it) => { projectId: ProjectId.make("project-a"), title: "Thread A", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -1267,7 +1268,7 @@ it.layer(BaseTestLayer)("OrchestrationProjectionPipeline", (it) => { projectId: ProjectId.make("project-empty"), title: "Thread Empty", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -1407,7 +1408,7 @@ it.layer(BaseTestLayer)("OrchestrationProjectionPipeline", (it) => { projectId: ProjectId.make("project-conflict"), title: "Thread Conflict", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -1551,7 +1552,7 @@ it.layer(BaseTestLayer)("OrchestrationProjectionPipeline", (it) => { projectId: ProjectId.make("project-stale-approval"), title: "Thread Stale Approval", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "approval-required", @@ -1694,7 +1695,7 @@ it.layer(BaseTestLayer)("OrchestrationProjectionPipeline", (it) => { projectId: 
ProjectId.make("project-nonstale-approval"), title: "Thread Non-Stale Approval", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "approval-required", @@ -1874,7 +1875,7 @@ it.layer(BaseTestLayer)("OrchestrationProjectionPipeline", (it) => { projectId: ProjectId.make("project-revert"), title: "Thread Revert", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -2195,7 +2196,7 @@ engineLayer("OrchestrationProjectionPipeline via engine dispatch", (it) => { title: "Live Project", workspaceRoot: "/tmp/project-live", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -2233,7 +2234,7 @@ engineLayer("OrchestrationProjectionPipeline via engine dispatch", (it) => { title: "Scripts Project", workspaceRoot: "/tmp/project-scripts", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -2253,7 +2254,7 @@ engineLayer("OrchestrationProjectionPipeline via engine dispatch", (it) => { }, ], defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5", }, }); @@ -2272,7 +2273,7 @@ engineLayer("OrchestrationProjectionPipeline via engine dispatch", (it) => { { scriptsJson: '[{"id":"script-1","name":"Build","command":"bun run build","icon":"build","runOnWorktreeCreate":false}]', - defaultModelSelection: '{"provider":"codex","model":"gpt-5"}', + defaultModelSelection: '{"instanceId":"codex","model":"gpt-5"}', }, ]); }), diff --git a/apps/server/src/orchestration/Layers/ProjectionPipeline.ts b/apps/server/src/orchestration/Layers/ProjectionPipeline.ts index d981ae0da62..28a0208e75c 100644 --- a/apps/server/src/orchestration/Layers/ProjectionPipeline.ts +++ b/apps/server/src/orchestration/Layers/ProjectionPipeline.ts @@ 
-945,6 +945,7 @@ const makeOrchestrationProjectionPipeline = Effect.fn("makeOrchestrationProjecti threadId: event.payload.threadId, status: event.payload.session.status, providerName: event.payload.session.providerName, + providerInstanceId: event.payload.session.providerInstanceId ?? null, runtimeMode: event.payload.session.runtimeMode, activeTurnId: event.payload.session.activeTurnId, lastError: event.payload.session.lastError, diff --git a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts index 9f0d63545fc..cba5ce7e830 100644 --- a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts +++ b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts @@ -1,4 +1,12 @@ -import { CheckpointRef, EventId, MessageId, ProjectId, ThreadId, TurnId } from "@t3tools/contracts"; +import { + CheckpointRef, + EventId, + MessageId, + ProjectId, + ThreadId, + TurnId, + ProviderInstanceId, +} from "@t3tools/contracts"; import { assert, it } from "@effect/vitest"; import { Effect, Layer } from "effect"; import * as SqlClient from "effect/unstable/sql/SqlClient"; @@ -252,7 +260,7 @@ projectionSnapshotLayer("ProjectionSnapshotQuery", (it) => { workspaceRoot: "/tmp/project-1", repositoryIdentity: null, defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, scripts: [ @@ -275,7 +283,7 @@ projectionSnapshotLayer("ProjectionSnapshotQuery", (it) => { projectId: asProjectId("project-1"), title: "Thread 1", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: "default", @@ -363,7 +371,7 @@ projectionSnapshotLayer("ProjectionSnapshotQuery", (it) => { workspaceRoot: "/tmp/project-1", repositoryIdentity: null, defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, scripts: [ @@ 
-385,7 +393,7 @@ projectionSnapshotLayer("ProjectionSnapshotQuery", (it) => { projectId: asProjectId("project-1"), title: "Thread 1", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: "default", diff --git a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.ts b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.ts index ddcc99902b8..10639c1b42d 100644 --- a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.ts +++ b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.ts @@ -195,6 +195,7 @@ function mapSessionRow( threadId: row.threadId, status: row.status, providerName: row.providerName, + ...(row.providerInstanceId !== null ? { providerInstanceId: row.providerInstanceId } : {}), runtimeMode: row.runtimeMode, activeTurnId: row.activeTurnId, lastError: row.lastError, @@ -351,6 +352,7 @@ const makeProjectionSnapshotQuery = Effect.gen(function* () { thread_id AS "threadId", status, provider_name AS "providerName", + provider_instance_id AS "providerInstanceId", provider_session_id AS "providerSessionId", provider_thread_id AS "providerThreadId", runtime_mode AS "runtimeMode", @@ -609,6 +611,7 @@ const makeProjectionSnapshotQuery = Effect.gen(function* () { thread_id AS "threadId", status, provider_name AS "providerName", + provider_instance_id AS "providerInstanceId", runtime_mode AS "runtimeMode", active_turn_id AS "activeTurnId", last_error AS "lastError", @@ -880,6 +883,9 @@ const makeProjectionSnapshotQuery = Effect.gen(function* () { threadId: row.threadId, status: row.status, providerName: row.providerName, + ...(row.providerInstanceId !== null + ? 
{ providerInstanceId: row.providerInstanceId } + : {}), runtimeMode: row.runtimeMode, activeTurnId: row.activeTurnId, lastError: row.lastError, diff --git a/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts b/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts index ce796de7842..09252571c37 100644 --- a/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts +++ b/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts @@ -2,7 +2,14 @@ import fs from "node:fs"; import os from "node:os"; import path from "node:path"; -import type { ModelSelection, ProviderRuntimeEvent, ProviderSession } from "@t3tools/contracts"; +import { + ModelSelection, + ProviderRuntimeEvent, + ProviderSession, + ProviderDriverKind, + ProviderInstanceId, +} from "@t3tools/contracts"; +import { createModelSelection } from "@t3tools/shared/model"; import { ApprovalRequestId, CommandId, @@ -26,25 +33,22 @@ import { ProviderService, type ProviderServiceShape, } from "../../provider/Services/ProviderService.ts"; -import { GitCore, type GitCoreShape } from "../../git/Services/GitCore.ts"; -import { - GitStatusBroadcaster, - type GitStatusBroadcasterShape, -} from "../../git/Services/GitStatusBroadcaster.ts"; -import { - TextGeneration, - type TextGenerationShape, - type ThreadTitleGenerationResult, -} from "../../git/Services/TextGeneration.ts"; +import { TextGeneration, type TextGenerationShape } from "../../textGeneration/TextGeneration.ts"; import { RepositoryIdentityResolverLive } from "../../project/Layers/RepositoryIdentityResolver.ts"; import { OrchestrationEngineLive } from "./OrchestrationEngine.ts"; import { OrchestrationProjectionPipelineLive } from "./ProjectionPipeline.ts"; import { OrchestrationProjectionSnapshotQueryLive } from "./ProjectionSnapshotQuery.ts"; -import { ProviderCommandReactorLive } from "./ProviderCommandReactor.ts"; +import { + providerErrorLabel, + providerErrorLabelFromInstanceHint, + 
ProviderCommandReactorLive, +} from "./ProviderCommandReactor.ts"; import { OrchestrationEngineService } from "../Services/OrchestrationEngine.ts"; import { ProviderCommandReactor } from "../Services/ProviderCommandReactor.ts"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { ServerSettingsService } from "../../serverSettings.ts"; +import { VcsStatusBroadcaster } from "../../vcs/VcsStatusBroadcaster.ts"; +import { GitWorkflowService, type GitWorkflowServiceShape } from "../../git/GitWorkflowService.ts"; const asProjectId = (value: string): ProjectId => ProjectId.make(value); const asApprovalRequestId = (value: string): ApprovalRequestId => ApprovalRequestId.make(value); @@ -101,6 +105,30 @@ describe("ProviderCommandReactor", () => { createdBaseDirs.clear(); }); + describe("provider error attribution", () => { + it("uses the current provider instance slug when current instance lookup fails", () => { + expect( + providerErrorLabelFromInstanceHint({ + instanceId: "codex_personal", + modelSelectionInstanceId: "codex", + sessionProvider: "codex", + }), + ).toBe("codex_personal"); + }); + + it("uses the desired provider instance slug when desired instance lookup fails", () => { + expect( + providerErrorLabelFromInstanceHint({ + instanceId: "claude_openrouter", + }), + ).toBe("claude_openrouter"); + }); + + it("uses the unknown driver kind when the resolved driver is not registered locally", () => { + expect(providerErrorLabel("third_party_driver")).toBe("third_party_driver"); + }); + }); + async function createHarness(input?: { readonly baseDir?: string; readonly threadModelSelection?: ModelSelection; @@ -115,7 +143,7 @@ describe("ProviderCommandReactor", () => { let nextSessionIndex = 1; const runtimeSessions: Array = []; const modelSelection = input?.threadModelSelection ?? 
{ - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }; const startSession = vi.fn((_: unknown, input: unknown) => { @@ -131,8 +159,24 @@ describe("ProviderCommandReactor", () => { typeof input.threadId === "string" ? ThreadId.make(input.threadId) : ThreadId.make(`thread-${sessionIndex}`); + const inputModelSelection = + typeof input === "object" && input !== null && "modelSelection" in input + ? (input.modelSelection as ModelSelection | undefined) + : undefined; + const providerInstanceId = + typeof input === "object" && input !== null && "providerInstanceId" in input + ? (input.providerInstanceId as ProviderInstanceId | undefined) + : inputModelSelection?.instanceId; + const provider = + typeof input === "object" && + input !== null && + "provider" in input && + typeof input.provider === "string" + ? (input.provider as ProviderSession["provider"]) + : ProviderDriverKind.make(inputModelSelection?.instanceId ?? modelSelection.instanceId); const session: ProviderSession = { - provider: modelSelection.provider, + provider, + ...(providerInstanceId ? { providerInstanceId } : {}), status: "ready" as const, runtimeMode: typeof input === "object" && @@ -147,7 +191,9 @@ describe("ProviderCommandReactor", () => { typeof input.cwd === "string" ? { cwd: input.cwd } : {}), - ...(modelSelection.model !== undefined ? { model: modelSelection.model } : {}), + ...((inputModelSelection?.model ?? modelSelection.model) + ? { model: inputModelSelection?.model ?? modelSelection.model } + : {}), threadId, resumeCursor: resumeCursor ?? 
{ opaque: `resume-${sessionIndex}` }, createdAt: now, @@ -194,9 +240,9 @@ describe("ProviderCommandReactor", () => { const refreshStatus = vi.fn((_: string) => Effect.succeed({ isRepo: true, - hasOriginRemote: true, - isDefaultBranch: false, - branch: "renamed-branch", + hasPrimaryRemote: true, + isDefaultRef: false, + refName: "renamed-branch", hasWorkingTreeChanges: false, workingTree: { files: [], @@ -217,14 +263,13 @@ describe("ProviderCommandReactor", () => { }), ), ); - const generateThreadTitle = vi.fn( - (_input?: unknown): Effect.Effect => - Effect.fail( - new TextGenerationError({ - operation: "generateThreadTitle", - detail: "disabled in test harness", - }), - ), + const generateThreadTitle = vi.fn((_) => + Effect.fail( + new TextGenerationError({ + operation: "generateThreadTitle", + detail: "disabled in test harness", + }), + ), ); const unsupported = () => Effect.die(new Error("Unsupported provider call in test")) as never; @@ -239,14 +284,26 @@ describe("ProviderCommandReactor", () => { getCapabilities: (_provider) => Effect.succeed({ sessionModelSwitch: input?.sessionModelSwitch ?? "in-session", - transport: "app-server-json-rpc" as const, - modelDiscovery: "native" as const, - supportsModelDiscovery: true, - supportsResume: true, - supportsRollback: true, - supportsAttachments: true, - persistentRuntime: true, }), + getInstanceInfo: (instanceId) => { + const raw = String(instanceId); + const driverKind = ProviderDriverKind.make( + raw.startsWith("claude") ? "claudeAgent" : raw.startsWith("codex") ? "codex" : raw, + ); + return Effect.succeed({ + instanceId, + driverKind, + displayName: undefined, + enabled: true, + continuationIdentity: { + driverKind, + continuationKey: + driverKind === ProviderDriverKind.make("codex") + ? 
"codex:home:/shared-codex" + : `${driverKind}:instance:${instanceId}`, + }, + }); + }, rollbackConversation: () => unsupported(), get streamEvents() { return Stream.fromPubSub(runtimeEventPubSub); @@ -264,21 +321,25 @@ describe("ProviderCommandReactor", () => { const layer = ProviderCommandReactorLive.pipe( Layer.provideMerge(orchestrationLayer), Layer.provideMerge(Layer.succeed(ProviderService, service)), - Layer.provideMerge(Layer.succeed(GitCore, { renameBranch } as unknown as GitCoreShape)), Layer.provideMerge( - Layer.succeed(GitStatusBroadcaster, { + Layer.mock(GitWorkflowService)({ + renameBranch, + } satisfies Partial), + ), + Layer.provideMerge( + Layer.succeed(VcsStatusBroadcaster, { getStatus: () => Effect.die("getStatus should not be called in this test"), refreshLocalStatus: () => Effect.die("refreshLocalStatus should not be called in this test"), refreshStatus, streamStatus: () => Stream.die("streamStatus should not be called in this test"), - } satisfies GitStatusBroadcasterShape), + }), ), Layer.provideMerge( Layer.mock(TextGeneration, { generateBranchName, generateThreadTitle, - } as unknown as TextGenerationShape), + }), ), Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge(ServerConfig.layerTest(process.cwd(), baseDir)), @@ -331,6 +392,7 @@ describe("ProviderCommandReactor", () => { refreshStatus, generateBranchName, generateThreadTitle, + runtimeSessions, stateDir, drain, }; @@ -363,7 +425,7 @@ describe("ProviderCommandReactor", () => { expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ cwd: "/tmp/provider-project", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "approval-required", @@ -585,14 +647,10 @@ describe("ProviderCommandReactor", () => { text: "hello fast mode", attachments: [], }, - modelSelection: { - provider: "codex", - model: "gpt-5.3-codex", - options: { - reasoningEffort: "high", - fastMode: true, - }, - }, + 
modelSelection: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.3-codex", [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: true }, + ]), interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, runtimeMode: "approval-required", createdAt: now, @@ -602,31 +660,26 @@ describe("ProviderCommandReactor", () => { await waitFor(() => harness.startSession.mock.calls.length === 1); await waitFor(() => harness.sendTurn.mock.calls.length === 1); expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ - modelSelection: { - provider: "codex", - model: "gpt-5.3-codex", - options: { - reasoningEffort: "high", - fastMode: true, - }, - }, + modelSelection: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.3-codex", [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: true }, + ]), }); expect(harness.sendTurn.mock.calls[0]?.[0]).toMatchObject({ threadId: ThreadId.make("thread-1"), - modelSelection: { - provider: "codex", - model: "gpt-5.3-codex", - options: { - reasoningEffort: "high", - fastMode: true, - }, - }, + modelSelection: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.3-codex", [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: true }, + ]), }); }); it("forwards claude effort options through session start and turn send", async () => { const harness = await createHarness({ - threadModelSelection: { provider: "claudeAgent", model: "claude-sonnet-4-6" }, + threadModelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-sonnet-4-6", + }, }); const now = new Date().toISOString(); @@ -641,13 +694,11 @@ describe("ProviderCommandReactor", () => { text: "hello with effort", attachments: [], }, - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - effort: "max", - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: "max" 
}], + ), interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, runtimeMode: "approval-required", createdAt: now, @@ -657,29 +708,28 @@ describe("ProviderCommandReactor", () => { await waitFor(() => harness.startSession.mock.calls.length === 1); await waitFor(() => harness.sendTurn.mock.calls.length === 1); expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - effort: "max", - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: "max" }], + ), }); expect(harness.sendTurn.mock.calls[0]?.[0]).toMatchObject({ threadId: ThreadId.make("thread-1"), - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - effort: "max", - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: "max" }], + ), }); }); it("forwards claude fast mode options through session start and turn send", async () => { const harness = await createHarness({ - threadModelSelection: { provider: "claudeAgent", model: "claude-opus-4-6" }, + threadModelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-opus-4-6", + }, }); const now = new Date().toISOString(); @@ -694,13 +744,11 @@ describe("ProviderCommandReactor", () => { text: "hello with fast mode", attachments: [], }, - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - fastMode: true, - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [{ id: "fastMode", value: true }], + ), interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, runtimeMode: "approval-required", createdAt: now, @@ -710,23 +758,19 @@ describe("ProviderCommandReactor", () => { await waitFor(() => harness.startSession.mock.calls.length === 1); await waitFor(() => 
harness.sendTurn.mock.calls.length === 1); expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - fastMode: true, - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [{ id: "fastMode", value: true }], + ), }); expect(harness.sendTurn.mock.calls[0]?.[0]).toMatchObject({ threadId: ThreadId.make("thread-1"), - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - fastMode: true, - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [{ id: "fastMode", value: true }], + ), }); }); @@ -813,15 +857,15 @@ describe("ProviderCommandReactor", () => { expect(harness.sendTurn.mock.calls[1]?.[0]).toMatchObject({ threadId: ThreadId.make("thread-1"), modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, }); }); - it("rejects a first turn when requested provider conflicts with the thread model", async () => { + it("starts a first turn on the requested provider instance even when it differs from the thread model", async () => { const harness = await createHarness({ - threadModelSelection: { provider: "codex", model: "gpt-5-codex" }, + threadModelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex" }, }); const now = new Date().toISOString(); @@ -837,7 +881,7 @@ describe("ProviderCommandReactor", () => { attachments: [], }, modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-6", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -846,29 +890,25 @@ describe("ProviderCommandReactor", () => { }), ); - await waitFor(async () => { - const readModel = await Effect.runPromise(harness.engine.getReadModel()); - const thread = readModel.threads.find((entry) => entry.id === 
ThreadId.make("thread-1")); - return ( - thread?.activities.some((activity) => activity.kind === "provider.turn.start.failed") ?? - false - ); - }); + await waitFor(() => harness.sendTurn.mock.calls.length === 1); - expect(harness.startSession).not.toHaveBeenCalled(); - expect(harness.sendTurn).not.toHaveBeenCalled(); + expect(harness.startSession).toHaveBeenCalledTimes(1); + expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: ProviderInstanceId.make("claudeAgent"), + modelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-opus-4-6", + }, + }); const readModel = await Effect.runPromise(harness.engine.getReadModel()); const thread = readModel.threads.find((entry) => entry.id === ThreadId.make("thread-1")); - expect(thread?.session).toBeNull(); + expect(thread?.session?.providerName).toBe("claudeAgent"); + expect(thread?.session?.providerInstanceId).toBe(ProviderInstanceId.make("claudeAgent")); expect( thread?.activities.find((activity) => activity.kind === "provider.turn.start.failed"), - ).toMatchObject({ - summary: "Provider turn start failed", - payload: { - detail: expect.stringContaining("cannot switch to 'claudeAgent'"), - }, - }); + ).toBeUndefined(); }); it("reuses the same provider session when runtime mode is unchanged", async () => { @@ -917,9 +957,147 @@ describe("ProviderCommandReactor", () => { expect(harness.stopSession.mock.calls.length).toBe(0); }); + it("restarts an existing Codex thread on a compatible requested instance", async () => { + const harness = await createHarness(); + const now = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.make("cmd-turn-start-compatible-codex-1"), + threadId: ThreadId.make("thread-1"), + message: { + messageId: asMessageId("user-message-compatible-codex-1"), + role: "user", + text: "first", + attachments: [], + 
}, + modelSelection: { + instanceId: ProviderInstanceId.make("codex"), + model: "gpt-5-codex", + }, + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.sendTurn.mock.calls.length === 1); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.make("cmd-turn-start-compatible-codex-2"), + threadId: ThreadId.make("thread-1"), + message: { + messageId: asMessageId("user-message-compatible-codex-2"), + role: "user", + text: "second", + attachments: [], + }, + modelSelection: { + instanceId: ProviderInstanceId.make("codex_work"), + model: "gpt-5-codex", + }, + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: new Date().toISOString(), + }), + ); + + await waitFor(() => harness.sendTurn.mock.calls.length === 2); + + expect(harness.startSession).toHaveBeenCalledTimes(2); + expect(harness.startSession.mock.calls[1]?.[1]).toMatchObject({ + provider: ProviderDriverKind.make("codex"), + providerInstanceId: ProviderInstanceId.make("codex_work"), + resumeCursor: { opaque: "resume-1" }, + }); + + const readModel = await Effect.runPromise(harness.engine.getReadModel()); + const thread = readModel.threads.find((entry) => entry.id === ThreadId.make("thread-1")); + expect(thread?.session?.providerInstanceId).toBe(ProviderInstanceId.make("codex_work")); + }); + + it("restarts the provider session when the thread workspace changes", async () => { + const harness = await createHarness({ + threadModelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-sonnet-4-6", + }, + }); + const now = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.make("cmd-turn-start-workspace-1"), + threadId: ThreadId.make("thread-1"), + message: { + messageId: 
asMessageId("user-message-workspace-1"), + role: "user", + text: "first in project root", + attachments: [], + }, + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.startSession.mock.calls.length === 1); + await waitFor(() => harness.sendTurn.mock.calls.length === 1); + expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ + cwd: "/tmp/provider-project", + }); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.meta.update", + commandId: CommandId.make("cmd-thread-worktree-change"), + threadId: ThreadId.make("thread-1"), + worktreePath: "/tmp/provider-project-worktree", + }), + ); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.make("cmd-turn-start-workspace-2"), + threadId: ThreadId.make("thread-1"), + message: { + messageId: asMessageId("user-message-workspace-2"), + role: "user", + text: "second in worktree", + attachments: [], + }, + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(() => harness.startSession.mock.calls.length === 2); + await waitFor(() => harness.sendTurn.mock.calls.length === 2); + expect(harness.stopSession.mock.calls.length).toBe(0); + expect(harness.startSession.mock.calls[1]?.[1]).toMatchObject({ + threadId: ThreadId.make("thread-1"), + cwd: "/tmp/provider-project-worktree", + resumeCursor: { opaque: "resume-1" }, + modelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-sonnet-4-6", + }, + runtimeMode: "approval-required", + }); + }); + it("restarts claude sessions when claude effort changes", async () => { const harness = await createHarness({ - threadModelSelection: { provider: "claudeAgent", model: "claude-sonnet-4-6" }, + threadModelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-sonnet-4-6", + 
}, }); const now = new Date().toISOString(); @@ -934,13 +1112,11 @@ describe("ProviderCommandReactor", () => { text: "first claude turn", attachments: [], }, - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - effort: "medium", - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: "medium" }], + ), interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, runtimeMode: "approval-required", createdAt: now, @@ -961,13 +1137,11 @@ describe("ProviderCommandReactor", () => { text: "second claude turn", attachments: [], }, - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - effort: "max", - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: "max" }], + ), interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, runtimeMode: "approval-required", createdAt: now, @@ -978,13 +1152,11 @@ describe("ProviderCommandReactor", () => { await waitFor(() => harness.sendTurn.mock.calls.length === 2); expect(harness.startSession.mock.calls[1]?.[1]).toMatchObject({ resumeCursor: { opaque: "resume-1" }, - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - effort: "max", - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: "max" }], + ), }); }); @@ -1075,7 +1247,10 @@ describe("ProviderCommandReactor", () => { it("does not inject derived model options when restarting claude on runtime mode changes", async () => { const harness = await createHarness({ - threadModelSelection: { provider: "claudeAgent", model: "claude-opus-4-6" }, + threadModelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-opus-4-6", + }, }); const now = new Date().toISOString(); @@ -1111,7 +1286,7 @@ 
describe("ProviderCommandReactor", () => { expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-6", }, runtimeMode: "approval-required", @@ -1219,7 +1394,7 @@ describe("ProviderCommandReactor", () => { attachments: [], }, modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-6", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -1245,10 +1420,76 @@ describe("ProviderCommandReactor", () => { const thread = readModel.threads.find((entry) => entry.id === ThreadId.make("thread-1")); expect(thread?.session?.threadId).toBe("thread-1"); expect(thread?.session?.providerName).toBe("codex"); + expect(thread?.session?.runtimeMode).toBe("approval-required"); + expect( + thread?.activities.find((activity) => activity.kind === "provider.turn.start.failed"), + ).toMatchObject({ + payload: { + detail: expect.stringContaining("cannot switch to 'claudeAgent'"), + }, + }); + }); + + it("rejects cross-driver provider changes after the existing thread session has stopped", async () => { + const harness = await createHarness(); + const now = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.session.set", + commandId: CommandId.make("cmd-session-set-stopped-provider-switch"), + threadId: ThreadId.make("thread-1"), + session: { + threadId: ThreadId.make("thread-1"), + status: "stopped", + providerName: "codex", + providerInstanceId: ProviderInstanceId.make("codex"), + runtimeMode: "approval-required", + activeTurnId: null, + lastError: null, + updatedAt: now, + }, + createdAt: now, + }), + ); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.make("cmd-turn-start-stopped-provider-switch"), + threadId: ThreadId.make("thread-1"), + message: { + messageId: 
asMessageId("user-message-stopped-provider-switch"), + role: "user", + text: "continue with claude", + attachments: [], + }, + modelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-opus-4-6", + }, + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(async () => { + const readModel = await Effect.runPromise(harness.engine.getReadModel()); + const thread = readModel.threads.find((entry) => entry.id === ThreadId.make("thread-1")); + return ( + thread?.activities.some((activity) => activity.kind === "provider.turn.start.failed") ?? + false + ); + }); + + expect(harness.startSession.mock.calls.length).toBe(0); + expect(harness.sendTurn.mock.calls.length).toBe(0); + const readModel = await Effect.runPromise(harness.engine.getReadModel()); + const thread = readModel.threads.find((entry) => entry.id === ThreadId.make("thread-1")); expect( thread?.activities.find((activity) => activity.kind === "provider.turn.start.failed"), ).toMatchObject({ - summary: "Provider turn start failed", payload: { detail: expect.stringContaining("cannot switch to 'claudeAgent'"), }, @@ -1338,7 +1579,7 @@ describe("ProviderCommandReactor", () => { expect(harness.startSession.mock.calls[0]?.[1]).toMatchObject({ threadId: ThreadId.make("thread-1"), modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, runtimeMode: "approval-required", @@ -1348,6 +1589,77 @@ describe("ProviderCommandReactor", () => { }); }); + it("rejects active runtime sessions that are missing provider instance ids", async () => { + const harness = await createHarness(); + const now = new Date().toISOString(); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.session.set", + commandId: CommandId.make("cmd-session-set-missing-instance"), + threadId: ThreadId.make("thread-1"), + session: { + threadId: ThreadId.make("thread-1"), + 
status: "ready", + providerName: "codex", + runtimeMode: "approval-required", + activeTurnId: null, + lastError: null, + updatedAt: now, + }, + createdAt: now, + }), + ); + harness.runtimeSessions.push({ + provider: ProviderDriverKind.make("codex"), + status: "ready", + runtimeMode: "approval-required", + threadId: ThreadId.make("thread-1"), + cwd: "/tmp/provider-project", + resumeCursor: { opaque: "resume-without-instance" }, + createdAt: now, + updatedAt: now, + }); + + await Effect.runPromise( + harness.engine.dispatch({ + type: "thread.turn.start", + commandId: CommandId.make("cmd-turn-start-missing-instance"), + threadId: ThreadId.make("thread-1"), + message: { + messageId: asMessageId("user-message-missing-instance"), + role: "user", + text: "resume codex", + attachments: [], + }, + interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, + runtimeMode: "approval-required", + createdAt: now, + }), + ); + + await waitFor(async () => { + const readModel = await Effect.runPromise(harness.engine.getReadModel()); + const thread = readModel.threads.find((entry) => entry.id === ThreadId.make("thread-1")); + return ( + thread?.activities.some((activity) => activity.kind === "provider.turn.start.failed") ?? 
+ false + ); + }); + + expect(harness.startSession.mock.calls.length).toBe(0); + expect(harness.sendTurn.mock.calls.length).toBe(0); + const readModel = await Effect.runPromise(harness.engine.getReadModel()); + const thread = readModel.threads.find((entry) => entry.id === ThreadId.make("thread-1")); + expect( + thread?.activities.find((activity) => activity.kind === "provider.turn.start.failed"), + ).toMatchObject({ + payload: { + detail: expect.stringContaining("without a provider instance id"), + }, + }); + }); + it("reacts to thread.approval.respond by forwarding provider approval response", async () => { const harness = await createHarness(); const now = new Date().toISOString(); @@ -1440,7 +1752,7 @@ describe("ProviderCommandReactor", () => { harness.respondToRequest.mockImplementation(() => Effect.fail( new ProviderAdapterRequestError({ - provider: "cursor", + provider: ProviderDriverKind.make("codex"), method: "session/request_permission", detail: "Unknown pending permission request: approval-request-1", }), @@ -1455,7 +1767,7 @@ describe("ProviderCommandReactor", () => { session: { threadId: ThreadId.make("thread-1"), status: "running", - providerName: "cursor", + providerName: "codex", runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -1535,7 +1847,7 @@ describe("ProviderCommandReactor", () => { harness.respondToUserInput.mockImplementation(() => Effect.fail( new ProviderAdapterRequestError({ - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), method: "item/tool/respondToUserInput", detail: "Unknown pending user-input request: user-input-request-1", }), @@ -1651,6 +1963,7 @@ describe("ProviderCommandReactor", () => { threadId: ThreadId.make("thread-1"), status: "ready", providerName: "codex", + providerInstanceId: ProviderInstanceId.make("codex_work"), runtimeMode: "approval-required", activeTurnId: null, lastError: null, @@ -1675,6 +1988,7 @@ describe("ProviderCommandReactor", () => { 
expect(thread?.session).not.toBeNull(); expect(thread?.session?.status).toBe("stopped"); expect(thread?.session?.threadId).toBe("thread-1"); + expect(thread?.session?.providerInstanceId).toBe(ProviderInstanceId.make("codex_work")); expect(thread?.session?.activeTurnId).toBeNull(); }); }); diff --git a/apps/server/src/orchestration/Layers/ProviderCommandReactor.ts b/apps/server/src/orchestration/Layers/ProviderCommandReactor.ts index 8a24c9538a0..998475f6118 100644 --- a/apps/server/src/orchestration/Layers/ProviderCommandReactor.ts +++ b/apps/server/src/orchestration/Layers/ProviderCommandReactor.ts @@ -4,7 +4,7 @@ import { EventId, type ModelSelection, type OrchestrationEvent, - ProviderKind, + ProviderDriverKind, type OrchestrationSession, ThreadId, type ProviderSession, @@ -16,12 +16,10 @@ import { Cache, Cause, Duration, Effect, Equal, Layer, Option, Schema, Stream } import { makeDrainableWorker } from "@t3tools/shared/DrainableWorker"; import { resolveThreadWorkspaceCwd } from "../../checkpointing/Utils.ts"; -import { GitCore } from "../../git/Services/GitCore.ts"; -import { GitStatusBroadcaster } from "../../git/Services/GitStatusBroadcaster.ts"; import { increment, orchestrationEventsProcessedTotal } from "../../observability/Metrics.ts"; import { ProviderAdapterRequestError } from "../../provider/Errors.ts"; import type { ProviderServiceError } from "../../provider/Errors.ts"; -import { TextGeneration } from "../../git/Services/TextGeneration.ts"; +import { TextGeneration } from "../../textGeneration/TextGeneration.ts"; import { ProviderService } from "../../provider/Services/ProviderService.ts"; import { OrchestrationEngineService } from "../Services/OrchestrationEngine.ts"; import { @@ -29,6 +27,8 @@ import { type ProviderCommandReactorShape, } from "../Services/ProviderCommandReactor.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; +import { VcsStatusBroadcaster } from "../../vcs/VcsStatusBroadcaster.ts"; +import { GitWorkflowService 
} from "../../git/GitWorkflowService.ts"; type ProviderIntentEvent = Extract< OrchestrationEvent, @@ -77,6 +77,21 @@ const HANDLED_TURN_START_KEY_TTL = Duration.minutes(30); const DEFAULT_RUNTIME_MODE: RuntimeMode = "full-access"; const DEFAULT_THREAD_TITLE = "New thread"; +export function providerErrorLabel(value: string | undefined): string { + const normalized = value?.trim(); + return normalized && normalized.length > 0 ? normalized : "unknown"; +} + +export function providerErrorLabelFromInstanceHint(input: { + readonly instanceId?: string | undefined; + readonly modelSelectionInstanceId?: string | undefined; + readonly sessionProvider?: string | undefined; +}): string { + return providerErrorLabel( + input.instanceId ?? input.modelSelectionInstanceId ?? input.sessionProvider, + ); +} + function canReplaceThreadTitle(currentTitle: string, titleSeed?: string): boolean { const trimmedCurrentTitle = currentTitle.trim(); if (trimmedCurrentTitle === DEFAULT_THREAD_TITLE) { @@ -153,8 +168,8 @@ function buildGeneratedWorktreeBranchName(raw: string): string { const make = Effect.gen(function* () { const orchestrationEngine = yield* OrchestrationEngineService; const providerService = yield* ProviderService; - const git = yield* GitCore; - const gitStatusBroadcaster = yield* GitStatusBroadcaster; + const gitWorkflow = yield* GitWorkflowService; + const vcsStatusBroadcaster = yield* VcsStatusBroadcaster; const textGeneration = yield* TextGeneration; const serverSettingsService = yield* ServerSettingsService; const handledTurnStartKeys = yield* Cache.make({ @@ -170,18 +185,7 @@ const make = Effect.gen(function* () { ), ); - const threadModelSelections = new Map(); - const getThreadModelSelection = (threadId: ThreadId) => - Effect.sync(() => { - const modelSelection = threadModelSelections.get(threadId); - return modelSelection === undefined - ? 
Option.none() - : Option.some(modelSelection); - }); - const setThreadModelSelection = (threadId: ThreadId, modelSelection: ModelSelection) => - Effect.sync(() => { - threadModelSelections.set(threadId, modelSelection); - }); + const threadModelSelections = new Map(); const appendProviderFailureActivity = (input: { readonly threadId: ThreadId; @@ -282,42 +286,108 @@ const make = Effect.gen(function* () { } const desiredRuntimeMode = thread.runtimeMode; - const currentProvider: ProviderKind | undefined = Schema.is(ProviderKind)( - thread.session?.providerName, - ) - ? thread.session.providerName - : undefined; const requestedModelSelection = options?.modelSelection; - const threadProvider: ProviderKind = currentProvider ?? thread.modelSelection.provider; + const resolveActiveSession = (threadId: ThreadId) => + providerService + .listSessions() + .pipe(Effect.map((sessions) => sessions.find((session) => session.threadId === threadId))); + + const activeSession = yield* resolveActiveSession(threadId); + const activeThreadSession = + thread.session !== null && thread.session.status !== "stopped" && activeSession + ? thread.session + : null; if ( - requestedModelSelection !== undefined && - requestedModelSelection.provider !== threadProvider + activeThreadSession !== null && + activeSession !== undefined && + (activeThreadSession.providerInstanceId === undefined || + activeSession.providerInstanceId === undefined) ) { return yield* new ProviderAdapterRequestError({ - provider: threadProvider, + provider: providerErrorLabel(activeThreadSession.providerName ?? 
undefined), method: "thread.turn.start", - detail: `Thread '${threadId}' is bound to provider '${threadProvider}' and cannot switch to '${requestedModelSelection.provider}'.`, + detail: `Thread '${threadId}' has an active provider session without a provider instance id.`, }); } - const preferredProvider: ProviderKind = threadProvider; + const currentInstanceId = + activeThreadSession !== null && + activeSession !== undefined && + activeSession.providerInstanceId !== undefined + ? activeSession.providerInstanceId + : thread.modelSelection.instanceId; const desiredModelSelection = requestedModelSelection ?? thread.modelSelection; + const desiredInstanceId = desiredModelSelection.instanceId; + const currentInfo = yield* providerService.getInstanceInfo(currentInstanceId).pipe( + Effect.mapError( + () => + new ProviderAdapterRequestError({ + provider: providerErrorLabelFromInstanceHint({ + instanceId: String(currentInstanceId), + modelSelectionInstanceId: String(thread.modelSelection.instanceId), + sessionProvider: thread.session?.providerName ?? undefined, + }), + method: "thread.turn.start", + detail: `Thread '${threadId}' references unknown provider instance '${currentInstanceId}'. 
The instance is not configured in this build.`, + }), + ), + ); + const desiredInfo = yield* providerService.getInstanceInfo(desiredInstanceId).pipe( + Effect.mapError( + () => + new ProviderAdapterRequestError({ + provider: providerErrorLabelFromInstanceHint({ + instanceId: String(desiredModelSelection.instanceId), + }), + method: "thread.turn.start", + detail: `Requested provider instance '${desiredInstanceId}' is not configured in this build.`, + }), + ), + ); + const desiredDriverKind = desiredInfo.driverKind; + if (!Schema.is(ProviderDriverKind)(desiredDriverKind)) { + return yield* new ProviderAdapterRequestError({ + provider: providerErrorLabel(String(desiredDriverKind)), + method: "thread.turn.start", + detail: `Requested provider instance '${desiredInstanceId}' uses unknown provider driver '${desiredDriverKind}'. The driver is not installed in this build.`, + }); + } + const preferredProvider: ProviderDriverKind = desiredDriverKind; + if ( + thread.session !== null && + requestedModelSelection !== undefined && + requestedModelSelection.instanceId !== currentInstanceId + ) { + if (currentInfo.driverKind !== desiredInfo.driverKind) { + return yield* new ProviderAdapterRequestError({ + provider: preferredProvider, + method: "thread.turn.start", + detail: `Thread '${threadId}' is bound to driver '${currentInfo.driverKind}' and cannot switch to '${desiredInfo.driverKind}'.`, + }); + } + if ( + currentInfo.continuationIdentity.continuationKey !== + desiredInfo.continuationIdentity.continuationKey + ) { + return yield* new ProviderAdapterRequestError({ + provider: preferredProvider, + method: "thread.turn.start", + detail: `Thread '${threadId}' cannot switch from instance '${currentInstanceId}' to '${desiredInstanceId}' because their provider resume state is incompatible.`, + }); + } + } const effectiveCwd = resolveThreadWorkspaceCwd({ thread, projects: readModel.projects, }); - const resolveActiveSession = (threadId: ThreadId) => - providerService - 
.listSessions() - .pipe(Effect.map((sessions) => sessions.find((session) => session.threadId === threadId))); - const startProviderSession = (input?: { readonly resumeCursor?: unknown; - readonly provider?: ProviderKind; + readonly provider?: ProviderDriverKind; }) => providerService.startSession(threadId, { threadId, ...(preferredProvider ? { provider: preferredProvider } : {}), + providerInstanceId: desiredInstanceId, ...(effectiveCwd ? { cwd: effectiveCwd } : {}), modelSelection: desiredModelSelection, ...(input?.resumeCursor !== undefined ? { resumeCursor: input.resumeCursor } : {}), @@ -325,46 +395,55 @@ const make = Effect.gen(function* () { }); const bindSessionToThread = (session: ProviderSession) => - setThreadSession({ - threadId, - session: { + Effect.gen(function* () { + if (session.providerInstanceId === undefined) { + return yield* new ProviderAdapterRequestError({ + provider: providerErrorLabel(session.provider), + method: "thread.turn.start", + detail: `Provider session '${session.threadId}' started without a provider instance id.`, + }); + } + yield* setThreadSession({ threadId, - status: mapProviderSessionStatusToOrchestrationStatus(session.status), - providerName: session.provider, - runtimeMode: desiredRuntimeMode, - // Provider turn ids are not orchestration turn ids. - activeTurnId: null, - lastError: session.lastError ?? null, - updatedAt: session.updatedAt, - }, - createdAt, + session: { + threadId, + status: mapProviderSessionStatusToOrchestrationStatus(session.status), + providerName: session.provider, + providerInstanceId: session.providerInstanceId, + runtimeMode: desiredRuntimeMode, + // Provider turn ids are not orchestration turn ids. + activeTurnId: null, + lastError: session.lastError ?? null, + updatedAt: session.updatedAt, + }, + createdAt, + }); }); - const activeSession = yield* resolveActiveSession(threadId); const existingSessionThreadId = thread.session && thread.session.status !== "stopped" && activeSession ? 
thread.id : null; if (existingSessionThreadId) { const runtimeModeChanged = thread.runtimeMode !== thread.session?.runtimeMode; const cwdChanged = effectiveCwd !== activeSession?.cwd; - const sessionModelSwitch = - currentProvider === undefined - ? "in-session" - : (yield* providerService.getCapabilities(currentProvider)).sessionModelSwitch; + const sessionModelSwitch = (yield* providerService.getCapabilities(desiredInstanceId)) + .sessionModelSwitch; const modelChanged = requestedModelSelection !== undefined && requestedModelSelection.model !== activeSession?.model; - const shouldRestartForModelChange = modelChanged && sessionModelSwitch === "restart-session"; - const previousModelSelection = Option.getOrUndefined( - yield* getThreadModelSelection(threadId), - ); + const instanceChanged = + requestedModelSelection !== undefined && + activeSession?.providerInstanceId !== requestedModelSelection.instanceId; + const shouldRestartForModelChange = modelChanged && sessionModelSwitch === "unsupported"; + const previousModelSelection = threadModelSelections.get(threadId); const shouldRestartForModelSelectionChange = - currentProvider === "claudeAgent" && + preferredProvider === "claudeAgent" && requestedModelSelection !== undefined && !Equal.equals(previousModelSelection, requestedModelSelection); if ( !runtimeModeChanged && !cwdChanged && + !instanceChanged && !shouldRestartForModelChange && !shouldRestartForModelSelectionChange ) { @@ -377,8 +456,10 @@ const make = Effect.gen(function* () { yield* Effect.logInfo("provider command reactor restarting provider session", { threadId, existingSessionThreadId, - currentProvider, - desiredProvider: desiredModelSelection.provider, + currentProvider: activeSession?.provider, + currentInstanceId, + desiredInstanceId, + desiredProvider: desiredModelSelection.instanceId, currentRuntimeMode: thread.session?.runtimeMode, desiredRuntimeMode: thread.runtimeMode, runtimeModeChanged, @@ -386,6 +467,7 @@ const make = Effect.gen(function* 
() { desiredCwd: effectiveCwd, cwdChanged, modelChanged, + instanceChanged, shouldRestartForModelChange, shouldRestartForModelSelectionChange, hasResumeCursor: resumeCursor !== undefined, @@ -430,7 +512,7 @@ const make = Effect.gen(function* () { input.modelSelection !== undefined ? { modelSelection: input.modelSelection } : {}, ); if (input.modelSelection !== undefined) { - yield* setThreadModelSelection(input.threadId, input.modelSelection); + threadModelSelections.set(input.threadId, input.modelSelection); } const normalizedInput = toNonEmptyProviderInput(input.messageText); const normalizedAttachments = input.attachments ?? []; @@ -442,11 +524,16 @@ const make = Effect.gen(function* () { const sessionModelSwitch = activeSession === undefined ? "in-session" - : (yield* providerService.getCapabilities(activeSession.provider)).sessionModelSwitch; + : activeSession.providerInstanceId === undefined + ? yield* new ProviderAdapterRequestError({ + provider: providerErrorLabel(activeSession.provider), + method: "thread.turn.start", + detail: `Active provider session '${activeSession.threadId}' is missing a provider instance id.`, + }) + : (yield* providerService.getCapabilities(activeSession.providerInstanceId)) + .sessionModelSwitch; const requestedModelSelection = - input.modelSelection ?? - Option.getOrUndefined(yield* getThreadModelSelection(input.threadId)) ?? - thread.modelSelection; + input.modelSelection ?? threadModelSelections.get(input.threadId) ?? thread.modelSelection; const modelForTurn = sessionModelSwitch === "unsupported" && input.modelSelection === undefined ? 
activeSession?.model !== undefined @@ -500,7 +587,7 @@ const make = Effect.gen(function* () { const targetBranch = buildGeneratedWorktreeBranchName(generated.branch); if (targetBranch === oldBranch) return; - const renamed = yield* git.renameBranch({ cwd, oldBranch, newBranch: targetBranch }); + const renamed = yield* gitWorkflow.renameBranch({ cwd, oldBranch, newBranch: targetBranch }); yield* orchestrationEngine.dispatch({ type: "thread.meta.update", commandId: serverCommandId("worktree-branch-rename"), @@ -508,7 +595,7 @@ const make = Effect.gen(function* () { branch: renamed.branch, worktreePath: cwd, }); - yield* gitStatusBroadcaster.refreshStatus(cwd).pipe(Effect.ignoreCause({ log: true })); + yield* vcsStatusBroadcaster.refreshStatus(cwd).pipe(Effect.ignoreCause({ log: true })); }).pipe( Effect.catchCause((cause) => Effect.logWarning("provider command reactor failed to generate or rename worktree branch", { @@ -815,6 +902,9 @@ const make = Effect.gen(function* () { threadId: thread.id, status: "stopped", providerName: thread.session?.providerName ?? null, + ...(thread.session?.providerInstanceId !== undefined + ? { providerInstanceId: thread.session.providerInstanceId } + : {}), runtimeMode: thread.session?.runtimeMode ?? DEFAULT_RUNTIME_MODE, activeTurnId: null, lastError: thread.session?.lastError ?? 
null, @@ -841,9 +931,7 @@ const make = Effect.gen(function* () { if (!thread?.session || thread.session.status === "stopped") { return; } - const cachedModelSelection = Option.getOrUndefined( - yield* getThreadModelSelection(event.payload.threadId), - ); + const cachedModelSelection = threadModelSelections.get(event.payload.threadId); yield* ensureSessionForThread( event.payload.threadId, event.occurredAt, diff --git a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts index 6352428dac0..487d1a3aac7 100644 --- a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts +++ b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts @@ -2,10 +2,12 @@ import fs from "node:fs"; import os from "node:os"; import path from "node:path"; -import type { +import { OrchestrationReadModel, + ProviderDriverKind, ProviderRuntimeEvent, ProviderSession, + ProviderInstanceId, } from "@t3tools/contracts"; import { ApprovalRequestId, @@ -29,7 +31,6 @@ import { ProviderService, type ProviderServiceShape, } from "../../provider/Services/ProviderService.ts"; -import { getProviderCapabilities } from "../../provider/Services/ProviderAdapter.ts"; import { RepositoryIdentityResolverLive } from "../../project/Layers/RepositoryIdentityResolver.ts"; import { OrchestrationEngineLive } from "./OrchestrationEngine.ts"; import { OrchestrationProjectionPipelineLive } from "./ProjectionPipeline.ts"; @@ -98,7 +99,20 @@ function createProviderServiceHarness() { respondToUserInput: () => unsupported(), stopSession: () => unsupported(), listSessions: () => Effect.succeed([...runtimeSessions]), - getCapabilities: () => Effect.succeed(getProviderCapabilities("codex")), + getCapabilities: () => Effect.succeed({ sessionModelSwitch: "in-session" }), + getInstanceInfo: (instanceId) => { + const driverKind = ProviderDriverKind.make(String(instanceId)); + return Effect.succeed({ + instanceId, + 
driverKind, + displayName: undefined, + enabled: true, + continuationIdentity: { + driverKind, + continuationKey: `${driverKind}:instance:${instanceId}`, + }, + }); + }, rollbackConversation: () => unsupported(), get streamEvents() { return Stream.fromPubSub(runtimeEventPubSub); @@ -143,7 +157,7 @@ function createProviderServiceHarness() { async function waitForThread( engine: OrchestrationEngineShape, predicate: (thread: ProviderRuntimeTestThread) => boolean, - timeoutMs = 5000, + timeoutMs = 2000, threadId: ThreadId = asThreadId("thread-1"), ) { const deadline = Date.now() + timeoutMs; @@ -233,7 +247,7 @@ describe("ProviderRuntimeIngestion", () => { title: "Provider Project", workspaceRoot, defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt, @@ -247,7 +261,7 @@ describe("ProviderRuntimeIngestion", () => { projectId: asProjectId("project-1"), title: "Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -275,7 +289,7 @@ describe("ProviderRuntimeIngestion", () => { }), ); provider.setSession({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), status: "ready", runtimeMode: "approval-required", threadId: ThreadId.make("thread-1"), @@ -298,7 +312,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: now, turnId: asTurnId("turn-1"), @@ -312,7 +326,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.completed", eventId: asEventId("evt-turn-completed"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), turnId: asTurnId("turn-1"), @@ -340,7 +354,7 @@ 
describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "session.state.changed", eventId: asEventId("evt-session-state-waiting"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: waitingAt, payload: { @@ -359,7 +373,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "session.state.changed", eventId: asEventId("evt-session-state-error"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), payload: { @@ -381,7 +395,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "session.state.changed", eventId: asEventId("evt-session-state-stopped"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), payload: { @@ -402,7 +416,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "session.state.changed", eventId: asEventId("evt-session-state-ready"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), payload: { @@ -428,7 +442,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-midturn-lifecycle"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-midturn-lifecycle"), @@ -444,14 +458,14 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "thread.started", eventId: asEventId("evt-thread-started-midturn-lifecycle"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), }); harness.emit({ type: "session.started", eventId: asEventId("evt-session-started-midturn-lifecycle"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), 
createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), }); @@ -465,7 +479,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.completed", eventId: asEventId("evt-turn-completed-midturn-lifecycle"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), turnId: asTurnId("turn-midturn-lifecycle"), @@ -503,7 +517,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-claude-placeholder"), - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), turnId: asTurnId("turn-claude-placeholder"), @@ -519,7 +533,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.completed", eventId: asEventId("evt-turn-completed-claude-placeholder"), - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), turnId: asTurnId("turn-claude-placeholder"), @@ -536,31 +550,10 @@ describe("ProviderRuntimeIngestion", () => { const harness = await createHarness(); const now = new Date().toISOString(); - // Seed thread-2 so the auxiliary completion targets a real but different thread - harness.emit({ - type: "turn.started", - eventId: asEventId("evt-turn-started-thread2-seed"), - provider: "codex", - createdAt: now, - threadId: asThreadId("thread-2"), - turnId: asTurnId("turn-thread2-seed"), - }); - - harness.emit({ - type: "turn.completed", - eventId: asEventId("evt-turn-completed-thread2-seed"), - provider: "codex", - createdAt: now, - threadId: asThreadId("thread-2"), - turnId: asTurnId("turn-thread2-seed"), - status: "completed", - }); - - // Start primary turn on thread-1 harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-primary"), - provider: "codex", + provider: 
ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-primary"), @@ -572,13 +565,12 @@ describe("ProviderRuntimeIngestion", () => { thread.session?.status === "running" && thread.session?.activeTurnId === "turn-primary", ); - // Emit auxiliary turn.completed on thread-2 — should not affect thread-1 harness.emit({ type: "turn.completed", eventId: asEventId("evt-turn-completed-aux"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), - threadId: asThreadId("thread-2"), + threadId: asThreadId("thread-1"), turnId: asTurnId("turn-aux"), status: "completed", }); @@ -592,7 +584,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.completed", eventId: asEventId("evt-turn-completed-primary"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), turnId: asTurnId("turn-primary"), @@ -612,7 +604,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-guarded"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-guarded-main"), @@ -628,7 +620,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.completed", eventId: asEventId("evt-turn-completed-guarded-other"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), turnId: asTurnId("turn-guarded-other"), @@ -644,7 +636,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.completed", eventId: asEventId("evt-turn-completed-guarded-main"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), turnId: asTurnId("turn-guarded-main"), @@ -664,7 +656,7 @@ 
describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-1"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-2"), @@ -677,7 +669,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-2"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-2"), @@ -690,7 +682,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-message-completed"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-2"), @@ -721,7 +713,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-assistant-item-completed-no-delta"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-no-delta"), @@ -753,7 +745,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-tool-completed-with-data"), - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-tool-completed"), @@ -809,7 +801,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-command-completed"), - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-command-completed"), @@ -851,7 +843,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-read-path-completed"), - provider: "cursor", + provider: 
ProviderDriverKind.make("cursor"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-read-path"), @@ -893,7 +885,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.proposed.completed", eventId: asEventId("evt-plan-item-completed"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-plan-final"), @@ -932,7 +924,7 @@ describe("ProviderRuntimeIngestion", () => { projectId: asProjectId("project-1"), title: "Plan Source", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: "plan", @@ -967,7 +959,7 @@ describe("ProviderRuntimeIngestion", () => { projectId: asProjectId("project-1"), title: "Plan Target", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -995,7 +987,7 @@ describe("ProviderRuntimeIngestion", () => { }), ); harness.setProviderSession({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), status: "ready", runtimeMode: "approval-required", threadId: targetThreadId, @@ -1007,7 +999,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.proposed.completed", eventId: asEventId("evt-plan-source-completed"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt, threadId: sourceThreadId, turnId: sourceTurnId, @@ -1077,7 +1069,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-plan-target-started"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: targetThreadId, turnId: targetTurnId, @@ -1119,7 +1111,7 @@ describe("ProviderRuntimeIngestion", () => { projectId: asProjectId("project-1"), title: "Plan Source", modelSelection: { - provider: "codex", + instanceId: 
ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: "plan", @@ -1147,7 +1139,7 @@ describe("ProviderRuntimeIngestion", () => { }), ); harness.setProviderSession({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), status: "running", runtimeMode: "approval-required", threadId: targetThreadId, @@ -1159,7 +1151,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-already-running"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt, threadId: targetThreadId, turnId: activeTurnId, @@ -1176,7 +1168,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.proposed.completed", eventId: asEventId("evt-plan-source-completed-guarded"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt, threadId: sourceThreadId, turnId: sourceTurnId, @@ -1229,7 +1221,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-stale-plan-implementation"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: targetThreadId, turnId: staleTurnId, @@ -1272,7 +1264,7 @@ describe("ProviderRuntimeIngestion", () => { projectId: asProjectId("project-1"), title: "Plan Source", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: "plan", @@ -1307,7 +1299,7 @@ describe("ProviderRuntimeIngestion", () => { projectId: asProjectId("project-1"), title: "Plan Target", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -1338,7 +1330,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.proposed.completed", eventId: asEventId("evt-plan-source-completed-unrelated"), - provider: "codex", + provider: 
ProviderDriverKind.make("codex"), createdAt, threadId: sourceThreadId, turnId: sourceTurnId, @@ -1389,7 +1381,7 @@ describe("ProviderRuntimeIngestion", () => { ); harness.setProviderSession({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), status: "running", runtimeMode: "approval-required", threadId: targetThreadId, @@ -1401,7 +1393,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-unrelated-plan-implementation"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: targetThreadId, turnId: replayedTurnId, @@ -1428,7 +1420,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-plan-buffer"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-plan-buffer"), @@ -1443,7 +1435,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.proposed.delta", eventId: asEventId("evt-plan-delta-1"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-plan-buffer"), @@ -1454,7 +1446,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.proposed.delta", eventId: asEventId("evt-plan-delta-2"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-plan-buffer"), @@ -1465,7 +1457,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.completed", eventId: asEventId("evt-turn-completed-plan-buffer"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-plan-buffer"), @@ -1494,7 +1486,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: 
"turn.started", eventId: asEventId("evt-turn-started-buffered"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered"), @@ -1508,7 +1500,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-buffered"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered"), @@ -1531,7 +1523,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-message-completed-buffered"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered"), @@ -1555,15 +1547,14 @@ describe("ProviderRuntimeIngestion", () => { expect(message?.streaming).toBe(false); }); - // TODO(upstream-sync): re-enable once assistant segmentation reconciled - it.skip("flushes and completes buffered assistant text when an approval request opens", async () => { + it("flushes and completes buffered assistant text when an approval request opens", async () => { const harness = await createHarness(); const now = new Date().toISOString(); harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-buffered-request-flush"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-request-flush"), @@ -1578,7 +1569,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-buffered-request-flush"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-request-flush"), @@ -1591,7 +1582,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: 
"request.opened", eventId: asEventId("evt-request-opened-buffered-request-flush"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-request-flush"), @@ -1616,15 +1607,14 @@ describe("ProviderRuntimeIngestion", () => { expect(message?.streaming).toBe(false); }); - // TODO(upstream-sync): re-enable once assistant segmentation reconciled - it.skip("flushes and completes buffered assistant text when user input is requested", async () => { + it("flushes and completes buffered assistant text when user input is requested", async () => { const harness = await createHarness(); const now = new Date().toISOString(); harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-buffered-user-input-flush"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-user-input-flush"), @@ -1639,7 +1629,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-buffered-user-input-flush"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-user-input-flush"), @@ -1652,7 +1642,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "user-input.requested", eventId: asEventId("evt-user-input-requested-buffered-user-input-flush"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-user-input-flush"), @@ -1692,7 +1682,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-buffered-whitespace-request"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: startedAt, threadId: asThreadId("thread-1"), turnId: 
asTurnId("turn-buffered-whitespace-request"), @@ -1707,7 +1697,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-buffered-whitespace-request"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: startedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-whitespace-request"), @@ -1720,7 +1710,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "request.opened", eventId: asEventId("evt-request-opened-buffered-whitespace-request"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: pausedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-whitespace-request"), @@ -1744,8 +1734,7 @@ describe("ProviderRuntimeIngestion", () => { ).toBe(false); }); - // TODO(upstream-sync): re-enable once assistant segmentation reconciled - it.skip("starts a new buffered assistant message segment after approval and completes without duplication", async () => { + it("starts a new buffered assistant message segment after approval and completes without duplication", async () => { const harness = await createHarness(); const startedAt = "2026-03-28T06:07:00.000Z"; const pausedAt = "2026-03-28T06:07:01.000Z"; @@ -1755,7 +1744,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-buffered-request-append"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: startedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-request-append"), @@ -1770,7 +1759,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-buffered-request-append-initial"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: startedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-request-append"), @@ -1783,7 +1772,7 @@ 
describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "request.opened", eventId: asEventId("evt-request-opened-buffered-request-append"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: pausedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-request-append"), @@ -1806,7 +1795,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-buffered-request-append-followup"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: resumedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-request-append"), @@ -1819,7 +1808,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-message-completed-buffered-request-append"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: completedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffered-request-append"), @@ -1877,8 +1866,7 @@ describe("ProviderRuntimeIngestion", () => { expect(assistantEvents[3]?.payload.text).toBe(""); }); - // TODO(upstream-sync): re-enable once assistant segmentation reconciled - it.skip("starts a new streaming assistant message segment after approval", async () => { + it("starts a new streaming assistant message segment after approval", async () => { const harness = await createHarness({ serverSettings: { enableAssistantStreaming: true } }); const startedAt = "2026-03-28T07:00:00.000Z"; const pausedAt = "2026-03-28T07:00:01.000Z"; @@ -1888,7 +1876,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-streaming-request-segment"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: startedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-streaming-request-segment"), @@ -1903,7 +1891,7 @@ describe("ProviderRuntimeIngestion", () => { 
harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-streaming-request-segment-initial"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: startedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-streaming-request-segment"), @@ -1916,7 +1904,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "request.opened", eventId: asEventId("evt-request-opened-streaming-request-segment"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: pausedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-streaming-request-segment"), @@ -1939,7 +1927,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-streaming-request-segment-followup"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: resumedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-streaming-request-segment"), @@ -1952,7 +1940,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-message-completed-streaming-request-segment"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: completedAt, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-streaming-request-segment"), @@ -2010,7 +1998,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-streaming-mode"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-streaming-mode"), @@ -2025,7 +2013,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-streaming-mode"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-streaming-mode"), @@ -2052,7 
+2040,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-message-completed-streaming-mode"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-streaming-mode"), @@ -2077,255 +2065,6 @@ describe("ProviderRuntimeIngestion", () => { expect(finalMessage?.streaming).toBe(false); }); - it("completes streaming assistant messages even when read model lookup lags completion", async () => { - const harness = await createHarness({ serverSettings: { enableAssistantStreaming: true } }); - const now = new Date().toISOString(); - - await Effect.runPromise( - harness.engine.dispatch({ - type: "thread.turn.start", - commandId: CommandId.make("cmd-turn-start-streaming-lag"), - threadId: ThreadId.make("thread-1"), - message: { - messageId: asMessageId("message-streaming-lag"), - role: "user", - text: "stream with lag", - attachments: [], - }, - interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, - runtimeMode: "approval-required", - createdAt: now, - }), - ); - await Effect.runPromise(Effect.sleep("30 millis")); - - harness.emit({ - type: "turn.started", - eventId: asEventId("evt-turn-started-streaming-lag"), - provider: "codex", - createdAt: now, - threadId: asThreadId("thread-1"), - turnId: asTurnId("turn-streaming-lag"), - }); - await waitForThread( - harness.engine, - (thread) => - thread.session?.status === "running" && - thread.session?.activeTurnId === "turn-streaming-lag", - ); - - harness.emit({ - type: "content.delta", - eventId: asEventId("evt-message-delta-streaming-lag"), - provider: "codex", - createdAt: now, - threadId: asThreadId("thread-1"), - turnId: asTurnId("turn-streaming-lag"), - itemId: asItemId("item-streaming-lag"), - payload: { - streamKind: "assistant_text", - delta: "hello lagged", - }, - }); - - const liveThread = await waitForThread(harness.engine, (entry) => - entry.messages.some( - (message: 
ProviderRuntimeTestMessage) => - message.id === "assistant:item-streaming-lag" && - message.streaming && - message.text === "hello lagged", - ), - ); - const liveMessage = liveThread.messages.find( - (entry: ProviderRuntimeTestMessage) => entry.id === "assistant:item-streaming-lag", - ); - expect(liveMessage?.streaming).toBe(true); - - harness.emit({ - type: "item.completed", - eventId: asEventId("evt-message-completed-streaming-lag"), - provider: "codex", - createdAt: now, - threadId: asThreadId("thread-1"), - turnId: asTurnId("turn-streaming-lag"), - itemId: asItemId("item-streaming-lag"), - payload: { - itemType: "assistant_message", - status: "completed", - }, - }); - - const finalThread = await waitForThread(harness.engine, (entry) => - entry.messages.some( - (message: ProviderRuntimeTestMessage) => - message.id === "assistant:item-streaming-lag" && !message.streaming, - ), - ); - const finalMessage = finalThread.messages.find( - (entry: ProviderRuntimeTestMessage) => entry.id === "assistant:item-streaming-lag", - ); - expect(finalMessage?.text).toBe("hello lagged"); - expect(finalMessage?.streaming).toBe(false); - }); - - it.skip("splits streaming assistant text into separate messages around tool activity", async () => { - const harness = await createHarness({ serverSettings: { enableAssistantStreaming: true } }); - const turnStartedAt = "2026-03-09T10:00:00.000Z"; - const beforeToolAt = "2026-03-09T10:00:01.000Z"; - const toolAt = "2026-03-09T10:00:02.000Z"; - const afterToolAt = "2026-03-09T10:00:03.000Z"; - const completedAt = "2026-03-09T10:00:04.000Z"; - - await Effect.runPromise( - harness.engine.dispatch({ - type: "thread.turn.start", - commandId: CommandId.make("cmd-turn-start-interleaved-streaming"), - threadId: ThreadId.make("thread-1"), - message: { - messageId: asMessageId("message-interleaved-streaming"), - role: "user", - text: "show interleaving", - attachments: [], - }, - interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, - runtimeMode: 
"approval-required", - createdAt: turnStartedAt, - }), - ); - await Effect.runPromise(Effect.sleep("30 millis")); - - harness.emit({ - type: "turn.started", - eventId: asEventId("evt-turn-started-interleaved-streaming"), - provider: "codex", - createdAt: turnStartedAt, - threadId: asThreadId("thread-1"), - turnId: asTurnId("turn-interleaved-streaming"), - }); - await waitForThread( - harness.engine, - (thread) => - thread.session?.status === "running" && - thread.session?.activeTurnId === "turn-interleaved-streaming", - ); - - harness.emit({ - type: "content.delta", - eventId: asEventId("evt-message-delta-before-tool"), - provider: "codex", - createdAt: beforeToolAt, - threadId: asThreadId("thread-1"), - turnId: asTurnId("turn-interleaved-streaming"), - itemId: asItemId("item-interleaved-streaming"), - payload: { - streamKind: "assistant_text", - delta: "Before tool.", - }, - }); - await waitForThread(harness.engine, (thread) => - thread.messages.some( - (message: ProviderRuntimeTestMessage) => - message.id === "assistant:item-interleaved-streaming" && - message.streaming && - message.text === "Before tool.", - ), - ); - - harness.emit({ - type: "item.updated", - eventId: asEventId("evt-tool-updated-interleaved-streaming"), - provider: "codex", - createdAt: toolAt, - threadId: asThreadId("thread-1"), - turnId: asTurnId("turn-interleaved-streaming"), - itemId: asItemId("tool-interleaved-streaming"), - payload: { - itemType: "command_execution", - status: "in_progress", - title: "Run command", - detail: "pwd", - }, - }); - await waitForThread( - harness.engine, - (thread) => - thread.messages.some( - (message: ProviderRuntimeTestMessage) => - message.id === "assistant:item-interleaved-streaming" && - !message.streaming && - message.text === "Before tool.", - ) && - thread.activities.some( - (activity: ProviderRuntimeTestActivity) => - activity.id === "evt-tool-updated-interleaved-streaming" && - activity.kind === "tool.updated", - ), - ); - - harness.emit({ - type: 
"content.delta", - eventId: asEventId("evt-message-delta-after-tool"), - provider: "codex", - createdAt: afterToolAt, - threadId: asThreadId("thread-1"), - turnId: asTurnId("turn-interleaved-streaming"), - itemId: asItemId("item-interleaved-streaming"), - payload: { - streamKind: "assistant_text", - delta: "After tool.", - }, - }); - await waitForThread(harness.engine, (thread) => - thread.messages.some( - (message: ProviderRuntimeTestMessage) => - message.id === "assistant:item-interleaved-streaming:segment:1" && - message.streaming && - message.text === "After tool.", - ), - ); - - harness.emit({ - type: "item.completed", - eventId: asEventId("evt-message-completed-interleaved-streaming"), - provider: "codex", - createdAt: completedAt, - threadId: asThreadId("thread-1"), - turnId: asTurnId("turn-interleaved-streaming"), - itemId: asItemId("item-interleaved-streaming"), - payload: { - itemType: "assistant_message", - status: "completed", - }, - }); - - const thread = await waitForThread(harness.engine, (entry) => - entry.messages.some( - (message: ProviderRuntimeTestMessage) => - message.id === "assistant:item-interleaved-streaming:segment:1" && !message.streaming, - ), - ); - expect( - thread.messages.map((message: ProviderRuntimeTestMessage) => ({ - id: message.id, - text: message.text, - streaming: message.streaming, - })), - ).toEqual( - expect.arrayContaining([ - { - id: "assistant:item-interleaved-streaming", - text: "Before tool.", - streaming: false, - }, - { - id: "assistant:item-interleaved-streaming:segment:1", - text: "After tool.", - streaming: false, - }, - ]), - ); - }); - it("spills oversized buffered deltas and still finalizes full assistant text", async () => { const harness = await createHarness(); const now = new Date().toISOString(); @@ -2334,7 +2073,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-buffer-spill"), - provider: "codex", + provider: 
ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffer-spill"), @@ -2349,7 +2088,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-buffer-spill"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffer-spill"), @@ -2362,7 +2101,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-message-completed-buffer-spill"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-buffer-spill"), @@ -2394,7 +2133,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-turn-started-for-complete-dedup"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-complete-dedup"), @@ -2410,7 +2149,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-message-delta-for-complete-dedup"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-complete-dedup"), @@ -2423,7 +2162,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.completed", eventId: asEventId("evt-message-completed-for-complete-dedup"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-complete-dedup"), @@ -2436,7 +2175,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.completed", eventId: asEventId("evt-turn-completed-for-complete-dedup"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: 
asThreadId("thread-1"), turnId: asTurnId("turn-complete-dedup"), @@ -2480,7 +2219,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "request.opened", eventId: asEventId("evt-request-opened"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), requestId: ApprovalRequestId.make("req-open"), @@ -2493,7 +2232,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "request.resolved", eventId: asEventId("evt-request-resolved"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), requestId: ApprovalRequestId.make("req-open"), @@ -2546,7 +2285,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "runtime.error", eventId: asEventId("evt-runtime-error"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-3"), @@ -2573,7 +2312,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "runtime.error", eventId: asEventId("evt-runtime-error-activity"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-runtime-error-activity"), @@ -2604,7 +2343,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.started", eventId: asEventId("evt-warning-turn-started"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-warning"), @@ -2614,7 +2353,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "runtime.warning", eventId: asEventId("evt-warning-runtime"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-warning"), @@ -2648,7 +2387,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: 
"session.started", eventId: asEventId("evt-session-started"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), message: "session started", @@ -2656,14 +2395,14 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "thread.started", eventId: asEventId("evt-thread-started"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), }); harness.emit({ type: "item.started", eventId: asEventId("evt-tool-started"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-9"), @@ -2700,7 +2439,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "thread.metadata.updated", eventId: asEventId("evt-thread-metadata-updated"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), payload: { @@ -2712,7 +2451,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.plan.updated", eventId: asEventId("evt-turn-plan-updated"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-p1"), @@ -2728,7 +2467,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "item.updated", eventId: asEventId("evt-item-updated"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-p1"), @@ -2745,7 +2484,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "runtime.warning", eventId: asEventId("evt-runtime-warning"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-p1"), @@ -2758,7 +2497,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.diff.updated", eventId: 
asEventId("evt-turn-diff-updated"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-p1"), @@ -2834,7 +2573,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "thread.token-usage.updated", eventId: asEventId("evt-thread-token-usage-updated"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), payload: { @@ -2886,7 +2625,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "thread.token-usage.updated", eventId: asEventId("evt-thread-token-usage-updated-camel"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), payload: { @@ -2939,7 +2678,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "thread.token-usage.updated", eventId: asEventId("evt-thread-token-usage-updated-claude-window"), - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), createdAt: now, threadId: asThreadId("thread-1"), payload: { @@ -2983,7 +2722,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "thread.state.changed", eventId: asEventId("evt-thread-compacted"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-1"), @@ -3013,7 +2752,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "task.started", eventId: asEventId("evt-task-started"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-task-1"), @@ -3026,7 +2765,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "task.progress", eventId: asEventId("evt-task-progress"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: 
asTurnId("turn-task-1"), @@ -3040,7 +2779,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "task.completed", eventId: asEventId("evt-task-completed"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-task-1"), @@ -3053,7 +2792,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "turn.proposed.completed", eventId: asEventId("evt-task-proposed-plan-completed"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-task-1"), @@ -3116,7 +2855,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "user-input.requested", eventId: asEventId("evt-user-input-requested"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-user-input"), @@ -3141,7 +2880,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "user-input.resolved", eventId: asEventId("evt-user-input-resolved"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), turnId: asTurnId("turn-user-input"), @@ -3189,7 +2928,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "content.delta", eventId: asEventId("evt-invalid-delta"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: now, threadId: asThreadId("thread-1"), turnId: asTurnId("turn-invalid"), @@ -3203,7 +2942,7 @@ describe("ProviderRuntimeIngestion", () => { harness.emit({ type: "runtime.error", eventId: asEventId("evt-runtime-error-after-failure"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: asThreadId("thread-1"), turnId: asTurnId("turn-after-failure"), diff --git 
a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.ts b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.ts index 7d98bbe0380..b7a4c195a5b 100644 --- a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.ts +++ b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.ts @@ -32,6 +32,12 @@ const providerTurnKey = (threadId: ThreadId, turnId: TurnId) => `${threadId}:${t const providerCommandId = (event: ProviderRuntimeEvent, tag: string): CommandId => CommandId.make(`provider:${event.eventId}:${tag}:${crypto.randomUUID()}`); +interface AssistantSegmentState { + baseKey: string; + nextSegmentIndex: number; + activeMessageId: MessageId | null; +} + const TURN_MESSAGE_IDS_BY_TURN_CACHE_CAPACITY = 10_000; const TURN_MESSAGE_IDS_BY_TURN_TTL = Duration.minutes(120); const BUFFERED_MESSAGE_TEXT_BY_MESSAGE_ID_CACHE_CAPACITY = 20_000; @@ -56,18 +62,6 @@ type RuntimeIngestionInput = event: TurnStartRequestedDomainEvent; }; -type AssistantSegmentState = { - baseMessageId: MessageId; - currentSegmentIndex: number | null; - nextSegmentIndex: number; -}; - -const assistantSegmentStateKey = (threadId: ThreadId, baseMessageId: MessageId) => - `${threadId}:${baseMessageId}`; - -const assistantSegmentMessageId = (baseMessageId: MessageId, segmentIndex: number): MessageId => - segmentIndex === 0 ? baseMessageId : MessageId.make(`${baseMessageId}:segment:${segmentIndex}`); - function toTurnId(value: TurnId | string | undefined): TurnId | undefined { return value === undefined ? undefined : TurnId.make(String(value)); } @@ -95,6 +89,10 @@ function normalizeProposedPlanMarkdown(planMarkdown: string | undefined): string return trimmed; } +function hasRenderableAssistantText(text: string | undefined): boolean { + return (text?.trim().length ?? 
0) > 0; +} + function proposedPlanIdForTurn(threadId: ThreadId, turnId: TurnId): string { return `plan:${threadId}:turn:${turnId}`; } @@ -110,10 +108,15 @@ function proposedPlanIdFromEvent(event: ProviderRuntimeEvent, threadId: ThreadId return `plan:${threadId}:event:${event.eventId}`; } -function asString(value: unknown): string | undefined { - return typeof value === "string" ? value : undefined; +function assistantSegmentBaseKeyFromEvent(event: ProviderRuntimeEvent): string { + return String(event.itemId ?? event.turnId ?? event.eventId); } +function assistantSegmentMessageId(baseKey: string, segmentIndex: number): MessageId { + return MessageId.make( + segmentIndex === 0 ? `assistant:${baseKey}` : `assistant:${baseKey}:segment:${segmentIndex}`, + ); +} function buildContextWindowActivityPayload( event: ProviderRuntimeEvent, ): ThreadTokenUsageSnapshot | undefined { @@ -123,14 +126,6 @@ function buildContextWindowActivityPayload( return event.payload.usage; } -function runtimePayloadRecord(event: ProviderRuntimeEvent): Record | undefined { - const payload = (event as { payload?: unknown }).payload; - if (!payload || typeof payload !== "object") { - return undefined; - } - return payload as Record; -} - function normalizeRuntimeTurnState( value: string | undefined, ): "completed" | "failed" | "interrupted" | "cancelled" { @@ -145,38 +140,6 @@ function normalizeRuntimeTurnState( } } -function runtimeTurnState( - event: ProviderRuntimeEvent, -): "completed" | "failed" | "interrupted" | "cancelled" { - const payloadState = asString(runtimePayloadRecord(event)?.state); - return normalizeRuntimeTurnState(payloadState); -} - -function runtimeTurnErrorMessage(event: ProviderRuntimeEvent): string | undefined { - const payloadErrorMessage = asString(runtimePayloadRecord(event)?.errorMessage); - return payloadErrorMessage; -} - -function runtimeErrorMessageFromEvent(event: ProviderRuntimeEvent): string | undefined { - const payloadMessage = 
asString(runtimePayloadRecord(event)?.message); - return payloadMessage; -} - -function runtimeErrorClassLabel(errorClass: string): string | undefined { - switch (errorClass) { - case "provider_error": - return "Provider error"; - case "transport_error": - return "Connection error"; - case "permission_error": - return "Permission error"; - case "validation_error": - return "Validation error"; - default: - return undefined; - } -} - function orchestrationSessionStatusFromRuntimeState( state: "starting" | "running" | "waiting" | "ready" | "interrupted" | "stopped" | "error", ): "starting" | "running" | "ready" | "interrupted" | "stopped" | "error" { @@ -280,23 +243,15 @@ function runtimeEventToActivities( } case "runtime.error": { - const message = runtimeErrorMessageFromEvent(event); - if (!message) { - return []; - } - const errorClass = runtimePayloadRecord(event)?.class; - const errorClassLabel = - typeof errorClass === "string" ? runtimeErrorClassLabel(errorClass) : undefined; return [ { id: event.eventId, createdAt: event.createdAt, tone: "error", kind: "runtime.error", - summary: errorClassLabel ?? "Runtime error", + summary: "Runtime error", payload: { - message: truncateDetail(message), - detail: truncateDetail(message), + message: truncateDetail(event.payload.message), }, turnId: toTurnId(event.turnId) ?? 
null, ...maybeSequence, @@ -565,7 +520,7 @@ function runtimeEventToActivities( return []; } -const make = Effect.fn("make")(function* () { +const make = Effect.gen(function* () { const orchestrationEngine = yield* OrchestrationEngineService; const providerService = yield* ProviderService; const projectionTurnRepository = yield* ProjectionTurnRepository; @@ -577,19 +532,19 @@ const make = Effect.fn("make")(function* () { lookup: () => Effect.succeed(new Set()), }); - const bufferedAssistantTextByMessageId = yield* Cache.make< - MessageId, - { text: string; createdAt: string } - >({ + const bufferedAssistantTextByMessageId = yield* Cache.make({ capacity: BUFFERED_MESSAGE_TEXT_BY_MESSAGE_ID_CACHE_CAPACITY, timeToLive: BUFFERED_MESSAGE_TEXT_BY_MESSAGE_ID_TTL, - lookup: () => Effect.succeed({ text: "", createdAt: "" }), + lookup: () => Effect.succeed(""), }); - const assistantMessageSawDeltaByMessageId = yield* Cache.make({ - capacity: BUFFERED_MESSAGE_TEXT_BY_MESSAGE_ID_CACHE_CAPACITY, - timeToLive: BUFFERED_MESSAGE_TEXT_BY_MESSAGE_ID_TTL, - lookup: () => Effect.succeed(false), + const assistantSegmentStateByTurnKey = yield* Cache.make({ + capacity: TURN_MESSAGE_IDS_BY_TURN_CACHE_CAPACITY, + timeToLive: TURN_MESSAGE_IDS_BY_TURN_TTL, + lookup: () => + Effect.die( + new Error("assistant segment state should be read through getOption before initialization"), + ), }); const bufferedProposedPlanById = yield* Cache.make({ @@ -598,9 +553,6 @@ const make = Effect.fn("make")(function* () { lookup: () => Effect.succeed({ text: "", createdAt: "" }), }); - const assistantSegmentStateByKey = new Map(); - const assistantSegmentKeysByTurnKey = new Map>(); - const isGitRepoForThread = Effect.fn("isGitRepoForThread")(function* (threadId: ThreadId) { const readModel = yield* orchestrationEngine.getReadModel(); const thread = readModel.threads.find((entry) => entry.id === threadId); @@ -662,39 +614,107 @@ const make = Effect.fn("make")(function* () { const clearAssistantMessageIdsForTurn 
= (threadId: ThreadId, turnId: TurnId) => Cache.invalidate(turnMessageIdsByTurnKey, providerTurnKey(threadId, turnId)); - const appendBufferedAssistantText = (messageId: MessageId, delta: string, createdAt: string) => + const getAssistantSegmentStateForTurn = (threadId: ThreadId, turnId: TurnId) => + Cache.getOption(assistantSegmentStateByTurnKey, providerTurnKey(threadId, turnId)); + + const setAssistantSegmentStateForTurn = ( + threadId: ThreadId, + turnId: TurnId, + state: AssistantSegmentState, + ) => Cache.set(assistantSegmentStateByTurnKey, providerTurnKey(threadId, turnId), state); + + const clearAssistantSegmentStateForTurn = (threadId: ThreadId, turnId: TurnId) => + Cache.invalidate(assistantSegmentStateByTurnKey, providerTurnKey(threadId, turnId)); + + const getActiveAssistantMessageIdForTurn = (threadId: ThreadId, turnId: TurnId) => + getAssistantSegmentStateForTurn(threadId, turnId).pipe( + Effect.map((state) => + Option.flatMap(state, (entry) => + entry.activeMessageId ? Option.some(entry.activeMessageId) : Option.none(), + ), + ), + ); + + const startAssistantSegmentForTurn = (input: { + threadId: ThreadId; + turnId: TurnId; + baseKey: string; + }) => + getAssistantSegmentStateForTurn(input.threadId, input.turnId).pipe( + Effect.flatMap((existingState) => + Effect.gen(function* () { + const nextState = Option.match(existingState, { + onNone: () => ({ + baseKey: input.baseKey, + nextSegmentIndex: 1, + activeMessageId: assistantSegmentMessageId(input.baseKey, 0), + }), + onSome: (state) => { + const segmentIndex = state.baseKey === input.baseKey ? state.nextSegmentIndex : 0; + const messageId = assistantSegmentMessageId(input.baseKey, segmentIndex); + return { + baseKey: input.baseKey, + nextSegmentIndex: state.baseKey === input.baseKey ? 
state.nextSegmentIndex + 1 : 1, + activeMessageId: messageId, + } satisfies AssistantSegmentState; + }, + }); + yield* setAssistantSegmentStateForTurn(input.threadId, input.turnId, nextState); + return nextState.activeMessageId!; + }), + ), + ); + + const getOrCreateAssistantMessageId = (input: { + threadId: ThreadId; + event: ProviderRuntimeEvent; + turnId?: TurnId; + }) => + Effect.gen(function* () { + if (!input.turnId) { + return assistantSegmentMessageId(assistantSegmentBaseKeyFromEvent(input.event), 0); + } + + const activeMessageId = yield* getActiveAssistantMessageIdForTurn( + input.threadId, + input.turnId, + ); + if (Option.isSome(activeMessageId)) { + return activeMessageId.value; + } + + return yield* startAssistantSegmentForTurn({ + threadId: input.threadId, + turnId: input.turnId, + baseKey: assistantSegmentBaseKeyFromEvent(input.event), + }); + }); + + const appendBufferedAssistantText = (messageId: MessageId, delta: string) => Cache.getOption(bufferedAssistantTextByMessageId, messageId).pipe( - Effect.flatMap( - Effect.fn("appendBufferedAssistantText")(function* (existing) { - const prev = Option.getOrUndefined(existing); - const nextText = `${prev?.text ?? ""}${delta}`; - const nextCreatedAt = - prev?.createdAt && prev.createdAt.length > 0 ? prev.createdAt : createdAt; + Effect.flatMap((existingText) => + Effect.gen(function* () { + const nextText = Option.match(existingText, { + onNone: () => delta, + onSome: (text) => `${text}${delta}`, + }); if (nextText.length <= MAX_BUFFERED_ASSISTANT_CHARS) { - yield* Cache.set(bufferedAssistantTextByMessageId, messageId, { - text: nextText, - createdAt: nextCreatedAt, - }); - return { spillChunk: "", createdAt: nextCreatedAt }; + yield* Cache.set(bufferedAssistantTextByMessageId, messageId, nextText); + return ""; } // Safety valve: flush full buffered text as an assistant delta to cap memory. 
yield* Cache.invalidate(bufferedAssistantTextByMessageId, messageId); - return { spillChunk: nextText, createdAt: nextCreatedAt }; + return nextText; }), ), ); const takeBufferedAssistantText = (messageId: MessageId) => Cache.getOption(bufferedAssistantTextByMessageId, messageId).pipe( - Effect.flatMap((existing) => + Effect.flatMap((existingText) => Cache.invalidate(bufferedAssistantTextByMessageId, messageId).pipe( - Effect.as( - Option.match(existing, { - onNone: () => ({ text: "", createdAt: "" }), - onSome: (entry) => entry, - }), - ), + Effect.as(Option.getOrElse(existingText, () => "")), ), ), ); @@ -702,18 +722,6 @@ const make = Effect.fn("make")(function* () { const clearBufferedAssistantText = (messageId: MessageId) => Cache.invalidate(bufferedAssistantTextByMessageId, messageId); - const markAssistantMessageSawDelta = (messageId: MessageId) => - Cache.set(assistantMessageSawDeltaByMessageId, messageId, true); - - const takeAssistantMessageSawDelta = (messageId: MessageId) => - Cache.getOption(assistantMessageSawDeltaByMessageId, messageId).pipe( - Effect.flatMap((existing) => - Cache.invalidate(assistantMessageSawDeltaByMessageId, messageId).pipe( - Effect.as(Option.getOrElse(existing, () => false)), - ), - ), - ); - const appendBufferedProposedPlan = (planId: string, delta: string, createdAt: string) => Cache.getOption(bufferedProposedPlanById, planId).pipe( Effect.flatMap((existingEntry) => { @@ -738,169 +746,69 @@ const make = Effect.fn("make")(function* () { const clearBufferedProposedPlan = (planId: string) => Cache.invalidate(bufferedProposedPlanById, planId); - const rememberAssistantSegmentKeyForTurn = ( - threadId: ThreadId, - turnId: TurnId, - stateKey: string, - ): void => { - const turnKey = providerTurnKey(threadId, turnId); - const existing = assistantSegmentKeysByTurnKey.get(turnKey); - if (existing) { - existing.add(stateKey); - return; - } - assistantSegmentKeysByTurnKey.set(turnKey, new Set([stateKey])); - }; - - const 
clearAssistantSegmentsForTurn = (threadId: ThreadId, turnId: TurnId): void => { - const turnKey = providerTurnKey(threadId, turnId); - const stateKeys = assistantSegmentKeysByTurnKey.get(turnKey); - if (!stateKeys) { - return; - } - for (const stateKey of stateKeys) { - assistantSegmentStateByKey.delete(stateKey); - } - assistantSegmentKeysByTurnKey.delete(turnKey); - }; - - const clearAssistantSegment = (input: { - threadId: ThreadId; - baseMessageId: MessageId; - turnId?: TurnId; - }): void => { - const stateKey = assistantSegmentStateKey(input.threadId, input.baseMessageId); - assistantSegmentStateByKey.delete(stateKey); - if (!input.turnId) { - return; - } - const turnKey = providerTurnKey(input.threadId, input.turnId); - const stateKeys = assistantSegmentKeysByTurnKey.get(turnKey); - if (!stateKeys) { - return; - } - stateKeys.delete(stateKey); - if (stateKeys.size === 0) { - assistantSegmentKeysByTurnKey.delete(turnKey); - } - }; - - const clearAssistantSegmentsForThread = (threadId: ThreadId): void => { - const prefix = `${threadId}:`; - for (const key of assistantSegmentKeysByTurnKey.keys()) { - if (!key.startsWith(prefix)) { - continue; - } - const stateKeys = assistantSegmentKeysByTurnKey.get(key); - if (stateKeys) { - for (const stateKey of stateKeys) { - assistantSegmentStateByKey.delete(stateKey); - } - } - assistantSegmentKeysByTurnKey.delete(key); - } - }; + const clearAssistantMessageState = (messageId: MessageId) => + clearBufferedAssistantText(messageId); - const openAssistantSegment = (input: { + const flushBufferedAssistantMessage = (input: { + event: ProviderRuntimeEvent; threadId: ThreadId; - baseMessageId: MessageId; + messageId: MessageId; turnId?: TurnId; - }): MessageId => { - const stateKey = assistantSegmentStateKey(input.threadId, input.baseMessageId); - const existingState = assistantSegmentStateByKey.get(stateKey); - if (existingState && existingState.currentSegmentIndex !== null) { - if (input.turnId) { - 
rememberAssistantSegmentKeyForTurn(input.threadId, input.turnId, stateKey); + createdAt: string; + commandTag: string; + }) => + Effect.gen(function* () { + const bufferedText = yield* takeBufferedAssistantText(input.messageId); + if (!hasRenderableAssistantText(bufferedText)) { + return false; } - return assistantSegmentMessageId( - existingState.baseMessageId, - existingState.currentSegmentIndex, - ); - } - const segmentIndex = existingState?.nextSegmentIndex ?? 0; - assistantSegmentStateByKey.set(stateKey, { - baseMessageId: input.baseMessageId, - currentSegmentIndex: segmentIndex, - nextSegmentIndex: segmentIndex + 1, + yield* orchestrationEngine.dispatch({ + type: "thread.message.assistant.delta", + commandId: providerCommandId(input.event, input.commandTag), + threadId: input.threadId, + messageId: input.messageId, + delta: bufferedText, + ...(input.turnId ? { turnId: input.turnId } : {}), + createdAt: input.createdAt, + }); + return true; }); - if (input.turnId) { - rememberAssistantSegmentKeyForTurn(input.threadId, input.turnId, stateKey); - } - return assistantSegmentMessageId(input.baseMessageId, segmentIndex); - }; - const takeOpenAssistantSegmentMessageId = (input: { - threadId: ThreadId; - baseMessageId: MessageId; - }): { messageId: MessageId; hadAnySegment: boolean } | null => { - const stateKey = assistantSegmentStateKey(input.threadId, input.baseMessageId); - const state = assistantSegmentStateByKey.get(stateKey); - if (!state) { - return { messageId: input.baseMessageId, hadAnySegment: false }; - } - if (state.currentSegmentIndex === null) { - return state.nextSegmentIndex > 0 - ? 
null - : { messageId: input.baseMessageId, hadAnySegment: false }; - } - return { - messageId: assistantSegmentMessageId(state.baseMessageId, state.currentSegmentIndex), - hadAnySegment: state.nextSegmentIndex > 0, - }; - }; - - const closeOpenAssistantSegmentsForTurn = (input: { + const flushBufferedAssistantMessagesForTurn = (input: { event: ProviderRuntimeEvent; threadId: ThreadId; turnId: TurnId; createdAt: string; - existingAssistantMessageById: ReadonlyMap< - MessageId, - { readonly id: MessageId; readonly text: string; readonly streaming: boolean } - >; + commandTag: string; }) => Effect.gen(function* () { - const turnKey = providerTurnKey(input.threadId, input.turnId); - const stateKeys = Array.from(assistantSegmentKeysByTurnKey.get(turnKey) ?? []); + const assistantMessageIds = yield* getAssistantMessageIdsForTurn( + input.threadId, + input.turnId, + ); + const flushedMessageIds = new Set(); yield* Effect.forEach( - stateKeys, - (stateKey) => - Effect.gen(function* () { - const state = assistantSegmentStateByKey.get(stateKey); - if (!state || state.currentSegmentIndex === null) { - return; - } - const messageId = assistantSegmentMessageId( - state.baseMessageId, - state.currentSegmentIndex, - ); - assistantSegmentStateByKey.set(stateKey, { - ...state, - currentSegmentIndex: null, - }); - yield* finalizeAssistantMessage({ - event: input.event, - threadId: input.threadId, - messageId, - turnId: input.turnId, - createdAt: input.createdAt, - commandTag: "assistant-complete-tool-boundary", - finalDeltaCommandTag: "assistant-delta-tool-boundary", - existingMessage: input.existingAssistantMessageById.get(messageId), - }); - }), + assistantMessageIds, + (messageId) => + flushBufferedAssistantMessage({ + event: input.event, + threadId: input.threadId, + messageId, + turnId: input.turnId, + createdAt: input.createdAt, + commandTag: input.commandTag, + }).pipe( + Effect.tap((flushed) => + flushed ? 
Effect.sync(() => flushedMessageIds.add(messageId)) : Effect.void, + ), + ), { concurrency: 1 }, ).pipe(Effect.asVoid); + return flushedMessageIds; }); - const clearAssistantMessageState = (messageId: MessageId) => - Effect.all([ - clearBufferedAssistantText(messageId), - Cache.invalidate(assistantMessageSawDeltaByMessageId, messageId), - ]).pipe(Effect.asVoid); - - const finalizeAssistantMessage = Effect.fn("finalizeAssistantMessage")(function* (input: { + const finalizeAssistantMessage = (input: { event: ProviderRuntimeEvent; threadId: ThreadId; messageId: MessageId; @@ -909,65 +817,86 @@ const make = Effect.fn("make")(function* () { commandTag: string; finalDeltaCommandTag: string; fallbackText?: string; - existingMessage?: - | { - readonly id: MessageId; - readonly text: string; - readonly streaming: boolean; - } - | undefined; - }) { - if (input.existingMessage && !input.existingMessage.streaming) { - yield* clearAssistantMessageState(input.messageId); - return; - } - - const buffered = yield* takeBufferedAssistantText(input.messageId); - const bufferedText = buffered.text; - - const sawDelta = yield* takeAssistantMessageSawDelta(input.messageId); - const text = - bufferedText.length > 0 - ? bufferedText - : !sawDelta && (input.fallbackText?.trim().length ?? 0) > 0 - ? input.fallbackText! - : ""; + hasProjectedMessage?: boolean; + }) => + Effect.gen(function* () { + const bufferedText = yield* takeBufferedAssistantText(input.messageId); + const text = + bufferedText.length > 0 + ? bufferedText + : (input.fallbackText?.trim().length ?? 0) > 0 + ? input.fallbackText! + : ""; + const hasRenderableText = hasRenderableAssistantText(text); + + if (hasRenderableText) { + yield* orchestrationEngine.dispatch({ + type: "thread.message.assistant.delta", + commandId: providerCommandId(input.event, input.finalDeltaCommandTag), + threadId: input.threadId, + messageId: input.messageId, + delta: text, + ...(input.turnId ? 
{ turnId: input.turnId } : {}), + createdAt: input.createdAt, + }); + } - if (text.length === 0 && !input.existingMessage) { + if (input.hasProjectedMessage || hasRenderableText) { + yield* orchestrationEngine.dispatch({ + type: "thread.message.assistant.complete", + commandId: providerCommandId(input.event, input.commandTag), + threadId: input.threadId, + messageId: input.messageId, + ...(input.turnId ? { turnId: input.turnId } : {}), + createdAt: input.createdAt, + }); + } yield* clearAssistantMessageState(input.messageId); - return; - } + }); - // Use the original timestamp from when the first delta arrived, not the - // finalization time. This ensures assistant text messages are positioned - // chronologically relative to tool activities in the timeline instead of - // all appearing at the end when the turn completes. - const deltaCreatedAt = buffered.createdAt.length > 0 ? buffered.createdAt : input.createdAt; + const finalizeActiveAssistantSegmentForTurn = (input: { + event: ProviderRuntimeEvent; + threadId: ThreadId; + turnId: TurnId; + createdAt: string; + commandTag: string; + finalDeltaCommandTag: string; + hasProjectedMessage: boolean; + flushedMessageIds?: ReadonlySet; + }) => + Effect.gen(function* () { + const activeMessageId = yield* getActiveAssistantMessageIdForTurn( + input.threadId, + input.turnId, + ); + if (Option.isNone(activeMessageId)) { + return; + } - if (text.length > 0) { - yield* orchestrationEngine.dispatch({ - type: "thread.message.assistant.delta", - commandId: providerCommandId(input.event, input.finalDeltaCommandTag), + yield* finalizeAssistantMessage({ + event: input.event, threadId: input.threadId, - messageId: input.messageId, - delta: text, - ...(input.turnId ? 
{ turnId: input.turnId } : {}), - createdAt: deltaCreatedAt, + messageId: activeMessageId.value, + turnId: input.turnId, + createdAt: input.createdAt, + commandTag: input.commandTag, + finalDeltaCommandTag: input.finalDeltaCommandTag, + hasProjectedMessage: + input.hasProjectedMessage || + (input.flushedMessageIds?.has(activeMessageId.value) ?? false), }); - } + yield* forgetAssistantMessageId(input.threadId, input.turnId, activeMessageId.value); - yield* orchestrationEngine.dispatch({ - type: "thread.message.assistant.complete", - commandId: providerCommandId(input.event, input.commandTag), - threadId: input.threadId, - messageId: input.messageId, - ...(input.turnId ? { turnId: input.turnId } : {}), - createdAt: input.createdAt, + const state = yield* getAssistantSegmentStateForTurn(input.threadId, input.turnId); + if (Option.isSome(state)) { + yield* setAssistantSegmentStateForTurn(input.threadId, input.turnId, { + ...state.value, + activeMessageId: null, + }); + } }); - yield* clearAssistantMessageState(input.messageId); - }); - const upsertProposedPlan = Effect.fn("upsertProposedPlan")(function* (input: { + const upsertProposedPlan = (input: { event: ProviderRuntimeEvent; threadId: ThreadId; threadProposedPlans: ReadonlyArray<{ @@ -981,31 +910,32 @@ const make = Effect.fn("make")(function* () { planMarkdown: string | undefined; createdAt: string; updatedAt: string; - }) { - const planMarkdown = normalizeProposedPlanMarkdown(input.planMarkdown); - if (!planMarkdown) { - return; - } + }) => + Effect.gen(function* () { + const planMarkdown = normalizeProposedPlanMarkdown(input.planMarkdown); + if (!planMarkdown) { + return; + } - const existingPlan = input.threadProposedPlans.find((entry) => entry.id === input.planId); - yield* orchestrationEngine.dispatch({ - type: "thread.proposed-plan.upsert", - commandId: providerCommandId(input.event, "proposed-plan-upsert"), - threadId: input.threadId, - proposedPlan: { - id: input.planId, - turnId: input.turnId ?? 
null, - planMarkdown, - implementedAt: existingPlan?.implementedAt ?? null, - implementationThreadId: existingPlan?.implementationThreadId ?? null, - createdAt: existingPlan?.createdAt ?? input.createdAt, - updatedAt: input.updatedAt, - }, - createdAt: input.updatedAt, + const existingPlan = input.threadProposedPlans.find((entry) => entry.id === input.planId); + yield* orchestrationEngine.dispatch({ + type: "thread.proposed-plan.upsert", + commandId: providerCommandId(input.event, "proposed-plan-upsert"), + threadId: input.threadId, + proposedPlan: { + id: input.planId, + turnId: input.turnId ?? null, + planMarkdown, + implementedAt: existingPlan?.implementedAt ?? null, + implementationThreadId: existingPlan?.implementationThreadId ?? null, + createdAt: existingPlan?.createdAt ?? input.createdAt, + updatedAt: input.updatedAt, + }, + createdAt: input.updatedAt, + }); }); - }); - const finalizeBufferedProposedPlan = Effect.fn("finalizeBufferedProposedPlan")(function* (input: { + const finalizeBufferedProposedPlan = (input: { event: ProviderRuntimeEvent; threadId: ThreadId; threadProposedPlans: ReadonlyArray<{ @@ -1018,71 +948,75 @@ const make = Effect.fn("make")(function* () { turnId?: TurnId; fallbackMarkdown?: string; updatedAt: string; - }) { - const bufferedPlan = yield* takeBufferedProposedPlan(input.planId); - const bufferedMarkdown = normalizeProposedPlanMarkdown(bufferedPlan?.text); - const fallbackMarkdown = normalizeProposedPlanMarkdown(input.fallbackMarkdown); - const planMarkdown = bufferedMarkdown ?? fallbackMarkdown; - if (!planMarkdown) { - return; - } + }) => + Effect.gen(function* () { + const bufferedPlan = yield* takeBufferedProposedPlan(input.planId); + const bufferedMarkdown = normalizeProposedPlanMarkdown(bufferedPlan?.text); + const fallbackMarkdown = normalizeProposedPlanMarkdown(input.fallbackMarkdown); + const planMarkdown = bufferedMarkdown ?? 
fallbackMarkdown; + if (!planMarkdown) { + return; + } - yield* upsertProposedPlan({ - event: input.event, - threadId: input.threadId, - threadProposedPlans: input.threadProposedPlans, - planId: input.planId, - ...(input.turnId ? { turnId: input.turnId } : {}), - planMarkdown, - createdAt: - bufferedPlan?.createdAt && bufferedPlan.createdAt.length > 0 - ? bufferedPlan.createdAt - : input.updatedAt, - updatedAt: input.updatedAt, + yield* upsertProposedPlan({ + event: input.event, + threadId: input.threadId, + threadProposedPlans: input.threadProposedPlans, + planId: input.planId, + ...(input.turnId ? { turnId: input.turnId } : {}), + planMarkdown, + createdAt: + bufferedPlan?.createdAt && bufferedPlan.createdAt.length > 0 + ? bufferedPlan.createdAt + : input.updatedAt, + updatedAt: input.updatedAt, + }); + yield* clearBufferedProposedPlan(input.planId); }); - yield* clearBufferedProposedPlan(input.planId); - }); - - const clearTurnStateForSession = Effect.fn("clearTurnStateForSession")(function* ( - threadId: ThreadId, - ) { - const prefix = `${threadId}:`; - const proposedPlanPrefix = `plan:${threadId}:`; - const turnKeys = Array.from(yield* Cache.keys(turnMessageIdsByTurnKey)); - const proposedPlanKeys = Array.from(yield* Cache.keys(bufferedProposedPlanById)); - yield* Effect.forEach( - turnKeys, - Effect.fn(function* (key) { - if (!key.startsWith(prefix)) { - return; - } - const messageIds = yield* Cache.getOption(turnMessageIdsByTurnKey, key); - if (Option.isSome(messageIds)) { - yield* Effect.forEach(messageIds.value, clearAssistantMessageState, { - concurrency: 1, - }).pipe(Effect.asVoid); - } + const clearTurnStateForSession = (threadId: ThreadId) => + Effect.gen(function* () { + const prefix = `${threadId}:`; + const proposedPlanPrefix = `plan:${threadId}:`; + const turnKeys = Array.from(yield* Cache.keys(turnMessageIdsByTurnKey)); + const assistantSegmentKeys = Array.from(yield* Cache.keys(assistantSegmentStateByTurnKey)); + const proposedPlanKeys = 
Array.from(yield* Cache.keys(bufferedProposedPlanById)); + yield* Effect.forEach( + turnKeys, + (key) => + Effect.gen(function* () { + if (!key.startsWith(prefix)) { + return; + } - yield* Cache.invalidate(turnMessageIdsByTurnKey, key); - }), - { concurrency: 1 }, - ).pipe(Effect.asVoid); - yield* Effect.forEach( - proposedPlanKeys, - (key) => - key.startsWith(proposedPlanPrefix) - ? Cache.invalidate(bufferedProposedPlanById, key) - : Effect.void, - { concurrency: 1 }, - ).pipe(Effect.asVoid); - clearAssistantSegmentsForThread(threadId); - }); + const messageIds = yield* Cache.getOption(turnMessageIdsByTurnKey, key); + if (Option.isSome(messageIds)) { + yield* Effect.forEach(messageIds.value, clearAssistantMessageState, { + concurrency: 1, + }).pipe(Effect.asVoid); + } - // Accumulate token usage from thread.token-usage.updated events so - // providers like Copilot and Amp (which emit usage separately from - // turn.completed) still get turn-level usage in the completion summary. - const pendingTokenUsageByThread = new Map>(); + yield* Cache.invalidate(turnMessageIdsByTurnKey, key); + }), + { concurrency: 1 }, + ).pipe(Effect.asVoid); + yield* Effect.forEach( + assistantSegmentKeys, + (key) => + key.startsWith(prefix) + ? Cache.invalidate(assistantSegmentStateByTurnKey, key) + : Effect.void, + { concurrency: 1 }, + ).pipe(Effect.asVoid); + yield* Effect.forEach( + proposedPlanKeys, + (key) => + key.startsWith(proposedPlanPrefix) + ? 
Cache.invalidate(bufferedProposedPlanById, key) + : Effect.void, + { concurrency: 1 }, + ).pipe(Effect.asVoid); + }); const getSourceProposedPlanReferenceForPendingTurnStart = Effect.fn( "getSourceProposedPlanReferenceForPendingTurnStart", @@ -1160,487 +1094,438 @@ const make = Effect.fn("make")(function* () { }, ); - const processRuntimeEvent = Effect.fn("processRuntimeEvent")(function* ( - event: ProviderRuntimeEvent, - ) { - // Accumulate token usage events per thread - if (event.type === "thread.token-usage.updated") { - const payload = runtimePayloadRecord(event); - const raw = payload?.usage; - if (raw && typeof raw === "object") { - const prev = pendingTokenUsageByThread.get(event.threadId) ?? {}; - const incoming = raw as Record; - // Merge by summing numeric fields - const merged: Record = { ...prev }; - for (const [k, v] of Object.entries(incoming)) { - if (typeof v === "number" && typeof (prev[k] ?? 0) === "number") { - merged[k] = ((prev[k] as number) ?? 0) + v; - } else { - merged[k] = v; - } - } - pendingTokenUsageByThread.set(event.threadId, merged); - } - } - - // Clear accumulated usage when a new turn starts - if (event.type === "turn.started") { - pendingTokenUsageByThread.delete(event.threadId); - } - - const readModel = yield* orchestrationEngine.getReadModel(); - const thread = readModel.threads.find((entry) => entry.id === event.threadId); - if (!thread) return; - - const now = event.createdAt; - const eventTurnId = toTurnId(event.turnId); - const activeTurnId = thread.session?.activeTurnId ?? 
null; - - const existingAssistantMessageById = new Map( - thread.messages.map((message) => [message.id, message] as const), - ); + const processRuntimeEvent = (event: ProviderRuntimeEvent) => + Effect.gen(function* () { + const readModel = yield* orchestrationEngine.getReadModel(); + const thread = readModel.threads.find((entry) => entry.id === event.threadId); + if (!thread) return; - const assistantBaseMessageId = - event.type === "content.delta" || - (event.type === "item.completed" && event.payload.itemType === "assistant_message") - ? MessageId.make(`assistant:${event.itemId ?? event.turnId ?? event.eventId}`) - : undefined; + const now = event.createdAt; + const eventTurnId = toTurnId(event.turnId); + const activeTurnId = thread.session?.activeTurnId ?? null; - const conflictsWithActiveTurn = - activeTurnId !== null && eventTurnId !== undefined && !sameId(activeTurnId, eventTurnId); - const missingTurnForActiveTurn = activeTurnId !== null && eventTurnId === undefined; + const conflictsWithActiveTurn = + activeTurnId !== null && eventTurnId !== undefined && !sameId(activeTurnId, eventTurnId); + const missingTurnForActiveTurn = activeTurnId !== null && eventTurnId === undefined; - const shouldApplyThreadLifecycle = (() => { - if (!STRICT_PROVIDER_LIFECYCLE_GUARD) { - return true; - } - switch (event.type) { - case "session.exited": + const shouldApplyThreadLifecycle = (() => { + if (!STRICT_PROVIDER_LIFECYCLE_GUARD) { return true; - case "session.started": - case "thread.started": - return true; - case "turn.started": - return !conflictsWithActiveTurn; - case "turn.completed": - case "turn.aborted": - if (conflictsWithActiveTurn || missingTurnForActiveTurn) { - return false; - } - // Only the active turn may close the lifecycle state. - if (activeTurnId !== null && eventTurnId !== undefined) { - return sameId(activeTurnId, eventTurnId); - } - // If no active turn is tracked, accept completion scoped to this thread. 
- return true; - default: - return true; - } - })(); - const acceptedTurnStartedSourcePlan = - event.type === "turn.started" && shouldApplyThreadLifecycle - ? yield* getSourceProposedPlanReferenceForAcceptedTurnStart(thread.id, eventTurnId) - : null; - - if ( - event.type === "session.started" || - event.type === "session.state.changed" || - event.type === "session.exited" || - event.type === "thread.started" || - event.type === "turn.started" || - event.type === "turn.completed" || - event.type === "turn.aborted" - ) { - const nextActiveTurnId = - event.type === "turn.started" - ? (eventTurnId ?? null) - : event.type === "turn.completed" || - event.type === "turn.aborted" || - event.type === "session.exited" - ? null - : activeTurnId; - const status = (() => { + } switch (event.type) { - case "session.state.changed": - return orchestrationSessionStatusFromRuntimeState(event.payload.state); - case "turn.started": - return "running"; case "session.exited": - return "stopped"; - case "turn.completed": - return runtimeTurnState(event) === "failed" ? "error" : "ready"; - case "turn.aborted": - return "interrupted"; + return true; case "session.started": case "thread.started": - // Provider thread/session start notifications can arrive during an - // active turn; preserve turn-running state in that case. - return activeTurnId !== null ? "running" : "ready"; + return true; + case "turn.started": + return !conflictsWithActiveTurn; + case "turn.completed": + if (conflictsWithActiveTurn || missingTurnForActiveTurn) { + return false; + } + // Only the active turn may close the lifecycle state. + if (activeTurnId !== null && eventTurnId !== undefined) { + return sameId(activeTurnId, eventTurnId); + } + // If no active turn is tracked, accept completion scoped to this thread. + return true; + default: + return true; } })(); - const lastError = - event.type === "session.state.changed" && event.payload.state === "error" - ? (event.payload.reason ?? thread.session?.lastError ?? 
"Provider session error") - : event.type === "turn.completed" && runtimeTurnState(event) === "failed" - ? (runtimeTurnErrorMessage(event) ?? thread.session?.lastError ?? "Turn failed") - : status === "ready" || status === "interrupted" + const acceptedTurnStartedSourcePlan = + event.type === "turn.started" && shouldApplyThreadLifecycle + ? yield* getSourceProposedPlanReferenceForAcceptedTurnStart(thread.id, eventTurnId) + : null; + + if ( + event.type === "session.started" || + event.type === "session.state.changed" || + event.type === "session.exited" || + event.type === "thread.started" || + event.type === "turn.started" || + event.type === "turn.completed" + ) { + const nextActiveTurnId = + event.type === "turn.started" + ? (eventTurnId ?? null) + : event.type === "turn.completed" || event.type === "session.exited" ? null - : (thread.session?.lastError ?? null); - - if (shouldApplyThreadLifecycle) { - const turnUsagePayload = - event.type === "turn.completed" ? runtimePayloadRecord(event) : undefined; - let turnUsage = - turnUsagePayload?.usage !== undefined && - turnUsagePayload.usage !== null && - typeof turnUsagePayload.usage === "object" - ? (turnUsagePayload.usage as Record) - : undefined; - - // Fall back to accumulated thread.token-usage.updated data - // for providers (Copilot, Amp) that emit usage separately. - if (!turnUsage && (event.type === "turn.completed" || event.type === "turn.aborted")) { - const pending = pendingTokenUsageByThread.get(event.threadId); - if (pending) { - turnUsage = pending; + : activeTurnId; + const status = (() => { + switch (event.type) { + case "session.state.changed": + return orchestrationSessionStatusFromRuntimeState(event.payload.state); + case "turn.started": + return "running"; + case "session.exited": + return "stopped"; + case "turn.completed": + return normalizeRuntimeTurnState(event.payload.state) === "failed" + ? 
"error" + : "ready"; + case "session.started": + case "thread.started": + // Provider thread/session start notifications can arrive during an + // active turn; preserve turn-running state in that case. + return activeTurnId !== null ? "running" : "ready"; + } + })(); + const lastError = + event.type === "session.state.changed" && event.payload.state === "error" + ? (event.payload.reason ?? thread.session?.lastError ?? "Provider session error") + : event.type === "turn.completed" && + normalizeRuntimeTurnState(event.payload.state) === "failed" + ? (event.payload.errorMessage ?? thread.session?.lastError ?? "Turn failed") + : status === "ready" + ? null + : (thread.session?.lastError ?? null); + + if (shouldApplyThreadLifecycle) { + if (event.type === "turn.started" && acceptedTurnStartedSourcePlan !== null) { + yield* markSourceProposedPlanImplemented( + acceptedTurnStartedSourcePlan.sourceThreadId, + acceptedTurnStartedSourcePlan.sourcePlanId, + thread.id, + now, + ).pipe( + Effect.catchCause((cause) => + Effect.logWarning( + "provider runtime ingestion failed to mark source proposed plan", + { + eventId: event.eventId, + eventType: event.type, + cause: Cause.pretty(cause), + }, + ), + ), + ); } - } - if (event.type === "turn.completed" || event.type === "turn.aborted") { - pendingTokenUsageByThread.delete(event.threadId); - } - - if (event.type === "turn.started" && acceptedTurnStartedSourcePlan !== null) { - yield* markSourceProposedPlanImplemented( - acceptedTurnStartedSourcePlan.sourceThreadId, - acceptedTurnStartedSourcePlan.sourcePlanId, - thread.id, - now, - ).pipe( - Effect.catchCause((cause) => - Effect.logWarning("provider runtime ingestion failed to mark source proposed plan", { - eventId: event.eventId, - eventType: event.type, - cause: Cause.pretty(cause), - }), - ), - ); - } - yield* orchestrationEngine.dispatch({ - type: "thread.session.set", - commandId: providerCommandId(event, "thread-session-set"), - threadId: thread.id, - session: { + yield* 
orchestrationEngine.dispatch({ + type: "thread.session.set", + commandId: providerCommandId(event, "thread-session-set"), threadId: thread.id, - status, - providerName: event.provider, - runtimeMode: thread.session?.runtimeMode ?? "full-access", - activeTurnId: nextActiveTurnId, - lastError, - updatedAt: now, - }, - ...(turnUsage ? { turnUsage } : {}), - createdAt: now, - }); + session: { + threadId: thread.id, + status, + providerName: event.provider, + ...(event.providerInstanceId !== undefined + ? { providerInstanceId: event.providerInstanceId } + : {}), + runtimeMode: thread.session?.runtimeMode ?? "full-access", + activeTurnId: nextActiveTurnId, + lastError, + updatedAt: now, + }, + createdAt: now, + }); + } } - } - const assistantDelta = - event.type === "content.delta" && event.payload.streamKind === "assistant_text" - ? event.payload.delta - : undefined; - const proposedPlanDelta = - event.type === "turn.proposed.delta" ? event.payload.delta : undefined; - - const isToolLifecycleEvent = - eventTurnId !== undefined && - ((event.type === "item.started" && isToolLifecycleItemType(event.payload.itemType)) || - (event.type === "item.updated" && isToolLifecycleItemType(event.payload.itemType)) || - (event.type === "item.completed" && isToolLifecycleItemType(event.payload.itemType))); - if (isToolLifecycleEvent) { - yield* closeOpenAssistantSegmentsForTurn({ - event, - threadId: thread.id, - turnId: eventTurnId, - createdAt: now, - existingAssistantMessageById, - }); - } + const assistantDelta = + event.type === "content.delta" && event.payload.streamKind === "assistant_text" + ? event.payload.delta + : undefined; + const proposedPlanDelta = + event.type === "turn.proposed.delta" ? event.payload.delta : undefined; - if (assistantDelta && assistantDelta.length > 0) { - const assistantMessageId = MessageId.make( - `assistant:${event.itemId ?? event.turnId ?? 
event.eventId}`, - ); - const turnId = toTurnId(event.turnId); - if (turnId) { - yield* rememberAssistantMessageId(thread.id, turnId, assistantMessageId); - } - yield* markAssistantMessageSawDelta(assistantMessageId); + if (assistantDelta && assistantDelta.length > 0) { + const turnId = toTurnId(event.turnId); + const assistantMessageId = yield* getOrCreateAssistantMessageId({ + threadId: thread.id, + event, + ...(turnId ? { turnId } : {}), + }); + if (turnId) { + yield* rememberAssistantMessageId(thread.id, turnId, assistantMessageId); + } - const assistantDeliveryMode: AssistantDeliveryMode = yield* Effect.map( - serverSettingsService.getSettings, - (settings) => (settings.enableAssistantStreaming ? "streaming" : "buffered"), - ); - if (assistantDeliveryMode === "buffered") { - const spillResult = yield* appendBufferedAssistantText( - assistantMessageId, - assistantDelta, - now, + const assistantDeliveryMode: AssistantDeliveryMode = yield* Effect.map( + serverSettingsService.getSettings, + (settings) => (settings.enableAssistantStreaming ? "streaming" : "buffered"), ); - if (spillResult.spillChunk.length > 0) { + if (assistantDeliveryMode === "buffered") { + const spillChunk = yield* appendBufferedAssistantText(assistantMessageId, assistantDelta); + if (spillChunk.length > 0) { + yield* orchestrationEngine.dispatch({ + type: "thread.message.assistant.delta", + commandId: providerCommandId(event, "assistant-delta-buffer-spill"), + threadId: thread.id, + messageId: assistantMessageId, + delta: spillChunk, + ...(turnId ? { turnId } : {}), + createdAt: now, + }); + } + } else { yield* orchestrationEngine.dispatch({ type: "thread.message.assistant.delta", - commandId: providerCommandId(event, "assistant-delta-buffer-spill"), + commandId: providerCommandId(event, "assistant-delta"), threadId: thread.id, messageId: assistantMessageId, - delta: spillResult.spillChunk, + delta: assistantDelta, ...(turnId ? 
{ turnId } : {}), - createdAt: spillResult.createdAt, + createdAt: now, }); } - } else { - yield* orchestrationEngine.dispatch({ - type: "thread.message.assistant.delta", - commandId: providerCommandId(event, "assistant-delta"), + } + + const pauseForUserTurnId = + event.type === "request.opened" || event.type === "user-input.requested" + ? toTurnId(event.turnId) + : undefined; + if (pauseForUserTurnId) { + const assistantDeliveryMode: AssistantDeliveryMode = yield* Effect.map( + serverSettingsService.getSettings, + (settings) => (settings.enableAssistantStreaming ? "streaming" : "buffered"), + ); + const flushedMessageIds = + assistantDeliveryMode === "buffered" + ? yield* flushBufferedAssistantMessagesForTurn({ + event, + threadId: thread.id, + turnId: pauseForUserTurnId, + createdAt: now, + commandTag: + event.type === "request.opened" + ? "assistant-delta-flush-on-request-opened" + : "assistant-delta-flush-on-user-input-requested", + }) + : new Set(); + yield* finalizeActiveAssistantSegmentForTurn({ + event, threadId: thread.id, - messageId: assistantMessageId, - delta: assistantDelta, - ...(turnId ? { turnId } : {}), + turnId: pauseForUserTurnId, createdAt: now, + commandTag: + event.type === "request.opened" + ? "assistant-complete-on-request-opened" + : "assistant-complete-on-user-input-requested", + finalDeltaCommandTag: + event.type === "request.opened" + ? 
"assistant-delta-finalize-on-request-opened" + : "assistant-delta-finalize-on-user-input-requested", + hasProjectedMessage: thread.messages.some( + (entry) => + entry.role === "assistant" && entry.turnId === pauseForUserTurnId && entry.streaming, + ), + flushedMessageIds, }); } - } - if (proposedPlanDelta && proposedPlanDelta.length > 0) { - const planId = proposedPlanIdFromEvent(event, thread.id); - yield* appendBufferedProposedPlan(planId, proposedPlanDelta, now); - } + if (proposedPlanDelta && proposedPlanDelta.length > 0) { + const planId = proposedPlanIdFromEvent(event, thread.id); + yield* appendBufferedProposedPlan(planId, proposedPlanDelta, now); + } - const assistantCompletion = - event.type === "item.completed" && event.payload.itemType === "assistant_message" - ? { - messageId: MessageId.make(`assistant:${event.itemId ?? event.turnId ?? event.eventId}`), - fallbackText: event.payload.detail, - } - : undefined; - const proposedPlanCompletion = - event.type === "turn.proposed.completed" - ? { - planId: proposedPlanIdFromEvent(event, thread.id), - turnId: toTurnId(event.turnId), - planMarkdown: event.payload.planMarkdown, + const assistantCompletion = + event.type === "item.completed" && event.payload.itemType === "assistant_message" + ? { + messageId: MessageId.make( + `assistant:${event.itemId ?? event.turnId ?? event.eventId}`, + ), + fallbackText: event.payload.detail, + } + : undefined; + const proposedPlanCompletion = + event.type === "turn.proposed.completed" + ? { + planId: proposedPlanIdFromEvent(event, thread.id), + turnId: toTurnId(event.turnId), + planMarkdown: event.payload.planMarkdown, + } + : undefined; + + if (assistantCompletion) { + const turnId = toTurnId(event.turnId); + const activeAssistantMessageId = turnId + ? yield* getActiveAssistantMessageIdForTurn(thread.id, turnId) + : Option.none(); + const hasAssistantMessagesForTurn = + turnId !== undefined + ? 
thread.messages.some((entry) => entry.role === "assistant" && entry.turnId === turnId) + : false; + const assistantMessageId = Option.getOrElse( + activeAssistantMessageId, + () => assistantCompletion.messageId, + ); + const existingAssistantMessage = thread.messages.find( + (entry) => entry.id === assistantMessageId, + ); + const shouldApplyFallbackCompletionText = + !existingAssistantMessage || existingAssistantMessage.text.length === 0; + + const shouldSkipRedundantCompletion = + Option.isNone(activeAssistantMessageId) && + turnId !== undefined && + hasAssistantMessagesForTurn && + (assistantCompletion.fallbackText?.trim().length ?? 0) === 0; + + if (!shouldSkipRedundantCompletion) { + if (turnId && Option.isNone(activeAssistantMessageId)) { + yield* rememberAssistantMessageId(thread.id, turnId, assistantMessageId); } - : undefined; - if (assistantCompletion) { - const turnId = toTurnId(event.turnId); - const assistantMessageId = assistantBaseMessageId - ? takeOpenAssistantSegmentMessageId({ + yield* finalizeAssistantMessage({ + event, threadId: thread.id, - baseMessageId: assistantBaseMessageId, - })?.messageId - : undefined; - if (!assistantMessageId) { - if (assistantBaseMessageId) { - clearAssistantSegment({ - threadId: thread.id, - baseMessageId: assistantBaseMessageId, + messageId: assistantMessageId, ...(turnId ? { turnId } : {}), + createdAt: now, + commandTag: "assistant-complete", + finalDeltaCommandTag: "assistant-delta-finalize", + hasProjectedMessage: existingAssistantMessage !== undefined, + ...(assistantCompletion.fallbackText !== undefined && shouldApplyFallbackCompletionText + ? { fallbackText: assistantCompletion.fallbackText } + : {}), }); - } - } else if (turnId) { - yield* rememberAssistantMessageId(thread.id, turnId, assistantMessageId); - } - if (assistantMessageId) { - // Avoid duplicating streamed text by checking the resolved - // segment message, not just the base message, for existing content. 
- const existingSegmentMessage = - existingAssistantMessageById.get(assistantMessageId) ?? - thread.messages.find((entry) => entry.id === assistantMessageId); - const shouldApplyFallbackCompletionText = - !existingSegmentMessage || existingSegmentMessage.text.length === 0; - - yield* finalizeAssistantMessage({ - event, - threadId: thread.id, - messageId: assistantMessageId, - ...(turnId ? { turnId } : {}), - createdAt: now, - commandTag: "assistant-complete", - finalDeltaCommandTag: "assistant-delta-finalize", - ...(assistantCompletion.fallbackText !== undefined && shouldApplyFallbackCompletionText - ? { fallbackText: assistantCompletion.fallbackText } - : {}), - existingMessage: existingAssistantMessageById.get(assistantMessageId), - }); - } + if (turnId) { + yield* forgetAssistantMessageId(thread.id, turnId, assistantMessageId); + } + } - if (assistantBaseMessageId) { - clearAssistantSegment({ - threadId: thread.id, - baseMessageId: assistantBaseMessageId, - ...(turnId ? { turnId } : {}), - }); - } - if (turnId && assistantMessageId) { - yield* forgetAssistantMessageId(thread.id, turnId, assistantMessageId); + if (turnId) { + yield* clearAssistantSegmentStateForTurn(thread.id, turnId); + } } - } - - if (proposedPlanCompletion) { - yield* finalizeBufferedProposedPlan({ - event, - threadId: thread.id, - threadProposedPlans: thread.proposedPlans, - planId: proposedPlanCompletion.planId, - ...(proposedPlanCompletion.turnId ? 
{ turnId: proposedPlanCompletion.turnId } : {}), - fallbackMarkdown: proposedPlanCompletion.planMarkdown, - updatedAt: now, - }); - } - - if (event.type === "turn.completed") { - const turnId = toTurnId(event.turnId); - if (turnId) { - const assistantMessageIds = yield* getAssistantMessageIdsForTurn(thread.id, turnId); - yield* Effect.forEach( - assistantMessageIds, - (assistantMessageId) => - finalizeAssistantMessage({ - event, - threadId: thread.id, - messageId: assistantMessageId, - turnId, - createdAt: now, - commandTag: "assistant-complete-finalize", - finalDeltaCommandTag: "assistant-delta-finalize-fallback", - existingMessage: existingAssistantMessageById.get(assistantMessageId), - }), - { concurrency: 1 }, - ).pipe(Effect.asVoid); - yield* clearAssistantMessageIdsForTurn(thread.id, turnId); - clearAssistantSegmentsForTurn(thread.id, turnId); + if (proposedPlanCompletion) { yield* finalizeBufferedProposedPlan({ event, threadId: thread.id, threadProposedPlans: thread.proposedPlans, - planId: proposedPlanIdForTurn(thread.id, turnId), - turnId, + planId: proposedPlanCompletion.planId, + ...(proposedPlanCompletion.turnId ? 
{ turnId: proposedPlanCompletion.turnId } : {}), + fallbackMarkdown: proposedPlanCompletion.planMarkdown, updatedAt: now, }); } - } - if (event.type === "turn.aborted") { - const turnId = toTurnId(event.turnId); - if (turnId) { - const assistantMessageIds = yield* getAssistantMessageIdsForTurn(thread.id, turnId); - yield* Effect.forEach( - assistantMessageIds, - (assistantMessageId) => - finalizeAssistantMessage({ - event, - threadId: thread.id, - messageId: assistantMessageId, - turnId, - createdAt: now, - commandTag: "assistant-complete-finalize", - finalDeltaCommandTag: "assistant-delta-finalize-fallback", - existingMessage: existingAssistantMessageById.get(assistantMessageId), - }), - { concurrency: 1 }, - ).pipe(Effect.asVoid); - yield* clearAssistantMessageIdsForTurn(thread.id, turnId); - clearAssistantSegmentsForTurn(thread.id, turnId); - } - } - - if (event.type === "session.exited") { - yield* clearTurnStateForSession(thread.id); - } - - if (event.type === "runtime.error") { - const runtimeErrorMessage = event.payload.message; - - const shouldApplyRuntimeError = !STRICT_PROVIDER_LIFECYCLE_GUARD - ? 
true - : activeTurnId === null || eventTurnId === undefined || sameId(activeTurnId, eventTurnId); + if (event.type === "turn.completed") { + const turnId = toTurnId(event.turnId); + if (turnId) { + const assistantMessageIds = yield* getAssistantMessageIdsForTurn(thread.id, turnId); + yield* Effect.forEach( + assistantMessageIds, + (assistantMessageId) => + finalizeAssistantMessage({ + event, + threadId: thread.id, + messageId: assistantMessageId, + turnId, + createdAt: now, + commandTag: "assistant-complete-finalize", + finalDeltaCommandTag: "assistant-delta-finalize-fallback", + hasProjectedMessage: thread.messages.some( + (entry) => entry.id === assistantMessageId, + ), + }), + { concurrency: 1 }, + ).pipe(Effect.asVoid); + yield* clearAssistantMessageIdsForTurn(thread.id, turnId); + yield* clearAssistantSegmentStateForTurn(thread.id, turnId); - if (shouldApplyRuntimeError) { - yield* orchestrationEngine.dispatch({ - type: "thread.session.set", - commandId: providerCommandId(event, "runtime-error-session-set"), - threadId: thread.id, - session: { + yield* finalizeBufferedProposedPlan({ + event, threadId: thread.id, - status: "error", - providerName: event.provider, - runtimeMode: thread.session?.runtimeMode ?? "full-access", - activeTurnId: eventTurnId ?? 
null, - lastError: runtimeErrorMessage, + threadProposedPlans: thread.proposedPlans, + planId: proposedPlanIdForTurn(thread.id, turnId), + turnId, updatedAt: now, - }, - createdAt: now, - }); + }); + } } - } - if (event.type === "thread.metadata.updated" && event.payload.name) { - yield* orchestrationEngine.dispatch({ - type: "thread.meta.update", - commandId: providerCommandId(event, "thread-meta-update"), - threadId: thread.id, - title: event.payload.name, - }); - } + if (event.type === "session.exited") { + yield* clearTurnStateForSession(thread.id); + } - if (event.type === "turn.diff.updated") { - const turnId = toTurnId(event.turnId); - if (turnId && (yield* isGitRepoForThread(thread.id))) { - // Skip if a checkpoint already exists for this turn. A real - // (non-placeholder) capture from CheckpointReactor should not - // be clobbered, and dispatching a duplicate placeholder for the - // same turnId would produce an unstable checkpointTurnCount. - if (thread.checkpoints.some((c) => c.turnId === turnId)) { - // Already tracked; no-op. - } else { - const assistantMessageId = MessageId.make( - `assistant:${event.itemId ?? event.turnId ?? event.eventId}`, - ); - const maxTurnCount = thread.checkpoints.reduce( - (max, c) => Math.max(max, c.checkpointTurnCount), - 0, - ); + if (event.type === "runtime.error") { + const runtimeErrorMessage = event.payload.message; + + const shouldApplyRuntimeError = !STRICT_PROVIDER_LIFECYCLE_GUARD + ? 
true + : activeTurnId === null || eventTurnId === undefined || sameId(activeTurnId, eventTurnId); + + if (shouldApplyRuntimeError) { yield* orchestrationEngine.dispatch({ - type: "thread.turn.diff.complete", - commandId: providerCommandId(event, "thread-turn-diff-complete"), + type: "thread.session.set", + commandId: providerCommandId(event, "runtime-error-session-set"), threadId: thread.id, - turnId, - completedAt: now, - checkpointRef: CheckpointRef.make(`provider-diff:${event.eventId}`), - status: "missing", - files: [], - assistantMessageId, - checkpointTurnCount: maxTurnCount + 1, + session: { + threadId: thread.id, + status: "error", + providerName: event.provider, + ...(event.providerInstanceId !== undefined + ? { providerInstanceId: event.providerInstanceId } + : {}), + runtimeMode: thread.session?.runtimeMode ?? "full-access", + activeTurnId: eventTurnId ?? null, + lastError: runtimeErrorMessage, + updatedAt: now, + }, createdAt: now, }); } } - } - const activities = runtimeEventToActivities(event); - yield* Effect.forEach(activities, (activity) => - orchestrationEngine.dispatch({ - type: "thread.activity.append", - commandId: providerCommandId(event, "thread-activity-append"), - threadId: thread.id, - activity, - createdAt: activity.createdAt, - }), - ).pipe(Effect.asVoid); - }); + if (event.type === "thread.metadata.updated" && event.payload.name) { + yield* orchestrationEngine.dispatch({ + type: "thread.meta.update", + commandId: providerCommandId(event, "thread-meta-update"), + threadId: thread.id, + title: event.payload.name, + }); + } + + if (event.type === "turn.diff.updated") { + const turnId = toTurnId(event.turnId); + if (turnId && (yield* isGitRepoForThread(thread.id))) { + // Skip if a checkpoint already exists for this turn. A real + // (non-placeholder) capture from CheckpointReactor should not + // be clobbered, and dispatching a duplicate placeholder for the + // same turnId would produce an unstable checkpointTurnCount. 
+ if (thread.checkpoints.some((c) => c.turnId === turnId)) { + // Already tracked; no-op. + } else { + const assistantMessageId = MessageId.make( + `assistant:${event.itemId ?? event.turnId ?? event.eventId}`, + ); + const maxTurnCount = thread.checkpoints.reduce( + (max, c) => Math.max(max, c.checkpointTurnCount), + 0, + ); + yield* orchestrationEngine.dispatch({ + type: "thread.turn.diff.complete", + commandId: providerCommandId(event, "thread-turn-diff-complete"), + threadId: thread.id, + turnId, + completedAt: now, + checkpointRef: CheckpointRef.make(`provider-diff:${event.eventId}`), + status: "missing", + files: [], + assistantMessageId, + checkpointTurnCount: maxTurnCount + 1, + createdAt: now, + }); + } + } + } + + const activities = runtimeEventToActivities(event); + yield* Effect.forEach(activities, (activity) => + orchestrationEngine.dispatch({ + type: "thread.activity.append", + commandId: providerCommandId(event, "thread-activity-append"), + threadId: thread.id, + activity, + createdAt: activity.createdAt, + }), + ).pipe(Effect.asVoid); + }); const processDomainEvent = (_event: TurnStartRequestedDomainEvent) => Effect.void; @@ -1664,21 +1549,22 @@ const make = Effect.fn("make")(function* () { const worker = yield* makeDrainableWorker(processInputSafely); - const start: ProviderRuntimeIngestionShape["start"] = Effect.fn("start")(function* () { - yield* Effect.forkScoped( - Stream.runForEach(providerService.streamEvents, (event) => - worker.enqueue({ source: "runtime", event }), - ), - ); - yield* Effect.forkScoped( - Stream.runForEach(orchestrationEngine.streamDomainEvents, (event) => { - if (event.type !== "thread.turn-start-requested") { - return Effect.void; - } - return worker.enqueue({ source: "domain", event }); - }), - ); - }); + const start: ProviderRuntimeIngestionShape["start"] = () => + Effect.gen(function* () { + yield* Effect.forkScoped( + Stream.runForEach(providerService.streamEvents, (event) => + worker.enqueue({ source: "runtime", event 
}), + ), + ); + yield* Effect.forkScoped( + Stream.runForEach(orchestrationEngine.streamDomainEvents, (event) => { + if (event.type !== "thread.turn-start-requested") { + return Effect.void; + } + return worker.enqueue({ source: "domain", event }); + }), + ); + }); return { start, @@ -1688,5 +1574,5 @@ const make = Effect.fn("make")(function* () { export const ProviderRuntimeIngestionLive = Layer.effect( ProviderRuntimeIngestionService, - make(), + make, ).pipe(Layer.provide(ProjectionTurnRepositoryLive)); diff --git a/apps/server/src/orchestration/commandInvariants.test.ts b/apps/server/src/orchestration/commandInvariants.test.ts index a678bcea166..1e33a355d06 100644 --- a/apps/server/src/orchestration/commandInvariants.test.ts +++ b/apps/server/src/orchestration/commandInvariants.test.ts @@ -7,6 +7,7 @@ import { ThreadId, type OrchestrationCommand, type OrchestrationReadModel, + ProviderInstanceId, } from "@t3tools/contracts"; import { Effect } from "effect"; @@ -29,7 +30,7 @@ const readModel: OrchestrationReadModel = { title: "Project A", workspaceRoot: "/tmp/project-a", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, scripts: [], @@ -42,7 +43,7 @@ const readModel: OrchestrationReadModel = { title: "Project B", workspaceRoot: "/tmp/project-b", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, scripts: [], @@ -57,7 +58,7 @@ const readModel: OrchestrationReadModel = { projectId: ProjectId.make("project-a"), title: "Thread A", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -80,7 +81,7 @@ const readModel: OrchestrationReadModel = { projectId: ProjectId.make("project-b"), title: "Thread B", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, 
interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -157,7 +158,7 @@ describe("commandInvariants", () => { projectId: ProjectId.make("project-a"), title: "new", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -181,7 +182,7 @@ describe("commandInvariants", () => { projectId: ProjectId.make("project-a"), title: "dup", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, diff --git a/apps/server/src/orchestration/decider.delete.test.ts b/apps/server/src/orchestration/decider.delete.test.ts index 2b323714932..548fcc0b68e 100644 --- a/apps/server/src/orchestration/decider.delete.test.ts +++ b/apps/server/src/orchestration/decider.delete.test.ts @@ -7,6 +7,7 @@ import { type OrchestrationCommand, type OrchestrationEvent, type OrchestrationReadModel, + ProviderInstanceId, } from "@t3tools/contracts"; import { Effect } from "effect"; import { describe, expect, it } from "vitest"; @@ -63,7 +64,7 @@ async function seedReadModel(): Promise { projectId: asProjectId("project-delete"), title: "Thread Delete 1", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -93,7 +94,7 @@ async function seedReadModel(): Promise { projectId: asProjectId("project-delete"), title: "Thread Delete 2", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, diff --git a/apps/server/src/orchestration/decider.projectScripts.test.ts b/apps/server/src/orchestration/decider.projectScripts.test.ts index a85e21c53f3..23566099196 100644 --- a/apps/server/src/orchestration/decider.projectScripts.test.ts +++ 
b/apps/server/src/orchestration/decider.projectScripts.test.ts @@ -5,7 +5,9 @@ import { MessageId, ProjectId, ThreadId, + ProviderInstanceId, } from "@t3tools/contracts"; +import { createModelSelection } from "@t3tools/shared/model"; import { describe, expect, it } from "vitest"; import { Effect } from "effect"; @@ -137,7 +139,7 @@ describe("decider project scripts", () => { projectId: asProjectId("project-1"), title: "Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -162,14 +164,10 @@ describe("decider project scripts", () => { text: "hello", attachments: [], }, - modelSelection: { - provider: "codex", - model: "gpt-5.3-codex", - options: { - reasoningEffort: "high", - fastMode: true, - }, - }, + modelSelection: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.3-codex", [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: true }, + ]), interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, runtimeMode: "approval-required", createdAt: now, @@ -191,14 +189,10 @@ describe("decider project scripts", () => { expect(turnStartEvent.payload).toMatchObject({ threadId: ThreadId.make("thread-1"), messageId: asMessageId("message-user-1"), - modelSelection: { - provider: "codex", - model: "gpt-5.3-codex", - options: { - reasoningEffort: "high", - fastMode: true, - }, - }, + modelSelection: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.3-codex", [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: true }, + ]), runtimeMode: "approval-required", }); }); @@ -246,7 +240,7 @@ describe("decider project scripts", () => { projectId: asProjectId("project-1"), title: "Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, @@ -328,7 +322,7 @@ describe("decider project scripts", () 
=> { projectId: asProjectId("project-1"), title: "Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, interactionMode: DEFAULT_PROVIDER_INTERACTION_MODE, diff --git a/apps/server/src/orchestration/projector.test.ts b/apps/server/src/orchestration/projector.test.ts index a61153bb529..b50bc2da598 100644 --- a/apps/server/src/orchestration/projector.test.ts +++ b/apps/server/src/orchestration/projector.test.ts @@ -2,6 +2,7 @@ import { CommandId, EventId, ProjectId, + ProviderDriverKind, ThreadId, type OrchestrationEvent, } from "@t3tools/contracts"; @@ -57,7 +58,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "demo", modelSelection: { - provider: "codex", + provider: ProviderDriverKind.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -77,7 +78,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "demo", modelSelection: { - provider: "codex", + instanceId: "codex", model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -118,7 +119,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "demo", modelSelection: { - provider: "codex", + provider: ProviderDriverKind.make("codex"), model: "gpt-5-codex", }, branch: null, @@ -150,7 +151,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "demo", modelSelection: { - provider: "codex", + provider: ProviderDriverKind.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", @@ -253,7 +254,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "demo", modelSelection: { - provider: "codex", + provider: ProviderDriverKind.make("codex"), model: "gpt-5.3-codex", }, runtimeMode: "full-access", @@ -319,7 +320,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "demo", modelSelection: { - provider: "codex", + provider: ProviderDriverKind.make("codex"), model: "gpt-5.3-codex", 
}, runtimeMode: "full-access", @@ -376,7 +377,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "demo", modelSelection: { - provider: "codex", + provider: ProviderDriverKind.make("codex"), model: "gpt-5.3-codex", }, runtimeMode: "full-access", @@ -463,7 +464,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "demo", modelSelection: { - provider: "codex", + provider: ProviderDriverKind.make("codex"), model: "gpt-5.3-codex", }, runtimeMode: "full-access", @@ -678,7 +679,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "demo", modelSelection: { - provider: "codex", + provider: ProviderDriverKind.make("codex"), model: "gpt-5.3-codex", }, runtimeMode: "full-access", @@ -831,7 +832,7 @@ describe("orchestration projector", () => { projectId: "project-1", title: "capped", modelSelection: { - provider: "codex", + provider: ProviderDriverKind.make("codex"), model: "gpt-5-codex", }, runtimeMode: "full-access", diff --git a/apps/server/src/orchestration/redactEvent.ts b/apps/server/src/orchestration/redactEvent.ts index e68e4e06a18..4625a1bfbc0 100644 --- a/apps/server/src/orchestration/redactEvent.ts +++ b/apps/server/src/orchestration/redactEvent.ts @@ -1,48 +1,18 @@ -import type { - OrchestrationEvent, - ProviderStartOptions, - ProviderStartOptionsRedacted, -} from "@t3tools/contracts"; - -/** Strip sensitive fields (username, password) from provider start options. 
*/ -export function redactProviderStartOptions( - opts: ProviderStartOptions, -): ProviderStartOptionsRedacted { - const redacted = { ...opts } as Record; - if (opts.opencode) { - const { username: _u, password: _p, ...rest } = opts.opencode; - redacted.opencode = rest; - } - if (opts.kilo) { - const { username: _u, password: _p, ...rest } = opts.kilo; - redacted.kilo = rest; - } - return redacted as ProviderStartOptionsRedacted; -} +import type { OrchestrationEvent } from "@t3tools/contracts"; /** * Redact sensitive fields from an orchestration event payload. * - * Currently strips `username`/`password` from opencode and kilo provider - * options on `thread.turn-start-requested` events. Use this at persistence - * and client-broadcast boundaries so credentials never leave the server - * runtime. + * Historically this stripped `username`/`password` from opencode and kilo + * provider options on `thread.turn-start-requested` events. The new + * orchestration contract no longer carries provider start options on + * orchestration events (model selection is communicated via `modelSelection`), + * so there is nothing to redact today. This function is kept as a stable + * boundary hook so future credential-bearing payloads can be scrubbed in + * a single place. 
*/ export function redactEventForBoundary>( event: T, ): T { - if (event.type !== "thread.turn-start-requested") { - return event; - } - const payload = event.payload as Record; - if (!payload.providerOptions) { - return event; - } - return { - ...event, - payload: { - ...payload, - providerOptions: redactProviderStartOptions(payload.providerOptions as ProviderStartOptions), - }, - } as T; + return event; } diff --git a/apps/server/src/persistence/Layers/OrchestrationEventStore.test.ts b/apps/server/src/persistence/Layers/OrchestrationEventStore.test.ts index 4255f770abe..7b0f718d9cb 100644 --- a/apps/server/src/persistence/Layers/OrchestrationEventStore.test.ts +++ b/apps/server/src/persistence/Layers/OrchestrationEventStore.test.ts @@ -1,4 +1,4 @@ -import { CommandId, EventId, ProjectId } from "@t3tools/contracts"; +import { CommandId, EventId, ProjectId, ProviderInstanceId } from "@t3tools/contracts"; import { assert, it } from "@effect/vitest"; import { Effect, Layer, Schema, Stream } from "effect"; import * as SqlClient from "effect/unstable/sql/SqlClient"; @@ -185,7 +185,7 @@ layer("OrchestrationEventStore", (it) => { return; } assert.deepStrictEqual(replayedEvent.payload.modelSelection, { - provider: "geminiCli", + instanceId: ProviderInstanceId.make("geminiCli"), model: "gemini-2.5-pro", }); }), diff --git a/apps/server/src/persistence/Layers/ProjectionRepositories.test.ts b/apps/server/src/persistence/Layers/ProjectionRepositories.test.ts index d42be699458..fc5f7a5c155 100644 --- a/apps/server/src/persistence/Layers/ProjectionRepositories.test.ts +++ b/apps/server/src/persistence/Layers/ProjectionRepositories.test.ts @@ -1,4 +1,4 @@ -import { ProjectId, ThreadId } from "@t3tools/contracts"; +import { ProjectId, ThreadId, ProviderInstanceId } from "@t3tools/contracts"; import { assert, it } from "@effect/vitest"; import { Effect, Layer, Option } from "effect"; import * as SqlClient from "effect/unstable/sql/SqlClient"; @@ -28,7 +28,7 @@ 
projectionRepositoriesLayer("Projection repositories", (it) => { title: "Null options project", workspaceRoot: "/tmp/project-null-options", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4", }, scripts: [], @@ -52,7 +52,7 @@ projectionRepositoriesLayer("Projection repositories", (it) => { assert.strictEqual( row.defaultModelSelection, JSON.stringify({ - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4", }), ); @@ -61,7 +61,7 @@ projectionRepositoriesLayer("Projection repositories", (it) => { projectId: ProjectId.make("project-null-options"), }); assert.deepStrictEqual(Option.getOrNull(persisted)?.defaultModelSelection, { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4", }); }), @@ -77,7 +77,7 @@ projectionRepositoriesLayer("Projection repositories", (it) => { projectId: ProjectId.make("project-null-options"), title: "Null options thread", modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-6", }, runtimeMode: "full-access", @@ -110,7 +110,7 @@ projectionRepositoriesLayer("Projection repositories", (it) => { assert.strictEqual( row.modelSelection, JSON.stringify({ - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-6", }), ); @@ -119,7 +119,7 @@ projectionRepositoriesLayer("Projection repositories", (it) => { threadId: ThreadId.make("thread-null-options"), }); assert.deepStrictEqual(Option.getOrNull(persisted)?.modelSelection, { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-6", }); }), diff --git a/apps/server/src/persistence/Layers/ProjectionThreadSessions.ts b/apps/server/src/persistence/Layers/ProjectionThreadSessions.ts index 2499eba1967..80d241436f8 100644 --- a/apps/server/src/persistence/Layers/ProjectionThreadSessions.ts +++ 
b/apps/server/src/persistence/Layers/ProjectionThreadSessions.ts @@ -23,6 +23,7 @@ const makeProjectionThreadSessionRepository = Effect.gen(function* () { thread_id, status, provider_name, + provider_instance_id, runtime_mode, active_turn_id, last_error, @@ -32,6 +33,7 @@ const makeProjectionThreadSessionRepository = Effect.gen(function* () { ${row.threadId}, ${row.status}, ${row.providerName}, + ${row.providerInstanceId}, ${row.runtimeMode}, ${row.activeTurnId}, ${row.lastError}, @@ -41,6 +43,7 @@ const makeProjectionThreadSessionRepository = Effect.gen(function* () { DO UPDATE SET status = excluded.status, provider_name = excluded.provider_name, + provider_instance_id = excluded.provider_instance_id, runtime_mode = excluded.runtime_mode, active_turn_id = excluded.active_turn_id, last_error = excluded.last_error, @@ -57,6 +60,7 @@ const makeProjectionThreadSessionRepository = Effect.gen(function* () { thread_id AS "threadId", status, provider_name AS "providerName", + provider_instance_id AS "providerInstanceId", runtime_mode AS "runtimeMode", active_turn_id AS "activeTurnId", last_error AS "lastError", diff --git a/apps/server/src/persistence/Layers/ProviderSessionRuntime.ts b/apps/server/src/persistence/Layers/ProviderSessionRuntime.ts index da3e8bce90a..778e0c6d2ee 100644 --- a/apps/server/src/persistence/Layers/ProviderSessionRuntime.ts +++ b/apps/server/src/persistence/Layers/ProviderSessionRuntime.ts @@ -46,6 +46,7 @@ const makeProviderSessionRuntimeRepository = Effect.gen(function* () { INSERT INTO provider_session_runtime ( thread_id, provider_name, + provider_instance_id, adapter_key, runtime_mode, status, @@ -56,6 +57,7 @@ const makeProviderSessionRuntimeRepository = Effect.gen(function* () { VALUES ( ${runtime.threadId}, ${runtime.providerName}, + ${runtime.providerInstanceId}, ${runtime.adapterKey}, ${runtime.runtimeMode}, ${runtime.status}, @@ -66,6 +68,7 @@ const makeProviderSessionRuntimeRepository = Effect.gen(function* () { ON CONFLICT (thread_id) 
DO UPDATE SET provider_name = excluded.provider_name, + provider_instance_id = excluded.provider_instance_id, adapter_key = excluded.adapter_key, runtime_mode = excluded.runtime_mode, status = excluded.status, @@ -83,6 +86,7 @@ const makeProviderSessionRuntimeRepository = Effect.gen(function* () { SELECT thread_id AS "threadId", provider_name AS "providerName", + provider_instance_id AS "providerInstanceId", adapter_key AS "adapterKey", runtime_mode AS "runtimeMode", status, @@ -102,6 +106,7 @@ const makeProviderSessionRuntimeRepository = Effect.gen(function* () { SELECT thread_id AS "threadId", provider_name AS "providerName", + provider_instance_id AS "providerInstanceId", adapter_key AS "adapterKey", runtime_mode AS "runtimeMode", status, diff --git a/apps/server/src/persistence/Migrations.ts b/apps/server/src/persistence/Migrations.ts index d6158dd60d3..39773d760f7 100644 --- a/apps/server/src/persistence/Migrations.ts +++ b/apps/server/src/persistence/Migrations.ts @@ -35,11 +35,21 @@ import Migration0019 from "./Migrations/019_ProjectionSnapshotLookupIndexes.ts"; import Migration0020 from "./Migrations/020_AuthAccessManagement.ts"; import Migration0021 from "./Migrations/021_AuthSessionClientMetadata.ts"; import Migration0022 from "./Migrations/022_AuthSessionLastConnectedAt.ts"; +// Fork-only migrations (filenames re-use 020/021 prefixes; IDs 23/24 keep them +// installed after the fork's first deployment). import Migration0023 from "./Migrations/020_NormalizeLegacyProviderKinds.ts"; import Migration0024 from "./Migrations/021_RepairProjectionThreadProposedPlanImplementationColumns.ts"; +// Upstream migrations renumbered to sit after the fork's 23/24. 
import Migration0025 from "./Migrations/023_ProjectionThreadShellSummary.ts"; import Migration0026 from "./Migrations/024_BackfillProjectionThreadShellSummary.ts"; import Migration0027 from "./Migrations/025_CleanupInvalidProjectionPendingApprovals.ts"; +import Migration0028 from "./Migrations/026_CanonicalizeModelSelectionOptions.ts"; +import Migration0029 from "./Migrations/027_ProviderSessionRuntimeInstanceId.ts"; +import Migration0030 from "./Migrations/028_ProjectionThreadSessionInstanceId.ts"; +// Fork-only migration: backfills `provider_instance_id` for the four fork +// drivers (amp/copilot/geminiCli/kilo) so existing rows resolve under the +// new instance-based routing model introduced by upstream PR #2277. +import Migration0031 from "./Migrations/029_BackfillForkProviderInstanceIds.ts"; /** * Migration loader with all migrations defined inline. @@ -79,6 +89,10 @@ export const migrationEntries = [ [25, "ProjectionThreadShellSummary", Migration0025], [26, "BackfillProjectionThreadShellSummary", Migration0026], [27, "CleanupInvalidProjectionPendingApprovals", Migration0027], + [28, "CanonicalizeModelSelectionOptions", Migration0028], + [29, "ProviderSessionRuntimeInstanceId", Migration0029], + [30, "ProjectionThreadSessionInstanceId", Migration0030], + [31, "BackfillForkProviderInstanceIds", Migration0031], ] as const; export const makeMigrationLoader = (throughId?: number) => diff --git a/apps/server/src/persistence/Migrations/026_CanonicalizeModelSelectionOptions.test.ts b/apps/server/src/persistence/Migrations/026_CanonicalizeModelSelectionOptions.test.ts new file mode 100644 index 00000000000..ffc42521c90 --- /dev/null +++ b/apps/server/src/persistence/Migrations/026_CanonicalizeModelSelectionOptions.test.ts @@ -0,0 +1,450 @@ +import { assert, it } from "@effect/vitest"; +import { Effect, Layer } from "effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +import { runMigrations } from "../Migrations.ts"; +import * as NodeSqliteClient 
from "../NodeSqliteClient.ts"; + +const layer = it.layer(Layer.mergeAll(NodeSqliteClient.layerMemory())); + +layer("026_CanonicalizeModelSelectionOptions", (it) => { + it.effect("converts legacy object-shape options into array-shape on projections and events", () => + Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* runMigrations({ toMigrationInclusive: 25 }); + + yield* sql` + INSERT INTO projection_projects ( + project_id, + title, + workspace_root, + default_model_selection_json, + scripts_json, + created_at, + updated_at, + deleted_at + ) + VALUES + ( + 'project-legacy', + 'Legacy options project', + '/tmp/legacy', + '{"provider":"claudeAgent","model":"claude-opus-4-6","options":{"effort":"max","fastMode":true}}', + '[]', + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL + ), + ( + 'project-no-options', + 'No options project', + '/tmp/no-options', + '{"provider":"codex","model":"gpt-5.4"}', + '[]', + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL + ), + ( + 'project-null-selection', + 'Null model selection project', + '/tmp/null-selection', + NULL, + '[]', + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL + ), + ( + 'project-already-array', + 'Already-canonical options project', + '/tmp/already-array', + '{"provider":"codex","model":"gpt-5.4","options":[{"id":"reasoningEffort","value":"high"}]}', + '[]', + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL + ) + `; + + yield* sql` + INSERT INTO projection_threads ( + thread_id, + project_id, + title, + model_selection_json, + branch, + worktree_path, + latest_turn_id, + created_at, + updated_at, + archived_at, + latest_user_message_at, + pending_approval_count, + pending_user_input_count, + has_actionable_proposed_plan, + deleted_at, + runtime_mode, + interaction_mode + ) + VALUES + ( + 'thread-legacy', + 'project-legacy', + 'Legacy thread', + 
'{"provider":"claudeAgent","model":"claude-opus-4-6","options":{"effort":"max","thinking":false,"contextWindow":"1m"}}', + NULL, NULL, NULL, + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, NULL, 0, 0, 0, NULL, + 'full-access', 'default' + ), + ( + 'thread-empty-options', + 'project-legacy', + 'Empty options thread', + '{"provider":"codex","model":"gpt-5.4","options":{}}', + NULL, NULL, NULL, + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, NULL, 0, 0, 0, NULL, + 'full-access', 'default' + ), + ( + 'thread-drop-garbage', + 'project-legacy', + 'Thread with non-scalar entries', + '{"provider":"claudeAgent","model":"claude-opus-4-6","options":{"effort":"high","thinking":{"enabled":true,"budgetTokens":2000},"emptyStr":" ","nullish":null}}', + NULL, NULL, NULL, + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, NULL, 0, 0, 0, NULL, + 'full-access', 'default' + ), + ( + 'thread-no-options', + 'project-legacy', + 'No options thread', + '{"provider":"codex","model":"gpt-5.4"}', + NULL, NULL, NULL, + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, NULL, 0, 0, 0, NULL, + 'full-access', 'default' + ), + ( + 'thread-already-array', + 'project-legacy', + 'Already array thread', + '{"provider":"codex","model":"gpt-5.4","options":[{"id":"fastMode","value":true}]}', + NULL, NULL, NULL, + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, NULL, 0, 0, 0, NULL, + 'full-access', 'default' + ) + `; + + yield* sql` + INSERT INTO orchestration_events ( + event_id, + aggregate_kind, + stream_id, + stream_version, + event_type, + occurred_at, + command_id, + causation_event_id, + correlation_id, + actor_kind, + payload_json, + metadata_json + ) + VALUES + ( + 'event-project-created', + 'project', + 'project-legacy', + 1, + 'project.created', + '2026-01-01T00:00:00.000Z', + 'cmd-pc', + NULL, + 'corr-pc', + 'user', + 
'{"projectId":"project-legacy","title":"Project","workspaceRoot":"/tmp/legacy","defaultModelSelection":{"provider":"claudeAgent","model":"claude-opus-4-6","options":{"effort":"max","fastMode":true}},"scripts":[],"createdAt":"2026-01-01T00:00:00.000Z","updatedAt":"2026-01-01T00:00:00.000Z"}', + '{}' + ), + ( + 'event-project-meta-updated', + 'project', + 'project-legacy', + 2, + 'project.meta-updated', + '2026-01-01T00:00:00.000Z', + 'cmd-pmu', + NULL, + 'corr-pmu', + 'user', + '{"projectId":"project-legacy","defaultModelSelection":{"provider":"codex","model":"gpt-5.4","options":{"reasoningEffort":"low"}},"updatedAt":"2026-01-01T00:00:00.000Z"}', + '{}' + ), + ( + 'event-project-null-selection', + 'project', + 'project-legacy', + 3, + 'project.meta-updated', + '2026-01-01T00:00:00.000Z', + 'cmd-null', + NULL, + 'corr-null', + 'user', + '{"projectId":"project-legacy","defaultModelSelection":null,"updatedAt":"2026-01-01T00:00:00.000Z"}', + '{}' + ), + ( + 'event-thread-created', + 'thread', + 'thread-legacy', + 1, + 'thread.created', + '2026-01-01T00:00:00.000Z', + 'cmd-tc', + NULL, + 'corr-tc', + 'user', + '{"threadId":"thread-legacy","projectId":"project-legacy","title":"Thread","modelSelection":{"provider":"claudeAgent","model":"claude-opus-4-6","options":{"effort":"max","thinking":false}},"runtimeMode":"full-access","interactionMode":"default","branch":null,"worktreePath":null,"createdAt":"2026-01-01T00:00:00.000Z","updatedAt":"2026-01-01T00:00:00.000Z"}', + '{}' + ), + ( + 'event-thread-meta-updated', + 'thread', + 'thread-legacy', + 2, + 'thread.meta-updated', + '2026-01-01T00:00:00.000Z', + 'cmd-tmu', + NULL, + 'corr-tmu', + 'user', + '{"threadId":"thread-legacy","modelSelection":{"provider":"codex","model":"gpt-5.4","options":{"fastMode":true}},"updatedAt":"2026-01-01T00:00:00.000Z"}', + '{}' + ), + ( + 'event-thread-turn-start', + 'thread', + 'thread-legacy', + 3, + 'thread.turn-start-requested', + '2026-01-01T00:00:00.000Z', + 'cmd-tts', + NULL, + 
'corr-tts', + 'user', + '{"threadId":"thread-legacy","messageId":"msg-1","modelSelection":{"provider":"claudeAgent","model":"claude-opus-4-6","options":{"effort":"high","contextWindow":"1m"}},"runtimeMode":"full-access","interactionMode":"default","createdAt":"2026-01-01T00:00:00.000Z"}', + '{}' + ), + ( + 'event-thread-already-array', + 'thread', + 'thread-legacy', + 4, + 'thread.created', + '2026-01-01T00:00:00.000Z', + 'cmd-taa', + NULL, + 'corr-taa', + 'user', + '{"threadId":"thread-already-array","projectId":"project-legacy","title":"Already Array","modelSelection":{"provider":"codex","model":"gpt-5.4","options":[{"id":"reasoningEffort","value":"medium"}]},"runtimeMode":"full-access","interactionMode":"default","branch":null,"worktreePath":null,"createdAt":"2026-01-01T00:00:00.000Z","updatedAt":"2026-01-01T00:00:00.000Z"}', + '{}' + ), + ( + 'event-activity-append', + 'thread', + 'thread-legacy', + 5, + 'thread.activity-appended', + '2026-01-01T00:00:00.000Z', + 'cmd-aa', + NULL, + 'corr-aa', + 'user', + '{"threadId":"thread-legacy","activity":{"id":"a","tone":"info","kind":"k","summary":"s","payload":null,"turnId":null,"createdAt":"2026-01-01T00:00:00.000Z"}}', + '{}' + ) + `; + + yield* runMigrations({ toMigrationInclusive: 26 }); + + // Projection projects + const projectRows = yield* sql<{ + readonly projectId: string; + readonly defaultModelSelection: string | null; + }>` + SELECT + project_id AS "projectId", + default_model_selection_json AS "defaultModelSelection" + FROM projection_projects + ORDER BY project_id + `; + assert.deepStrictEqual( + projectRows.map((row) => ({ + projectId: row.projectId, + selection: row.defaultModelSelection ? 
JSON.parse(row.defaultModelSelection) : null, + })), + [ + { + projectId: "project-already-array", + selection: { + provider: "codex", + model: "gpt-5.4", + options: [{ id: "reasoningEffort", value: "high" }], + }, + }, + { + projectId: "project-legacy", + selection: { + provider: "claudeAgent", + model: "claude-opus-4-6", + options: [ + { id: "effort", value: "max" }, + { id: "fastMode", value: true }, + ], + }, + }, + { + projectId: "project-no-options", + selection: { provider: "codex", model: "gpt-5.4" }, + }, + { projectId: "project-null-selection", selection: null }, + ], + ); + + // Projection threads + const threadRows = yield* sql<{ + readonly threadId: string; + readonly modelSelection: string | null; + }>` + SELECT + thread_id AS "threadId", + model_selection_json AS "modelSelection" + FROM projection_threads + ORDER BY thread_id + `; + assert.deepStrictEqual( + threadRows.map((row) => ({ + threadId: row.threadId, + selection: row.modelSelection ? JSON.parse(row.modelSelection) : null, + })), + [ + { + threadId: "thread-already-array", + selection: { + provider: "codex", + model: "gpt-5.4", + options: [{ id: "fastMode", value: true }], + }, + }, + { + threadId: "thread-drop-garbage", + selection: { + provider: "claudeAgent", + model: "claude-opus-4-6", + // Only the scalar string survives; nested object, whitespace + // string, and null are dropped. 
+ options: [{ id: "effort", value: "high" }], + }, + }, + { + threadId: "thread-empty-options", + selection: { provider: "codex", model: "gpt-5.4", options: [] }, + }, + { + threadId: "thread-legacy", + selection: { + provider: "claudeAgent", + model: "claude-opus-4-6", + options: [ + { id: "effort", value: "max" }, + { id: "thinking", value: false }, + { id: "contextWindow", value: "1m" }, + ], + }, + }, + { + threadId: "thread-no-options", + selection: { provider: "codex", model: "gpt-5.4" }, + }, + ], + ); + + // Orchestration events + const eventRows = yield* sql<{ + readonly eventId: string; + readonly payloadJson: string; + }>` + SELECT event_id AS "eventId", payload_json AS "payloadJson" + FROM orchestration_events + ORDER BY event_id + `; + + const payloads = Object.fromEntries( + eventRows.map((row) => [row.eventId, JSON.parse(row.payloadJson)]), + ); + + assert.deepStrictEqual(payloads["event-project-created"].defaultModelSelection, { + provider: "claudeAgent", + model: "claude-opus-4-6", + options: [ + { id: "effort", value: "max" }, + { id: "fastMode", value: true }, + ], + }); + + assert.deepStrictEqual(payloads["event-project-meta-updated"].defaultModelSelection, { + provider: "codex", + model: "gpt-5.4", + options: [{ id: "reasoningEffort", value: "low" }], + }); + + assert.strictEqual(payloads["event-project-null-selection"].defaultModelSelection, null); + + assert.deepStrictEqual(payloads["event-thread-created"].modelSelection, { + provider: "claudeAgent", + model: "claude-opus-4-6", + options: [ + { id: "effort", value: "max" }, + { id: "thinking", value: false }, + ], + }); + + assert.deepStrictEqual(payloads["event-thread-meta-updated"].modelSelection, { + provider: "codex", + model: "gpt-5.4", + options: [{ id: "fastMode", value: true }], + }); + + assert.deepStrictEqual(payloads["event-thread-turn-start"].modelSelection, { + provider: "claudeAgent", + model: "claude-opus-4-6", + options: [ + { id: "effort", value: "high" }, + { id: 
"contextWindow", value: "1m" }, + ], + }); + + // Already-array records are left untouched. + assert.deepStrictEqual(payloads["event-thread-already-array"].modelSelection, { + provider: "codex", + model: "gpt-5.4", + options: [{ id: "reasoningEffort", value: "medium" }], + }); + + // Events with no modelSelection at all are untouched. + assert.isUndefined(payloads["event-activity-append"].modelSelection); + assert.isUndefined(payloads["event-activity-append"].defaultModelSelection); + }), + ); +}); diff --git a/apps/server/src/persistence/Migrations/026_CanonicalizeModelSelectionOptions.ts b/apps/server/src/persistence/Migrations/026_CanonicalizeModelSelectionOptions.ts new file mode 100644 index 00000000000..15c08debf64 --- /dev/null +++ b/apps/server/src/persistence/Migrations/026_CanonicalizeModelSelectionOptions.ts @@ -0,0 +1,138 @@ +import * as Effect from "effect/Effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +/** + * Canonicalize `modelSelection.options` / `defaultModelSelection.options` from + * the legacy object shape (`{ effort: "max", fastMode: true, ... }`) to the + * current array-of-selections shape (`[{ id: "effort", value: "max" }, ...]`). + * + * Migration 016 introduced `modelSelection` with `options` stored as a + * per-provider object. Later the schema was reshaped so that options are a + * generic `Array<{ id, value }>` of user-selected option entries. Stored rows + * from before the reshape still have the object shape and fail to decode. + * + * For each value in the legacy object: + * - string values are kept if non-empty after trim + * - boolean values are always kept (true | false) + * - any other value type (number, null, nested object/array) is dropped, + * matching the permissive client-side normalizer in composerDraftStore. 
+ * + * Touched storage: + * - `projection_threads.model_selection_json.options` + * - `projection_projects.default_model_selection_json.options` + * - `orchestration_events.payload_json.$.modelSelection.options` + * (thread.created | thread.meta-updated | thread.turn-start-requested) + * - `orchestration_events.payload_json.$.defaultModelSelection.options` + * (project.created | project.meta-updated) + */ +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* sql` + UPDATE projection_threads + SET model_selection_json = json_set( + model_selection_json, + '$.options', + ( + SELECT json_group_array( + json_object( + 'id', key, + 'value', + CASE type + WHEN 'true' THEN json('true') + WHEN 'false' THEN json('false') + ELSE atom + END + ) + ) + FROM json_each(json_extract(model_selection_json, '$.options')) + WHERE (type = 'text' AND trim(coalesce(atom, '')) != '') + OR type IN ('true', 'false') + ) + ) + WHERE model_selection_json IS NOT NULL + AND json_type(model_selection_json, '$.options') = 'object' + `; + + yield* sql` + UPDATE projection_projects + SET default_model_selection_json = json_set( + default_model_selection_json, + '$.options', + ( + SELECT json_group_array( + json_object( + 'id', key, + 'value', + CASE type + WHEN 'true' THEN json('true') + WHEN 'false' THEN json('false') + ELSE atom + END + ) + ) + FROM json_each(json_extract(default_model_selection_json, '$.options')) + WHERE (type = 'text' AND trim(coalesce(atom, '')) != '') + OR type IN ('true', 'false') + ) + ) + WHERE default_model_selection_json IS NOT NULL + AND json_type(default_model_selection_json, '$.options') = 'object' + `; + + yield* sql` + UPDATE orchestration_events + SET payload_json = json_set( + payload_json, + '$.modelSelection.options', + ( + SELECT json_group_array( + json_object( + 'id', key, + 'value', + CASE type + WHEN 'true' THEN json('true') + WHEN 'false' THEN json('false') + ELSE atom + END + ) + ) + FROM 
json_each(json_extract(payload_json, '$.modelSelection.options')) + WHERE (type = 'text' AND trim(coalesce(atom, '')) != '') + OR type IN ('true', 'false') + ) + ) + WHERE event_type IN ( + 'thread.created', + 'thread.meta-updated', + 'thread.turn-start-requested' + ) + AND json_type(payload_json, '$.modelSelection.options') = 'object' + `; + + yield* sql` + UPDATE orchestration_events + SET payload_json = json_set( + payload_json, + '$.defaultModelSelection.options', + ( + SELECT json_group_array( + json_object( + 'id', key, + 'value', + CASE type + WHEN 'true' THEN json('true') + WHEN 'false' THEN json('false') + ELSE atom + END + ) + ) + FROM json_each(json_extract(payload_json, '$.defaultModelSelection.options')) + WHERE (type = 'text' AND trim(coalesce(atom, '')) != '') + OR type IN ('true', 'false') + ) + ) + WHERE event_type IN ('project.created', 'project.meta-updated') + AND json_type(payload_json, '$.defaultModelSelection.options') = 'object' + `; +}); diff --git a/apps/server/src/persistence/Migrations/027_028_ProviderInstanceIdColumns.test.ts b/apps/server/src/persistence/Migrations/027_028_ProviderInstanceIdColumns.test.ts new file mode 100644 index 00000000000..3233f5043af --- /dev/null +++ b/apps/server/src/persistence/Migrations/027_028_ProviderInstanceIdColumns.test.ts @@ -0,0 +1,74 @@ +import { assert, it } from "@effect/vitest"; +import { Effect, Layer } from "effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +import { runMigrations } from "../Migrations.ts"; +import * as NodeSqliteClient from "../NodeSqliteClient.ts"; + +const layer = it.layer(Layer.mergeAll(NodeSqliteClient.layerMemory())); + +layer("027_028_ProviderInstanceIdColumns", (it) => { + it.effect("continues when provider_session_runtime was partially migrated", () => + Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* runMigrations({ toMigrationInclusive: 26 }); + yield* sql` + ALTER TABLE provider_session_runtime + ADD COLUMN 
provider_instance_id TEXT + `; + + yield* runMigrations({ toMigrationInclusive: 28 }); + + const migrations = yield* sql<{ + readonly migration_id: number; + readonly name: string; + }>` + SELECT migration_id, name + FROM effect_sql_migrations + WHERE migration_id IN (27, 28) + ORDER BY migration_id + `; + assert.deepStrictEqual(migrations, [ + { + migration_id: 27, + name: "ProviderSessionRuntimeInstanceId", + }, + { + migration_id: 28, + name: "ProjectionThreadSessionInstanceId", + }, + ]); + + const providerSessionColumns = yield* sql<{ readonly name: string }>` + PRAGMA table_info(provider_session_runtime) + `; + assert.ok(providerSessionColumns.some((column) => column.name === "provider_instance_id")); + + const projectionThreadSessionColumns = yield* sql<{ readonly name: string }>` + PRAGMA table_info(projection_thread_sessions) + `; + assert.ok( + projectionThreadSessionColumns.some((column) => column.name === "provider_instance_id"), + ); + + const providerSessionIndexes = yield* sql<{ readonly name: string }>` + PRAGMA index_list(provider_session_runtime) + `; + assert.ok( + providerSessionIndexes.some( + (index) => index.name === "idx_provider_session_runtime_instance", + ), + ); + + const projectionThreadSessionIndexes = yield* sql<{ readonly name: string }>` + PRAGMA index_list(projection_thread_sessions) + `; + assert.ok( + projectionThreadSessionIndexes.some( + (index) => index.name === "idx_projection_thread_sessions_instance", + ), + ); + }), + ); +}); diff --git a/apps/server/src/persistence/Migrations/027_ProviderSessionRuntimeInstanceId.ts b/apps/server/src/persistence/Migrations/027_ProviderSessionRuntimeInstanceId.ts new file mode 100644 index 00000000000..7ae0e55b13c --- /dev/null +++ b/apps/server/src/persistence/Migrations/027_ProviderSessionRuntimeInstanceId.ts @@ -0,0 +1,38 @@ +/** + * Adds the nullable `provider_instance_id` routing column to + * `provider_session_runtime`. 
+ * + * Slice D of the provider-array refactor splits "driver kind" from + * "configured instance". Existing rows have only the driver name in + * `provider_name`; new rows additionally carry the user-defined instance + * routing key. The column remains nullable so legacy rows can still decode; + * the persistence boundary is responsible for materializing a concrete + * instance id before any hot routing path sees the binding. + * + * The column is nullable on purpose — backfilling it during the migration + * would require knowing which configured instance "owned" each historical + * session, and that mapping is ambiguous when the user later configures + * multiple instances of the same driver. Keeping that compatibility at the + * persistence boundary keeps the fallback out of active routing code. + */ +import * as SqlClient from "effect/unstable/sql/SqlClient"; +import * as Effect from "effect/Effect"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + const columns = yield* sql<{ readonly name: string }>` + PRAGMA table_info(provider_session_runtime) + `; + if (!columns.some((column) => column.name === "provider_instance_id")) { + yield* sql` + ALTER TABLE provider_session_runtime + ADD COLUMN provider_instance_id TEXT + `; + } + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_provider_session_runtime_instance + ON provider_session_runtime(provider_instance_id) + `; +}); diff --git a/apps/server/src/persistence/Migrations/028_ProjectionThreadSessionInstanceId.ts b/apps/server/src/persistence/Migrations/028_ProjectionThreadSessionInstanceId.ts new file mode 100644 index 00000000000..bc4ba98044f --- /dev/null +++ b/apps/server/src/persistence/Migrations/028_ProjectionThreadSessionInstanceId.ts @@ -0,0 +1,21 @@ +import * as SqlClient from "effect/unstable/sql/SqlClient"; +import * as Effect from "effect/Effect"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + const columns = yield* 
sql<{ readonly name: string }>` + PRAGMA table_info(projection_thread_sessions) + `; + if (!columns.some((column) => column.name === "provider_instance_id")) { + yield* sql` + ALTER TABLE projection_thread_sessions + ADD COLUMN provider_instance_id TEXT + `; + } + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_projection_thread_sessions_instance + ON projection_thread_sessions(provider_instance_id) + `; +}); diff --git a/apps/server/src/persistence/Migrations/029_BackfillForkProviderInstanceIds.test.ts b/apps/server/src/persistence/Migrations/029_BackfillForkProviderInstanceIds.test.ts new file mode 100644 index 00000000000..271459f4041 --- /dev/null +++ b/apps/server/src/persistence/Migrations/029_BackfillForkProviderInstanceIds.test.ts @@ -0,0 +1,383 @@ +import { assert, it } from "@effect/vitest"; +import { Effect, Layer } from "effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +import { runMigrations } from "../Migrations.ts"; +import * as NodeSqliteClient from "../NodeSqliteClient.ts"; + +const layer = it.layer(Layer.mergeAll(NodeSqliteClient.layerMemory())); + +layer("029_BackfillForkProviderInstanceIds", (it) => { + it.effect( + "backfills provider_instance_id for fork drivers across both routing tables", + () => + Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + // Run all migrations up to (and including) 028 so the + // `provider_instance_id` column exists but is left NULL for + // historical rows. + yield* runMigrations({ toMigrationInclusive: 28 }); + + // Seed a project + thread for each fork driver to satisfy the + // foreign-key relationships used by the projection tables. 
+ const forkKinds = ["amp", "copilot", "geminiCli", "kilo"] as const; + for (const kind of forkKinds) { + yield* sql` + INSERT INTO projection_projects ( + project_id, + title, + workspace_root, + scripts_json, + created_at, + updated_at, + deleted_at, + default_model_selection_json + ) + VALUES ( + ${`project-${kind}`}, + ${`Project ${kind}`}, + ${`/tmp/project-${kind}`}, + '[]', + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, + NULL + ) + `; + + yield* sql` + INSERT INTO projection_threads ( + thread_id, + project_id, + title, + model_selection_json, + runtime_mode, + interaction_mode, + branch, + worktree_path, + latest_turn_id, + created_at, + updated_at, + archived_at, + deleted_at + ) + VALUES ( + ${`thread-${kind}`}, + ${`project-${kind}`}, + ${`Thread ${kind}`}, + NULL, + 'full-access', + 'default', + NULL, + NULL, + NULL, + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, + NULL + ) + `; + + yield* sql` + INSERT INTO projection_thread_sessions ( + thread_id, + status, + provider_name, + provider_session_id, + provider_thread_id, + active_turn_id, + last_error, + updated_at, + runtime_mode, + provider_instance_id + ) + VALUES ( + ${`thread-${kind}`}, + 'running', + ${kind}, + NULL, + NULL, + NULL, + NULL, + '2026-01-01T00:00:00.000Z', + 'full-access', + NULL + ) + `; + + yield* sql` + INSERT INTO provider_session_runtime ( + thread_id, + provider_name, + adapter_key, + runtime_mode, + status, + last_seen_at, + resume_cursor_json, + runtime_payload_json, + provider_instance_id + ) + VALUES ( + ${`thread-${kind}`}, + ${kind}, + ${kind}, + 'full-access', + 'running', + '2026-01-01T00:00:00.000Z', + NULL, + NULL, + NULL + ) + `; + } + + // Also seed a non-fork (upstream) driver row to verify the + // backfill leaves it untouched. 
+ yield* sql` + INSERT INTO projection_projects ( + project_id, + title, + workspace_root, + scripts_json, + created_at, + updated_at, + deleted_at, + default_model_selection_json + ) + VALUES ( + 'project-claude', + 'Project claude', + '/tmp/project-claude', + '[]', + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, + NULL + ) + `; + yield* sql` + INSERT INTO projection_threads ( + thread_id, + project_id, + title, + model_selection_json, + runtime_mode, + interaction_mode, + branch, + worktree_path, + latest_turn_id, + created_at, + updated_at, + archived_at, + deleted_at + ) + VALUES ( + 'thread-claude', + 'project-claude', + 'Thread claude', + NULL, + 'full-access', + 'default', + NULL, + NULL, + NULL, + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, + NULL + ) + `; + yield* sql` + INSERT INTO projection_thread_sessions ( + thread_id, + status, + provider_name, + provider_session_id, + provider_thread_id, + active_turn_id, + last_error, + updated_at, + runtime_mode, + provider_instance_id + ) + VALUES ( + 'thread-claude', + 'running', + 'claudeAgent', + NULL, + NULL, + NULL, + NULL, + '2026-01-01T00:00:00.000Z', + 'full-access', + NULL + ) + `; + yield* sql` + INSERT INTO provider_session_runtime ( + thread_id, + provider_name, + adapter_key, + runtime_mode, + status, + last_seen_at, + resume_cursor_json, + runtime_payload_json, + provider_instance_id + ) + VALUES ( + 'thread-claude', + 'claudeAgent', + 'claudeAgent', + 'full-access', + 'running', + '2026-01-01T00:00:00.000Z', + NULL, + NULL, + NULL + ) + `; + + // Run migration 029. + yield* runMigrations({ toMigrationInclusive: 31 }); + + // Each fork row should now have its `provider_instance_id` set + // to the matching driver kind (the default instance id). 
+ for (const kind of forkKinds) { + const sessionRows = yield* sql<{ readonly providerInstanceId: string }>` + SELECT provider_instance_id AS "providerInstanceId" + FROM projection_thread_sessions + WHERE thread_id = ${`thread-${kind}`} + `; + assert.deepStrictEqual(sessionRows, [{ providerInstanceId: kind }]); + + const runtimeRows = yield* sql<{ readonly providerInstanceId: string }>` + SELECT provider_instance_id AS "providerInstanceId" + FROM provider_session_runtime + WHERE thread_id = ${`thread-${kind}`} + `; + assert.deepStrictEqual(runtimeRows, [{ providerInstanceId: kind }]); + } + + // The upstream-driver row must be untouched (still NULL). + const claudeSession = yield* sql<{ + readonly providerInstanceId: string | null; + }>` + SELECT provider_instance_id AS "providerInstanceId" + FROM projection_thread_sessions + WHERE thread_id = 'thread-claude' + `; + assert.deepStrictEqual(claudeSession, [{ providerInstanceId: null }]); + + const claudeRuntime = yield* sql<{ + readonly providerInstanceId: string | null; + }>` + SELECT provider_instance_id AS "providerInstanceId" + FROM provider_session_runtime + WHERE thread_id = 'thread-claude' + `; + assert.deepStrictEqual(claudeRuntime, [{ providerInstanceId: null }]); + }), + ); + + it.effect("is idempotent — re-running does not overwrite already-set ids", () => + Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + yield* runMigrations({ toMigrationInclusive: 28 }); + + yield* sql` + INSERT INTO projection_projects ( + project_id, + title, + workspace_root, + scripts_json, + created_at, + updated_at, + deleted_at, + default_model_selection_json + ) + VALUES ( + 'project-amp-custom', + 'Project amp custom', + '/tmp/project-amp-custom', + '[]', + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, + NULL + ) + `; + yield* sql` + INSERT INTO projection_threads ( + thread_id, + project_id, + title, + model_selection_json, + runtime_mode, + interaction_mode, + branch, + 
worktree_path, + latest_turn_id, + created_at, + updated_at, + archived_at, + deleted_at + ) + VALUES ( + 'thread-amp-custom', + 'project-amp-custom', + 'Thread amp custom', + NULL, + 'full-access', + 'default', + NULL, + NULL, + NULL, + '2026-01-01T00:00:00.000Z', + '2026-01-01T00:00:00.000Z', + NULL, + NULL + ) + `; + // Pre-existing row already has a NON-DEFAULT instance id (e.g. user + // ran a future custom-instance migration before this one). + yield* sql` + INSERT INTO projection_thread_sessions ( + thread_id, + status, + provider_name, + provider_session_id, + provider_thread_id, + active_turn_id, + last_error, + updated_at, + runtime_mode, + provider_instance_id + ) + VALUES ( + 'thread-amp-custom', + 'running', + 'amp', + NULL, + NULL, + NULL, + NULL, + '2026-01-01T00:00:00.000Z', + 'full-access', + 'amp-custom' + ) + `; + + yield* runMigrations({ toMigrationInclusive: 31 }); + + const rows = yield* sql<{ readonly providerInstanceId: string }>` + SELECT provider_instance_id AS "providerInstanceId" + FROM projection_thread_sessions + WHERE thread_id = 'thread-amp-custom' + `; + assert.deepStrictEqual(rows, [{ providerInstanceId: "amp-custom" }]); + }), + ); +}); diff --git a/apps/server/src/persistence/Migrations/029_BackfillForkProviderInstanceIds.ts b/apps/server/src/persistence/Migrations/029_BackfillForkProviderInstanceIds.ts new file mode 100644 index 00000000000..255eb2ddf7b --- /dev/null +++ b/apps/server/src/persistence/Migrations/029_BackfillForkProviderInstanceIds.ts @@ -0,0 +1,45 @@ +/** + * Backfills `provider_instance_id` for fork-only provider drivers so existing + * rows resolve under the new instance-based routing model. 
+ * + * Migrations 027/028 added the nullable `provider_instance_id` column to + * `provider_session_runtime` and `projection_thread_sessions` but + * intentionally left existing rows with `provider_instance_id IS NULL` — + * the upstream PR couldn't safely guess which configured instance owned + * each historical session for the four upstream drivers. + * + * For the fork's additional drivers (`amp`, `copilot`, `geminiCli`, `kilo`), + * the situation is simpler: every fork install runs at most one configured + * instance per driver kind, so we can safely backfill + * `provider_instance_id` with the driver kind itself (the default instance id + * used by built-in single-instance drivers — see `defaultInstanceIdForDriver`) + * for any row whose legacy `provider_name` matches one of the fork driver + * kinds and whose `provider_instance_id` is still null. + * + * Idempotent — re-running the migration is a no-op on already-backfilled + * rows because every UPDATE is guarded by `provider_instance_id IS NULL`. + */ +import * as Effect from "effect/Effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +const FORK_DRIVER_KINDS = ["amp", "copilot", "geminiCli", "kilo"] as const; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + for (const driverKind of FORK_DRIVER_KINDS) { + yield* sql` + UPDATE provider_session_runtime + SET provider_instance_id = ${driverKind} + WHERE provider_name = ${driverKind} + AND provider_instance_id IS NULL + `; + + yield* sql` + UPDATE projection_thread_sessions + SET provider_instance_id = ${driverKind} + WHERE provider_name = ${driverKind} + AND provider_instance_id IS NULL + `; + } +}); diff --git a/apps/server/src/persistence/Services/ProjectionThreadSessions.ts b/apps/server/src/persistence/Services/ProjectionThreadSessions.ts index fcd13f068da..5d2c3c87d1d 100644 --- a/apps/server/src/persistence/Services/ProjectionThreadSessions.ts +++ b/apps/server/src/persistence/Services/ProjectionThreadSessions.ts @@ 
-10,6 +10,7 @@ import { RuntimeMode, IsoDateTime, OrchestrationSessionStatus, + ProviderInstanceId, ThreadId, TurnId, } from "@t3tools/contracts"; @@ -22,6 +23,7 @@ export const ProjectionThreadSession = Schema.Struct({ threadId: ThreadId, status: OrchestrationSessionStatus, providerName: Schema.NullOr(Schema.String), + providerInstanceId: Schema.NullOr(ProviderInstanceId), runtimeMode: RuntimeMode, activeTurnId: Schema.NullOr(TurnId), lastError: Schema.NullOr(Schema.String), diff --git a/apps/server/src/persistence/Services/ProviderSessionRuntime.ts b/apps/server/src/persistence/Services/ProviderSessionRuntime.ts index bf8e658e8a6..6ba15b32d96 100644 --- a/apps/server/src/persistence/Services/ProviderSessionRuntime.ts +++ b/apps/server/src/persistence/Services/ProviderSessionRuntime.ts @@ -7,6 +7,7 @@ */ import { IsoDateTime, + ProviderInstanceId, ProviderSessionRuntimeStatus, RuntimeMode, ThreadId, @@ -19,6 +20,14 @@ import type { ProviderSessionRuntimeRepositoryError } from "../Errors.ts"; export const ProviderSessionRuntime = Schema.Struct({ threadId: ThreadId, providerName: Schema.String, + /** + * User-defined routing key for the configured provider instance that + * owns this session. Nullable only at the storage/migration boundary: + * rows persisted before the driver/instance split carry only + * `providerName`. Repository consumers must materialize a concrete + * instance id before routing. 
+ */ + providerInstanceId: Schema.NullOr(ProviderInstanceId), adapterKey: Schema.String, runtimeMode: RuntimeMode, status: ProviderSessionRuntimeStatus, diff --git a/apps/server/src/project/Layers/RepositoryIdentityResolver.ts b/apps/server/src/project/Layers/RepositoryIdentityResolver.ts index 307123551bb..ae35c29ce21 100644 --- a/apps/server/src/project/Layers/RepositoryIdentityResolver.ts +++ b/apps/server/src/project/Layers/RepositoryIdentityResolver.ts @@ -1,6 +1,9 @@ import type { RepositoryIdentity } from "@t3tools/contracts"; import { Cache, Duration, Effect, Exit, Layer } from "effect"; -import { detectGitHostingProviderFromRemoteUrl, normalizeGitRemoteUrl } from "@t3tools/shared/git"; +import { + detectSourceControlProviderFromGitRemoteUrl, + normalizeGitRemoteUrl, +} from "@t3tools/shared/git"; import { runProcess } from "../../processRunner.ts"; import { @@ -45,7 +48,7 @@ function buildRepositoryIdentity(input: { readonly rootPath: string; }): RepositoryIdentity { const canonicalKey = normalizeGitRemoteUrl(input.remoteUrl); - const hostingProvider = detectGitHostingProviderFromRemoteUrl(input.remoteUrl); + const sourceControlProvider = detectSourceControlProviderFromGitRemoteUrl(input.remoteUrl); const repositoryPath = canonicalKey.split("/").slice(1).join("/"); const repositoryPathSegments = repositoryPath.split("/").filter((segment) => segment.length > 0); const [owner] = repositoryPathSegments; @@ -60,7 +63,7 @@ function buildRepositoryIdentity(input: { }, rootPath: input.rootPath, ...(repositoryPath ? { displayName: repositoryPath } : {}), - ...(hostingProvider ? { provider: hostingProvider.kind } : {}), + ...(sourceControlProvider ? { provider: sourceControlProvider.kind } : {}), ...(owner ? { owner } : {}), ...(repositoryName ? 
{ name: repositoryName } : {}), }; diff --git a/apps/server/src/provider/Drivers/AmpDriver.ts b/apps/server/src/provider/Drivers/AmpDriver.ts new file mode 100644 index 00000000000..c0baed29d84 --- /dev/null +++ b/apps/server/src/provider/Drivers/AmpDriver.ts @@ -0,0 +1,133 @@ +/** + * AmpDriver — `ProviderDriver` for the Amp CLI runtime. + * + * Plain-value driver matching the Claude / OpenCode / Cursor pattern: the + * `create()` effect bundles a per-instance `snapshot`, `adapter`, and + * `textGeneration` triple, all scoped to the registry's lifecycle. Two + * concurrent Amp instances therefore have wholly independent + * `AmpServerManager` state and child processes. + * + * Amp's text-generation shape is the graceful "not supported" stub + * exported from `AmpTextGeneration` — the CLI doesn't currently expose a + * structured-output mode we can target. + * + * @module provider/Drivers/AmpDriver + */ +import { + GenericProviderSettings, + ProviderDriverKind, + type ServerProvider, +} from "@t3tools/contracts"; +import { Duration, Effect, FileSystem, Path, Schema, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { makeAmpTextGeneration } from "../../textGeneration/AmpTextGeneration.ts"; +import { ServerConfig } from "../../config.ts"; +import { ProviderDriverError } from "../Errors.ts"; +import { makeAmpAdapter } from "../Layers/AmpAdapter.ts"; +import { checkAmpStatus, makePendingAmpProvider } from "../Layers/AmpProvider.ts"; +import { ProviderEventLoggers } from "../Layers/ProviderEventLoggers.ts"; +import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; +import { + defaultProviderContinuationIdentity, + type ProviderDriver, + type ProviderInstance, +} from "../ProviderDriver.ts"; +import type { ServerProviderDraft } from "../providerSnapshot.ts"; +import { mergeProviderInstanceEnvironment } from "../ProviderInstanceEnvironment.ts"; + +const DRIVER_KIND = ProviderDriverKind.make("amp"); 
+const SNAPSHOT_REFRESH_INTERVAL = Duration.minutes(5); + +export type AmpDriverEnv = + | ChildProcessSpawner.ChildProcessSpawner + | FileSystem.FileSystem + | Path.Path + | ProviderEventLoggers + | ServerConfig; + +const withInstanceIdentity = + (input: { + readonly instanceId: ProviderInstance["instanceId"]; + readonly displayName: string | undefined; + readonly accentColor: string | undefined; + readonly continuationGroupKey: string; + }) => + (snapshot: ServerProviderDraft): ServerProvider => ({ + ...snapshot, + instanceId: input.instanceId, + driver: DRIVER_KIND, + ...(input.displayName ? { displayName: input.displayName } : {}), + ...(input.accentColor ? { accentColor: input.accentColor } : {}), + continuation: { groupKey: input.continuationGroupKey }, + }); + +export const AmpDriver: ProviderDriver = { + driverKind: DRIVER_KIND, + metadata: { + displayName: "Amp", + supportsMultipleInstances: true, + }, + configSchema: GenericProviderSettings, + defaultConfig: (): GenericProviderSettings => Schema.decodeSync(GenericProviderSettings)({}), + create: ({ instanceId, displayName, accentColor, environment, enabled, config }) => + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + const path = yield* Path.Path; + const processEnv = mergeProviderInstanceEnvironment(environment); + const continuationIdentity = defaultProviderContinuationIdentity({ + driverKind: DRIVER_KIND, + instanceId, + }); + const stampIdentity = withInstanceIdentity({ + instanceId, + displayName, + accentColor, + continuationGroupKey: continuationIdentity.continuationKey, + }); + const effectiveConfig = { ...config, enabled } satisfies GenericProviderSettings; + + const adapter = yield* makeAmpAdapter(effectiveConfig, { + instanceId, + environment: processEnv, + }); + const textGeneration = yield* makeAmpTextGeneration(effectiveConfig, processEnv); + + const checkProvider = checkAmpStatus(effectiveConfig, processEnv).pipe( + Effect.map(stampIdentity), + 
Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), + Effect.provideService(Path.Path, path), + ); + + const snapshot = yield* makeManagedServerProvider({ + getSettings: Effect.succeed(effectiveConfig), + streamSettings: Stream.never, + haveSettingsChanged: () => false, + initialSnapshot: (settings) => stampIdentity(makePendingAmpProvider(settings)), + checkProvider, + refreshInterval: SNAPSHOT_REFRESH_INTERVAL, + }).pipe( + Effect.mapError( + (cause) => + new ProviderDriverError({ + driver: DRIVER_KIND, + instanceId, + detail: `Failed to build Amp snapshot: ${cause.message ?? String(cause)}`, + cause, + }), + ), + ); + + return { + instanceId, + driverKind: DRIVER_KIND, + continuationIdentity, + displayName, + accentColor, + enabled, + snapshot, + adapter, + textGeneration, + } satisfies ProviderInstance; + }), +}; diff --git a/apps/server/src/provider/Drivers/ClaudeDriver.ts b/apps/server/src/provider/Drivers/ClaudeDriver.ts new file mode 100644 index 00000000000..311f4958651 --- /dev/null +++ b/apps/server/src/provider/Drivers/ClaudeDriver.ts @@ -0,0 +1,157 @@ +/** + * ClaudeDriver — `ProviderDriver` for the Claude Agent SDK runtime. + * + * Mirrors `CodexDriver`: a plain value whose `create()` returns one + * `ProviderInstance` bundling `snapshot` / `adapter` / `textGeneration` + * closures captured over the per-instance `ClaudeSettings`. + * + * Unlike Codex, the Claude snapshot probe may invoke a secondary probe + * (`probeClaudeCapabilities`) to read Anthropic account + slash-command + * metadata. That probe is per-instance and keyed by binary + resolved HOME so + * two concurrent Claude instances don't cross-contaminate account metadata. 
+ * + * @module provider/Drivers/ClaudeDriver + */ +import { ClaudeSettings, ProviderDriverKind, type ServerProvider } from "@t3tools/contracts"; +import { Cache, Duration, Effect, FileSystem, Path, Schema, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { makeClaudeTextGeneration } from "../../textGeneration/ClaudeTextGeneration.ts"; +import { ServerConfig } from "../../config.ts"; +import { ProviderDriverError } from "../Errors.ts"; +import { makeClaudeAdapter } from "../Layers/ClaudeAdapter.ts"; +import { + checkClaudeProviderStatus, + makePendingClaudeProvider, + probeClaudeCapabilities, +} from "../Layers/ClaudeProvider.ts"; +import { ProviderEventLoggers } from "../Layers/ProviderEventLoggers.ts"; +import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; +import { + defaultProviderContinuationIdentity, + type ProviderDriver, + type ProviderInstance, +} from "../ProviderDriver.ts"; +import type { ServerProviderDraft } from "../providerSnapshot.ts"; +import { mergeProviderInstanceEnvironment } from "../ProviderInstanceEnvironment.ts"; +import { makeClaudeCapabilitiesCacheKey, makeClaudeContinuationGroupKey } from "./ClaudeHome.ts"; + +const DRIVER_KIND = ProviderDriverKind.make("claudeAgent"); +const SNAPSHOT_REFRESH_INTERVAL = Duration.minutes(5); +const CAPABILITIES_PROBE_TTL = Duration.minutes(5); + +export type ClaudeDriverEnv = + | ChildProcessSpawner.ChildProcessSpawner + | FileSystem.FileSystem + | Path.Path + | ProviderEventLoggers + | ServerConfig; + +const withInstanceIdentity = + (input: { + readonly instanceId: ProviderInstance["instanceId"]; + readonly displayName: string | undefined; + readonly accentColor: string | undefined; + readonly continuationGroupKey: string; + }) => + (snapshot: ServerProviderDraft): ServerProvider => ({ + ...snapshot, + instanceId: input.instanceId, + driver: DRIVER_KIND, + ...(input.displayName ? 
{ displayName: input.displayName } : {}), + ...(input.accentColor ? { accentColor: input.accentColor } : {}), + continuation: { groupKey: input.continuationGroupKey }, + }); + +export const ClaudeDriver: ProviderDriver = { + driverKind: DRIVER_KIND, + metadata: { + displayName: "Claude", + supportsMultipleInstances: true, + }, + configSchema: ClaudeSettings, + defaultConfig: (): ClaudeSettings => Schema.decodeSync(ClaudeSettings)({}), + create: ({ instanceId, displayName, accentColor, environment, enabled, config }) => + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + const path = yield* Path.Path; + const eventLoggers = yield* ProviderEventLoggers; + const processEnv = mergeProviderInstanceEnvironment(environment); + const fallbackContinuationIdentity = defaultProviderContinuationIdentity({ + driverKind: DRIVER_KIND, + instanceId, + }); + const effectiveConfig = { ...config, enabled } satisfies ClaudeSettings; + const continuationGroupKey = yield* makeClaudeContinuationGroupKey(effectiveConfig); + const stampIdentity = withInstanceIdentity({ + instanceId, + displayName, + accentColor, + continuationGroupKey, + }); + + const adapterOptions = { + instanceId, + environment: processEnv, + ...(eventLoggers.native ? { nativeEventLogger: eventLoggers.native } : {}), + }; + const adapter = yield* makeClaudeAdapter(effectiveConfig, adapterOptions); + const textGeneration = yield* makeClaudeTextGeneration(effectiveConfig, processEnv); + + // Per-instance capabilities cache: keyed on binary + resolved HOME so + // account-specific probes never share auth metadata across instances. 
+ const capabilitiesProbeCache = yield* Cache.make({ + capacity: 1, + timeToLive: CAPABILITIES_PROBE_TTL, + lookup: () => + probeClaudeCapabilities(effectiveConfig, processEnv).pipe( + Effect.provideService(Path.Path, path), + ), + }); + const capabilitiesCacheKey = yield* makeClaudeCapabilitiesCacheKey(effectiveConfig); + + const checkProvider = checkClaudeProviderStatus( + effectiveConfig, + () => Cache.get(capabilitiesProbeCache, capabilitiesCacheKey), + processEnv, + ).pipe( + Effect.map(stampIdentity), + Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), + Effect.provideService(Path.Path, path), + ); + + const snapshot = yield* makeManagedServerProvider({ + getSettings: Effect.succeed(effectiveConfig), + streamSettings: Stream.never, + haveSettingsChanged: () => false, + initialSnapshot: (settings) => stampIdentity(makePendingClaudeProvider(settings)), + checkProvider, + refreshInterval: SNAPSHOT_REFRESH_INTERVAL, + }).pipe( + Effect.mapError( + (cause) => + new ProviderDriverError({ + driver: DRIVER_KIND, + instanceId, + detail: `Failed to build Claude snapshot: ${cause.message ?? 
String(cause)}`, + cause, + }), + ), + ); + + return { + instanceId, + driverKind: DRIVER_KIND, + continuationIdentity: { + ...fallbackContinuationIdentity, + continuationKey: continuationGroupKey, + }, + displayName, + accentColor, + enabled, + snapshot, + adapter, + textGeneration, + } satisfies ProviderInstance; + }), +}; diff --git a/apps/server/src/provider/Drivers/ClaudeHome.test.ts b/apps/server/src/provider/Drivers/ClaudeHome.test.ts new file mode 100644 index 00000000000..84c9c331cc8 --- /dev/null +++ b/apps/server/src/provider/Drivers/ClaudeHome.test.ts @@ -0,0 +1,52 @@ +import * as NodeOS from "node:os"; + +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { describe, expect, it } from "@effect/vitest"; +import { Effect, Path } from "effect"; + +import { + makeClaudeCapabilitiesCacheKey, + makeClaudeContinuationGroupKey, + makeClaudeEnvironment, + resolveClaudeHomePath, +} from "./ClaudeHome.ts"; + +it.layer(NodeServices.layer)("ClaudeHome", (it) => { + describe("Claude home resolution", () => { + it.effect("uses the process home when no Claude home override is configured", () => + Effect.gen(function* () { + const path = yield* Path.Path; + const resolved = path.resolve(NodeOS.homedir()); + + expect(yield* resolveClaudeHomePath({ homePath: "" })).toBe(resolved); + expect(yield* makeClaudeEnvironment({ homePath: "" })).toBe(process.env); + }), + ); + + it.effect("resolves configured Claude HOME and stamps continuation/cache keys with it", () => + Effect.gen(function* () { + const path = yield* Path.Path; + const homePath = "~/.claude-work"; + const resolved = path.resolve(NodeOS.homedir(), ".claude-work"); + + expect(yield* resolveClaudeHomePath({ homePath })).toBe(resolved); + expect((yield* makeClaudeEnvironment({ homePath })).HOME).toBe(resolved); + expect(yield* makeClaudeContinuationGroupKey({ homePath })).toBe(`claude:home:${resolved}`); + expect(yield* makeClaudeCapabilitiesCacheKey({ binaryPath: "claude", homePath 
})).toBe( + `claude\0${resolved}`, + ); + }), + ); + + it.effect("keeps continuation compatible across instances with the same Claude HOME", () => + Effect.gen(function* () { + const path = yield* Path.Path; + const resolved = path.resolve(NodeOS.homedir()); + + expect(yield* makeClaudeContinuationGroupKey({ homePath: "" })).toBe( + `claude:home:${resolved}`, + ); + }), + ); + }); +}); diff --git a/apps/server/src/provider/Drivers/ClaudeHome.ts b/apps/server/src/provider/Drivers/ClaudeHome.ts new file mode 100644 index 00000000000..c959096677e --- /dev/null +++ b/apps/server/src/provider/Drivers/ClaudeHome.ts @@ -0,0 +1,43 @@ +import * as NodeOS from "node:os"; + +import type { ClaudeSettings } from "@t3tools/contracts"; +import { Effect, Path } from "effect"; + +import { expandHomePath } from "../../pathExpansion.ts"; + +export const resolveClaudeHomePath = Effect.fn("resolveClaudeHomePath")(function* ( + config: Pick, +): Effect.fn.Return { + const path = yield* Path.Path; + const homePath = config.homePath.trim(); + return path.resolve(homePath.length > 0 ? 
expandHomePath(homePath) : NodeOS.homedir()); +}); + +export const makeClaudeEnvironment = Effect.fn("makeClaudeEnvironment")(function* ( + config: Pick, + baseEnv: NodeJS.ProcessEnv = process.env, +): Effect.fn.Return { + const homePath = config.homePath.trim(); + if (homePath.length === 0) return baseEnv; + const resolvedHomePath = yield* resolveClaudeHomePath(config); + return { + ...baseEnv, + HOME: resolvedHomePath, + }; +}); + +export const makeClaudeContinuationGroupKey = Effect.fn("makeClaudeContinuationGroupKey")( + function* (config: Pick): Effect.fn.Return { + const resolvedHomePath = yield* resolveClaudeHomePath(config); + return `claude:home:${resolvedHomePath}`; + }, +); + +export const makeClaudeCapabilitiesCacheKey = Effect.fn("makeClaudeCapabilitiesCacheKey")( + function* ( + config: Pick, + ): Effect.fn.Return { + const resolvedHomePath = yield* resolveClaudeHomePath(config); + return `${config.binaryPath}\0${resolvedHomePath}`; + }, +); diff --git a/apps/server/src/provider/Drivers/CodexDriver.ts b/apps/server/src/provider/Drivers/CodexDriver.ts new file mode 100644 index 00000000000..26fffd5e213 --- /dev/null +++ b/apps/server/src/provider/Drivers/CodexDriver.ts @@ -0,0 +1,171 @@ +/** + * CodexDriver — first concrete `ProviderDriver` in the new per-instance model. + * + * A driver is a plain value (not a Context.Service) whose `create()` returns + * one `ProviderInstance` bundling: + * - `snapshot` — the live `ServerProviderShape` for this instance; + * - `adapter` — the Codex session/turn/approval runtime; + * - `textGeneration` — commit/PR/branch/title generation via `codex exec`. + * + * Each call to `create()` captures the `codexConfig` argument in closures + * owned by the returned instance. Two instances created with different + * `homePath`s (e.g. `codex_personal` + `codex_work`) therefore run with + * fully independent Codex app-server processes and `CODEX_HOME` + * environments — no shared mutable state. 
+ * + * Resource lifecycle: `create()` runs in a scope handed in by the registry. + * Closing that scope releases the adapter's child processes, the managed + * snapshot's refresh fibre, and the text-generation binaries' transient + * scratch files. The registry uses this to tear down an instance when its + * `providerInstances` entry disappears or its config changes. + * + * @module provider/Drivers/CodexDriver + */ +import { CodexSettings, ProviderDriverKind, type ServerProvider } from "@t3tools/contracts"; +import { Duration, Effect, FileSystem, Path, Schema, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { makeCodexTextGeneration } from "../../textGeneration/CodexTextGeneration.ts"; +import { ServerConfig } from "../../config.ts"; +import { ProviderDriverError } from "../Errors.ts"; +import { makeCodexAdapter } from "../Layers/CodexAdapter.ts"; +import { checkCodexProviderStatus, makePendingCodexProvider } from "../Layers/CodexProvider.ts"; +import { ProviderEventLoggers } from "../Layers/ProviderEventLoggers.ts"; +import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; +import type { ProviderDriver, ProviderInstance } from "../ProviderDriver.ts"; +import type { ServerProviderDraft } from "../providerSnapshot.ts"; +import { mergeProviderInstanceEnvironment } from "../ProviderInstanceEnvironment.ts"; +import { + codexContinuationIdentity, + materializeCodexShadowHome, + resolveCodexHomeLayout, +} from "./CodexHomeLayout.ts"; + +const DRIVER_KIND = ProviderDriverKind.make("codex"); +const SNAPSHOT_REFRESH_INTERVAL = Duration.minutes(5); + +/** + * Services the driver needs to materialize an instance. Surfaced as the + * driver's `R` so the registry layer aggregates these across every + * registered driver and the runtime satisfies them once. 
+ */ +export type CodexDriverEnv = + | ChildProcessSpawner.ChildProcessSpawner + | FileSystem.FileSystem + | Path.Path + | ProviderEventLoggers + | ServerConfig; + +/** + * Stamp instance identity onto a `ServerProvider` snapshot produced by the + * driver-kind-only codex helpers. Once `buildServerProvider` in + * `providerSnapshot.ts` is widened to accept `instanceId`/`driver`, this + * wrapper disappears. + */ +const withInstanceIdentity = + (input: { + readonly instanceId: ProviderInstance["instanceId"]; + readonly displayName: string | undefined; + readonly accentColor: string | undefined; + readonly continuationGroupKey: string; + }) => + (snapshot: ServerProviderDraft): ServerProvider => ({ + ...snapshot, + instanceId: input.instanceId, + driver: DRIVER_KIND, + ...(input.displayName ? { displayName: input.displayName } : {}), + ...(input.accentColor ? { accentColor: input.accentColor } : {}), + continuation: { groupKey: input.continuationGroupKey }, + }); + +export const CodexDriver: ProviderDriver = { + driverKind: DRIVER_KIND, + metadata: { + displayName: "Codex", + supportsMultipleInstances: true, + }, + configSchema: CodexSettings, + defaultConfig: (): CodexSettings => Schema.decodeSync(CodexSettings)({}), + create: ({ instanceId, displayName, accentColor, environment, enabled, config }) => + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + const eventLoggers = yield* ProviderEventLoggers; + const processEnv = mergeProviderInstanceEnvironment(environment); + const homeLayout = yield* resolveCodexHomeLayout(config); + const continuationIdentity = codexContinuationIdentity(homeLayout); + const stampIdentity = withInstanceIdentity({ + instanceId, + displayName, + accentColor, + continuationGroupKey: continuationIdentity.continuationKey, + }); + yield* materializeCodexShadowHome(homeLayout).pipe( + Effect.mapError( + (cause) => + new ProviderDriverError({ + driver: DRIVER_KIND, + instanceId, + detail: 
cause.message, + cause, + }), + ), + ); + const effectiveConfig = { + ...config, + enabled, + homePath: homeLayout.effectiveHomePath ?? "", + } satisfies CodexSettings; + + // `makeCodexAdapter` and `makeCodexTextGeneration` have `never` error + // channels at construction time — their failure modes are all on the + // per-operation closures they return. No `mapError` wrapper is needed + // here; the registry only has to worry about snapshot-build and + // spawner-availability failures surfaced from `checkCodexProviderStatus` + // below. + const adapter = yield* makeCodexAdapter(effectiveConfig, { + instanceId, + environment: processEnv, + ...(eventLoggers.native ? { nativeEventLogger: eventLoggers.native } : {}), + }); + const textGeneration = yield* makeCodexTextGeneration(effectiveConfig, processEnv); + + // Build a managed snapshot whose settings never change — mutations come + // in as instance rebuilds from the registry rather than in-place + // updates. Pre-provide `ChildProcessSpawner` so the check fits + // `makeManagedServerProvider.checkProvider`'s `R = never`. + const checkProvider = checkCodexProviderStatus(effectiveConfig, undefined, processEnv).pipe( + Effect.map(stampIdentity), + Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), + ); + const snapshot = yield* makeManagedServerProvider({ + getSettings: Effect.succeed(effectiveConfig), + streamSettings: Stream.never, + haveSettingsChanged: () => false, + initialSnapshot: (settings) => stampIdentity(makePendingCodexProvider(settings)), + checkProvider, + refreshInterval: SNAPSHOT_REFRESH_INTERVAL, + }).pipe( + Effect.mapError( + (cause) => + new ProviderDriverError({ + driver: DRIVER_KIND, + instanceId, + detail: `Failed to build Codex snapshot: ${cause.message ?? 
String(cause)}`, + cause, + }), + ), + ); + + return { + instanceId, + driverKind: DRIVER_KIND, + continuationIdentity, + displayName, + accentColor, + enabled, + snapshot, + adapter, + textGeneration, + } satisfies ProviderInstance; + }), +}; diff --git a/apps/server/src/provider/Drivers/CodexHomeLayout.test.ts b/apps/server/src/provider/Drivers/CodexHomeLayout.test.ts new file mode 100644 index 00000000000..b84a43c4032 --- /dev/null +++ b/apps/server/src/provider/Drivers/CodexHomeLayout.test.ts @@ -0,0 +1,209 @@ +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { describe, expect, it } from "@effect/vitest"; +import { Effect, FileSystem, Path, Schema } from "effect"; + +import { CodexSettings } from "@t3tools/contracts"; +import { + CodexShadowHomeError, + materializeCodexShadowHome, + resolveCodexHomeLayout, +} from "./CodexHomeLayout.ts"; + +const decodeCodexSettings = (input: { + readonly enabled?: boolean; + readonly homePath?: string; + readonly shadowHomePath?: string; + readonly customModels?: readonly string[]; + readonly binaryPath?: string; +}): CodexSettings => Schema.decodeSync(CodexSettings)(input); + +const makeTempDir = Effect.fn("CodexHomeLayout.test.makeTempDir")(function* (prefix: string) { + const fileSystem = yield* FileSystem.FileSystem; + return yield* fileSystem.makeTempDirectoryScoped({ prefix }); +}); + +const writeTextFile = Effect.fn("CodexHomeLayout.test.writeTextFile")(function* ( + filePath: string, + contents: string, +) { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + yield* fileSystem.makeDirectory(path.dirname(filePath), { recursive: true }); + yield* fileSystem.writeFileString(filePath, contents); +}); + +it.layer(NodeServices.layer)("CodexHomeLayout", (it) => { + describe("resolveCodexHomeLayout", () => { + it.effect("uses direct CODEX_HOME when no shadow home is configured", () => + Effect.gen(function* () { + const homePath = yield* 
makeTempDir("t3code-codex-home-"); + + const layout = yield* resolveCodexHomeLayout( + decodeCodexSettings({ + homePath, + }), + ); + + expect(layout).toMatchObject({ + mode: "direct", + sharedHomePath: homePath, + effectiveHomePath: homePath, + continuationKey: `codex:home:${homePath}`, + }); + }), + ); + + it.effect("uses the shared home for continuation and the shadow home for runtime", () => + Effect.gen(function* () { + const path = yield* Path.Path; + const sharedHome = yield* makeTempDir("t3code-codex-shared-"); + const shadowRoot = yield* makeTempDir("t3code-codex-shadow-root-"); + const shadowHome = path.join(shadowRoot, "shadow"); + + const layout = yield* resolveCodexHomeLayout( + decodeCodexSettings({ + homePath: sharedHome, + shadowHomePath: shadowHome, + }), + ); + + expect(layout).toMatchObject({ + mode: "authOverlay", + sharedHomePath: sharedHome, + effectiveHomePath: shadowHome, + continuationKey: `codex:home:${sharedHome}`, + }); + }), + ); + }); + + describe("materializeCodexShadowHome", () => { + it.effect("materializes a shadow home with shared state links and private auth", () => + Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const sharedHome = yield* makeTempDir("t3code-codex-shared-"); + const shadowRoot = yield* makeTempDir("t3code-codex-shadow-root-"); + const shadowHome = path.join(shadowRoot, "shadow"); + + yield* fileSystem.makeDirectory(path.join(sharedHome, "sessions")); + yield* writeTextFile(path.join(sharedHome, "config.toml"), 'model = "gpt-5-codex"\n'); + yield* writeTextFile(path.join(sharedHome, "models_cache.json"), '{"models":["shared"]}\n'); + yield* writeTextFile(path.join(sharedHome, "auth.json"), '{"shared":true}\n'); + yield* fileSystem.makeDirectory(shadowHome, { recursive: true }); + yield* writeTextFile(path.join(shadowHome, "auth.json"), '{"shadow":true}\n'); + yield* fileSystem.symlink( + path.join(sharedHome, "models_cache.json"), + 
path.join(shadowHome, "models_cache.json"), + ); + + const layout = yield* resolveCodexHomeLayout( + decodeCodexSettings({ + homePath: sharedHome, + shadowHomePath: shadowHome, + }), + ); + + yield* materializeCodexShadowHome(layout); + + const sessionsTarget = yield* fileSystem.readLink(path.join(shadowHome, "sessions")); + const configTarget = yield* fileSystem.readLink(path.join(shadowHome, "config.toml")); + const modelsCacheExists = yield* fileSystem.exists( + path.join(shadowHome, "models_cache.json"), + ); + const authLinkResult = yield* fileSystem + .readLink(path.join(shadowHome, "auth.json")) + .pipe(Effect.result); + const authContents = yield* fileSystem.readFileString(path.join(shadowHome, "auth.json")); + + expect(sessionsTarget).toBe(path.join(sharedHome, "sessions")); + expect(configTarget).toBe(path.join(sharedHome, "config.toml")); + expect(modelsCacheExists).toBe(false); + expect(authLinkResult._tag).toBe("Failure"); + expect(authContents).toContain("shadow"); + }), + ); + + it.effect("accepts Codex-created shadow-local runtime directories", () => + Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const sharedHome = yield* makeTempDir("t3code-codex-shared-"); + const shadowRoot = yield* makeTempDir("t3code-codex-shadow-root-"); + const shadowHome = path.join(shadowRoot, "shadow"); + + yield* fileSystem.makeDirectory(path.join(sharedHome, "log")); + yield* fileSystem.makeDirectory(path.join(sharedHome, "memories")); + yield* fileSystem.makeDirectory(path.join(sharedHome, "tmp")); + yield* writeTextFile(path.join(sharedHome, "config.toml"), 'model = "gpt-5-codex"\n'); + yield* writeTextFile(path.join(shadowHome, "auth.json"), '{"shadow":true}\n'); + yield* fileSystem.makeDirectory(path.join(shadowHome, "log"), { recursive: true }); + yield* fileSystem.makeDirectory(path.join(shadowHome, "memories"), { recursive: true }); + yield* fileSystem.makeDirectory(path.join(shadowHome, "tmp"), { 
recursive: true }); + + const layout = yield* resolveCodexHomeLayout( + decodeCodexSettings({ + homePath: sharedHome, + shadowHomePath: shadowHome, + }), + ); + + yield* materializeCodexShadowHome(layout); + + const configTarget = yield* fileSystem.readLink(path.join(shadowHome, "config.toml")); + const logLinkResult = yield* fileSystem + .readLink(path.join(shadowHome, "log")) + .pipe(Effect.result); + const memoriesLinkResult = yield* fileSystem + .readLink(path.join(shadowHome, "memories")) + .pipe(Effect.result); + const tmpLinkResult = yield* fileSystem + .readLink(path.join(shadowHome, "tmp")) + .pipe(Effect.result); + + expect(configTarget).toBe(path.join(sharedHome, "config.toml")); + expect(logLinkResult._tag).toBe("Failure"); + expect(memoriesLinkResult._tag).toBe("Failure"); + expect(tmpLinkResult._tag).toBe("Failure"); + }), + ); + + it.effect("rejects shadow homes that point at the shared home", () => + Effect.gen(function* () { + const sharedHome = yield* makeTempDir("t3code-codex-shared-"); + const layout = yield* resolveCodexHomeLayout( + decodeCodexSettings({ + homePath: sharedHome, + shadowHomePath: sharedHome, + }), + ); + + const error = yield* materializeCodexShadowHome(layout).pipe(Effect.flip); + + expect(error).toBeInstanceOf(CodexShadowHomeError); + }), + ); + + it.effect("rejects shared entries that already exist in the shadow home as real files", () => + Effect.gen(function* () { + const path = yield* Path.Path; + const sharedHome = yield* makeTempDir("t3code-codex-shared-"); + const shadowRoot = yield* makeTempDir("t3code-codex-shadow-root-"); + const shadowHome = path.join(shadowRoot, "shadow"); + yield* writeTextFile(path.join(sharedHome, "config.toml"), 'model = "gpt-5-codex"\n'); + yield* writeTextFile(path.join(shadowHome, "config.toml"), 'model = "local"\n'); + + const layout = yield* resolveCodexHomeLayout( + decodeCodexSettings({ + homePath: sharedHome, + shadowHomePath: shadowHome, + }), + ); + + const error = yield* 
materializeCodexShadowHome(layout).pipe(Effect.flip); + + expect(error.detail).toContain("already exists and is not a symlink"); + }), + ); + }); +}); diff --git a/apps/server/src/provider/Drivers/CodexHomeLayout.ts b/apps/server/src/provider/Drivers/CodexHomeLayout.ts new file mode 100644 index 00000000000..0b6cd6b8918 --- /dev/null +++ b/apps/server/src/provider/Drivers/CodexHomeLayout.ts @@ -0,0 +1,263 @@ +import * as NodeOS from "node:os"; + +import { ProviderDriverKind, type CodexSettings } from "@t3tools/contracts"; +import { Effect, FileSystem, Path, Schema } from "effect"; +import * as PlatformError from "effect/PlatformError"; + +import { expandHomePath } from "../../pathExpansion.ts"; + +export interface CodexHomeLayout { + readonly mode: "direct" | "authOverlay"; + readonly sharedHomePath: string; + readonly effectiveHomePath: string | undefined; + readonly continuationKey: string; +} + +const KNOWN_SHARED_DIRECTORIES = [ + "sessions", + "archived_sessions", + "sqlite", + "shell_snapshots", + "worktrees", + "skills", + "plugins", + "cache", + "logs", +] as const; + +const PRIVATE_ENTRY_NAMES = new Set(["auth.json", "models_cache.json"]); +const SHADOW_LOCAL_ENTRY_NAMES = new Set(["log", "memories", "tmp"]); + +function resolveHomePath(path: Path.Path, value: string | undefined): string { + const expanded = + value && value.trim().length > 0 + ? expandHomePath(value) + : path.join(NodeOS.homedir(), ".codex"); + return path.resolve(expanded); +} + +export const resolveCodexHomeLayout = Effect.fn("resolveCodexHomeLayout")(function* ( + config: CodexSettings, +): Effect.fn.Return { + const path = yield* Path.Path; + const sharedHomePath = resolveHomePath(path, config.homePath); + const shadowHomePath = config.shadowHomePath.trim(); + if (shadowHomePath.length === 0) { + return { + mode: "direct", + sharedHomePath, + effectiveHomePath: config.homePath.trim().length > 0 ? 
sharedHomePath : undefined, + continuationKey: `codex:home:${sharedHomePath}`, + }; + } + + const effectiveHomePath = path.resolve(expandHomePath(shadowHomePath)); + return { + mode: "authOverlay", + sharedHomePath, + effectiveHomePath, + continuationKey: `codex:home:${sharedHomePath}`, + }; +}); + +export class CodexShadowHomeError extends Schema.TaggedErrorClass()( + "CodexShadowHomeError", + { + detail: Schema.String, + cause: Schema.optional(Schema.Unknown), + }, +) { + override get message(): string { + return this.detail; + } +} + +type LinkState = + | { + readonly _tag: "Missing"; + } + | { + readonly _tag: "NotSymlink"; + } + | { + readonly _tag: "Symlink"; + readonly target: string; + }; + +function toShadowHomeError(cause: unknown): CodexShadowHomeError { + return Schema.is(CodexShadowHomeError)(cause) + ? cause + : new CodexShadowHomeError({ + detail: "Failed to materialize Codex shadow home.", + cause, + }); +} + +function normalizeShadowHomeError( + effect: Effect.Effect, +): Effect.Effect { + return effect.pipe(Effect.mapError(toShadowHomeError)); +} + +function isNotSymlinkError(error: PlatformError.PlatformError): boolean { + const cause = error.reason.cause; + return ( + error.reason._tag === "Unknown" && + typeof cause === "object" && + cause !== null && + "code" in cause && + cause.code === "EINVAL" + ); +} + +const readLinkState = Effect.fn("CodexHomeLayout.readLinkState")(function* ( + fileSystem: FileSystem.FileSystem, + linkPath: string, +): Effect.fn.Return { + return yield* fileSystem.readLink(linkPath).pipe( + Effect.map((target): LinkState => ({ _tag: "Symlink", target })), + Effect.catch((error) => { + if (error.reason._tag === "NotFound") { + return Effect.succeed({ _tag: "Missing" }); + } + if (isNotSymlinkError(error)) { + return Effect.succeed({ _tag: "NotSymlink" }); + } + return Effect.fail(toShadowHomeError(error)); + }), + ); +}); + +const removePrivateSymlink = Effect.fn("CodexHomeLayout.removePrivateSymlink")(function* (input: 
{ + readonly fileSystem: FileSystem.FileSystem; + readonly shadowPath: string; + readonly entryName: string; +}): Effect.fn.Return { + const path = yield* Path.Path; + const privatePath = path.join(input.shadowPath, input.entryName); + const state = yield* readLinkState(input.fileSystem, privatePath); + if (state._tag === "Symlink") { + yield* normalizeShadowHomeError(input.fileSystem.remove(privatePath)); + } +}); + +const ensureSymlink = Effect.fn("CodexHomeLayout.ensureSymlink")(function* (input: { + readonly fileSystem: FileSystem.FileSystem; + readonly shadowPath: string; + readonly sharedPath: string; + readonly entryName: string; +}): Effect.fn.Return { + const path = yield* Path.Path; + const target = path.join(input.sharedPath, input.entryName); + const link = path.join(input.shadowPath, input.entryName); + const state = yield* readLinkState(input.fileSystem, link); + + if (state._tag === "NotSymlink") { + return yield* new CodexShadowHomeError({ + detail: `Cannot create Codex shadow home because '${link}' already exists and is not a symlink.`, + }); + } + + if (state._tag === "Missing") { + return yield* normalizeShadowHomeError(input.fileSystem.symlink(target, link)); + } + + const resolvedExisting = path.resolve(path.dirname(link), state.target); + if (resolvedExisting !== target) { + yield* normalizeShadowHomeError(input.fileSystem.remove(link)); + yield* normalizeShadowHomeError(input.fileSystem.symlink(target, link)); + } +}); + +const ensureShadowAuthIsPrivate = Effect.fn("CodexHomeLayout.ensureShadowAuthIsPrivate")(function* ( + fileSystem: FileSystem.FileSystem, + shadowPath: string, +): Effect.fn.Return { + const path = yield* Path.Path; + const authPath = path.join(shadowPath, "auth.json"); + const state = yield* readLinkState(fileSystem, authPath); + if (state._tag === "Symlink") { + return yield* new CodexShadowHomeError({ + detail: `Codex shadow auth file '${authPath}' must be a real file, not a symlink.`, + }); + } +}); + +export const 
materializeCodexShadowHome = Effect.fn("materializeCodexShadowHome")(function* ( + layout: CodexHomeLayout, +) { + if (layout.mode !== "authOverlay") return; + const effectiveHomePath = layout.effectiveHomePath; + if (!effectiveHomePath) return; + if (layout.sharedHomePath === effectiveHomePath) { + return yield* new CodexShadowHomeError({ + detail: "Codex shadow home path must be different from the shared home path.", + }); + } + + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + + yield* normalizeShadowHomeError( + Effect.all( + [ + fileSystem.makeDirectory(layout.sharedHomePath, { recursive: true }), + fileSystem.makeDirectory(effectiveHomePath, { recursive: true }), + ...KNOWN_SHARED_DIRECTORIES.map((directory) => + fileSystem.makeDirectory(path.join(layout.sharedHomePath, directory), { + recursive: true, + }), + ), + ], + { concurrency: "unbounded" }, + ), + ); + + const sharedEntryNames = yield* normalizeShadowHomeError( + fileSystem.readDirectory(layout.sharedHomePath), + ); + const entries = new Set(KNOWN_SHARED_DIRECTORIES); + for (const entryName of sharedEntryNames) { + if (!PRIVATE_ENTRY_NAMES.has(entryName) && !SHADOW_LOCAL_ENTRY_NAMES.has(entryName)) { + entries.add(entryName); + } + } + + yield* Effect.forEach( + PRIVATE_ENTRY_NAMES, + (entryName) => + entryName === "auth.json" + ? 
Effect.void + : removePrivateSymlink({ + fileSystem, + shadowPath: effectiveHomePath, + entryName, + }), + { discard: true }, + ); + + yield* Effect.forEach( + entries, + (entryName) => { + if (PRIVATE_ENTRY_NAMES.has(entryName)) { + return Effect.void; + } + return ensureSymlink({ + fileSystem, + shadowPath: effectiveHomePath, + sharedPath: layout.sharedHomePath, + entryName, + }); + }, + { discard: true }, + ); + + yield* ensureShadowAuthIsPrivate(fileSystem, effectiveHomePath); +}); + +export function codexContinuationIdentity(layout: CodexHomeLayout) { + return { + driverKind: ProviderDriverKind.make("codex"), + continuationKey: layout.continuationKey, + }; +} diff --git a/apps/server/src/provider/Drivers/CopilotDriver.ts b/apps/server/src/provider/Drivers/CopilotDriver.ts new file mode 100644 index 00000000000..e6bb8b7bcbb --- /dev/null +++ b/apps/server/src/provider/Drivers/CopilotDriver.ts @@ -0,0 +1,140 @@ +/** + * CopilotDriver — `ProviderDriver` for the GitHub Copilot SDK runtime. + * + * Mirrors the Claude / OpenCode drivers: a plain value whose `create()` + * returns one `ProviderInstance` bundling `snapshot` / `adapter` / + * `textGeneration` closures captured over the per-instance + * `CopilotSettings`. + * + * Two driver instances therefore share zero mutable state — each owns its + * own session map, runtime event queue, approval resolvers, and turn + * tracker. Multiple GitHub accounts can be wired up by registering two + * driver instances against different `binaryPath`/`configDir` configs. + * + * Desktop-environment quirk: every spawn that ends up shelling out to the + * Copilot CLI flows through `withSanitizedCopilotDesktopEnv`, removing + * `ELECTRON_RUN_AS_NODE`, `ELECTRON_RENDERER_PORT`, and `CLAUDECODE` from + * the inherited environment so the spawned binary doesn't observe Electron + * host state. 
That helper lives in `copilotCliPath.ts` and is used inside + * the adapter, the snapshot probe, and the (optional) text-generation + * factory — there is no other place spawns happen for this driver. + * + * @module provider/Drivers/CopilotDriver + */ +import { ProviderDriverKind, type ServerProvider } from "@t3tools/contracts"; +import { Duration, Effect, FileSystem, Path, Schema, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { ServerConfig } from "../../config.ts"; +import { makeCopilotTextGeneration } from "../../textGeneration/CopilotTextGeneration.ts"; +import { ProviderDriverError } from "../Errors.ts"; +import { makeCopilotAdapter } from "../Layers/CopilotAdapter.ts"; +import { + checkCopilotProviderStatus, + makePendingCopilotProvider, +} from "../Layers/CopilotProvider.ts"; +import { ProviderEventLoggers } from "../Layers/ProviderEventLoggers.ts"; +import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; +import { + defaultProviderContinuationIdentity, + type ProviderDriver, + type ProviderInstance, +} from "../ProviderDriver.ts"; +import type { ServerProviderDraft } from "../providerSnapshot.ts"; +import { mergeProviderInstanceEnvironment } from "../ProviderInstanceEnvironment.ts"; +import { CopilotSettings } from "./CopilotSettings.ts"; + +const DRIVER_KIND = ProviderDriverKind.make("copilot"); +const SNAPSHOT_REFRESH_INTERVAL = Duration.minutes(5); + +export type CopilotDriverEnv = + | ChildProcessSpawner.ChildProcessSpawner + | FileSystem.FileSystem + | Path.Path + | ProviderEventLoggers + | ServerConfig; + +const withInstanceIdentity = + (input: { + readonly instanceId: ProviderInstance["instanceId"]; + readonly displayName: string | undefined; + readonly accentColor: string | undefined; + readonly continuationGroupKey: string; + }) => + (snapshot: ServerProviderDraft): ServerProvider => ({ + ...snapshot, + instanceId: input.instanceId, + driver: DRIVER_KIND, + 
...(input.displayName ? { displayName: input.displayName } : {}), + ...(input.accentColor ? { accentColor: input.accentColor } : {}), + continuation: { groupKey: input.continuationGroupKey }, + }); + +export const CopilotDriver: ProviderDriver = { + driverKind: DRIVER_KIND, + metadata: { + displayName: "GitHub Copilot", + supportsMultipleInstances: true, + }, + configSchema: CopilotSettings, + defaultConfig: (): CopilotSettings => Schema.decodeSync(CopilotSettings)({}), + create: ({ instanceId, displayName, accentColor, environment, enabled, config }) => + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + const eventLoggers = yield* ProviderEventLoggers; + const processEnv = mergeProviderInstanceEnvironment(environment); + const continuationIdentity = defaultProviderContinuationIdentity({ + driverKind: DRIVER_KIND, + instanceId, + }); + const stampIdentity = withInstanceIdentity({ + instanceId, + displayName, + accentColor, + continuationGroupKey: continuationIdentity.continuationKey, + }); + const effectiveConfig = { ...config, enabled } satisfies CopilotSettings; + + const adapter = yield* makeCopilotAdapter(effectiveConfig, { + instanceId, + ...(eventLoggers.native ? 
{ nativeEventLogger: eventLoggers.native } : {}), + }); + const textGeneration = yield* makeCopilotTextGeneration(effectiveConfig, processEnv); + + const checkProvider = checkCopilotProviderStatus(effectiveConfig, processEnv).pipe( + Effect.map(stampIdentity), + Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), + ); + + const snapshot = yield* makeManagedServerProvider({ + getSettings: Effect.succeed(effectiveConfig), + streamSettings: Stream.never, + haveSettingsChanged: () => false, + initialSnapshot: (settings) => stampIdentity(makePendingCopilotProvider(settings)), + checkProvider, + refreshInterval: SNAPSHOT_REFRESH_INTERVAL, + }).pipe( + Effect.mapError( + (cause) => + new ProviderDriverError({ + driver: DRIVER_KIND, + instanceId, + detail: `Failed to build GitHub Copilot snapshot: ${cause.message ?? String(cause)}`, + cause, + }), + ), + ); + + return { + instanceId, + driverKind: DRIVER_KIND, + continuationIdentity, + displayName, + accentColor, + enabled, + snapshot, + adapter, + textGeneration, + } satisfies ProviderInstance; + }), +}; diff --git a/apps/server/src/provider/Drivers/CopilotSettings.ts b/apps/server/src/provider/Drivers/CopilotSettings.ts new file mode 100644 index 00000000000..4c02e72c34b --- /dev/null +++ b/apps/server/src/provider/Drivers/CopilotSettings.ts @@ -0,0 +1,27 @@ +/** + * CopilotSettings — local typed config schema for the GitHub Copilot driver. + * + * This module owns the driver-side schema temporarily; the canonical home + * is `packages/contracts/src/settings.ts`, but until that lands the driver + * needs a typed `Schema.Codec` for its + * `configSchema` SPI slot. The shape mirrors what we expect to publish in + * contracts (see `INTEGRATION_copilot.md`). + * + * Fields: + * - `enabled` — Master switch; default `true` (matches the legacy + * GenericProviderSettings entry). + * - `binaryPath` — Path to the Copilot CLI binary; empty defaults to + * the bundled CLI (see `copilotCliPath.ts`). 
+ * - `configDir` — Optional override for the Copilot config dir. + * - `customModels` — User-added model slugs. + */ +import { Effect } from "effect"; +import * as Schema from "effect/Schema"; + +export const CopilotSettings = Schema.Struct({ + enabled: Schema.Boolean.pipe(Schema.withDecodingDefault(Effect.succeed(true))), + binaryPath: Schema.Trim.pipe(Schema.withDecodingDefault(Effect.succeed(""))), + configDir: Schema.Trim.pipe(Schema.withDecodingDefault(Effect.succeed(""))), + customModels: Schema.Array(Schema.String).pipe(Schema.withDecodingDefault(Effect.succeed([]))), +}); +export type CopilotSettings = typeof CopilotSettings.Type; diff --git a/apps/server/src/provider/Drivers/CursorDriver.ts b/apps/server/src/provider/Drivers/CursorDriver.ts new file mode 100644 index 00000000000..cd058800f29 --- /dev/null +++ b/apps/server/src/provider/Drivers/CursorDriver.ts @@ -0,0 +1,148 @@ +/** + * CursorDriver — `ProviderDriver` for the Cursor Agent (`agent`) runtime. + * + * Cursor exposes an ACP-based CLI. The driver is still a plain value, but + * its snapshot uses `makeManagedServerProvider`'s optional `enrichSnapshot` + * hook to run the slow ACP model-capability probe in the background without + * blocking the initial `ready`-state publish. + * + * Text generation is supported via the ACP runtime — `makeCursorTextGeneration` + * drives `runtime.prompt` with a structured-output schema and collects the + * agent's `agent_message_chunk` stream into a single JSON blob. 
+ * + * @module provider/Drivers/CursorDriver + */ +import { CursorSettings, ProviderDriverKind, type ServerProvider } from "@t3tools/contracts"; +import { Duration, Effect, FileSystem, Path, Schema, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { ServerConfig } from "../../config.ts"; +import { makeCursorTextGeneration } from "../../textGeneration/CursorTextGeneration.ts"; +import { ProviderDriverError } from "../Errors.ts"; +import { makeCursorAdapter } from "../Layers/CursorAdapter.ts"; +import { + buildInitialCursorProviderSnapshot, + checkCursorProviderStatus, + enrichCursorSnapshot, +} from "../Layers/CursorProvider.ts"; +import { ProviderEventLoggers } from "../Layers/ProviderEventLoggers.ts"; +import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; +import { + defaultProviderContinuationIdentity, + type ProviderDriver, + type ProviderInstance, +} from "../ProviderDriver.ts"; +import type { ServerProviderDraft } from "../providerSnapshot.ts"; +import { mergeProviderInstanceEnvironment } from "../ProviderInstanceEnvironment.ts"; + +const DRIVER_KIND = ProviderDriverKind.make("cursor"); +const SNAPSHOT_REFRESH_INTERVAL = Duration.minutes(5); + +export type CursorDriverEnv = + | ChildProcessSpawner.ChildProcessSpawner + | FileSystem.FileSystem + | Path.Path + | ProviderEventLoggers + | ServerConfig; + +const withInstanceIdentity = + (input: { + readonly instanceId: ProviderInstance["instanceId"]; + readonly displayName: string | undefined; + readonly accentColor: string | undefined; + readonly continuationGroupKey: string; + }) => + (snapshot: ServerProviderDraft): ServerProvider => ({ + ...snapshot, + instanceId: input.instanceId, + driver: DRIVER_KIND, + ...(input.displayName ? { displayName: input.displayName } : {}), + ...(input.accentColor ? 
{ accentColor: input.accentColor } : {}), + continuation: { groupKey: input.continuationGroupKey }, + }); + +export const CursorDriver: ProviderDriver = { + driverKind: DRIVER_KIND, + metadata: { + displayName: "Cursor", + supportsMultipleInstances: true, + }, + configSchema: CursorSettings, + defaultConfig: (): CursorSettings => Schema.decodeSync(CursorSettings)({}), + create: ({ instanceId, displayName, accentColor, environment, enabled, config }) => + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const eventLoggers = yield* ProviderEventLoggers; + const processEnv = mergeProviderInstanceEnvironment(environment); + const continuationIdentity = defaultProviderContinuationIdentity({ + driverKind: DRIVER_KIND, + instanceId, + }); + const stampIdentity = withInstanceIdentity({ + instanceId, + displayName, + accentColor, + continuationGroupKey: continuationIdentity.continuationKey, + }); + const effectiveConfig = { ...config, enabled } satisfies CursorSettings; + + const adapter = yield* makeCursorAdapter(effectiveConfig, { + environment: processEnv, + ...(eventLoggers.native ? 
{ nativeEventLogger: eventLoggers.native } : {}), + instanceId, + }); + const textGeneration = yield* makeCursorTextGeneration(effectiveConfig, processEnv); + + const checkProvider = checkCursorProviderStatus(effectiveConfig, processEnv).pipe( + Effect.map(stampIdentity), + Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), + Effect.provideService(FileSystem.FileSystem, fileSystem), + Effect.provideService(Path.Path, path), + ); + + const snapshot = yield* makeManagedServerProvider({ + getSettings: Effect.succeed(effectiveConfig), + streamSettings: Stream.never, + haveSettingsChanged: () => false, + initialSnapshot: (settings) => stampIdentity(buildInitialCursorProviderSnapshot(settings)), + checkProvider, + // Preserve the background ACP model-capability probe that used to + // live on `CursorProviderLive`. Only fires when the snapshot reports + // an authenticated, enabled provider with at least one non-custom + // model whose capabilities haven't been captured yet. + enrichSnapshot: ({ settings, snapshot: currentSnapshot, publishSnapshot }) => + enrichCursorSnapshot({ + settings, + environment: processEnv, + snapshot: currentSnapshot, + publishSnapshot, + stampIdentity, + }).pipe(Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner)), + refreshInterval: SNAPSHOT_REFRESH_INTERVAL, + }).pipe( + Effect.mapError( + (cause) => + new ProviderDriverError({ + driver: DRIVER_KIND, + instanceId, + detail: `Failed to build Cursor snapshot: ${cause.message ?? 
String(cause)}`, + cause, + }), + ), + ); + + return { + instanceId, + driverKind: DRIVER_KIND, + continuationIdentity, + displayName, + accentColor, + enabled, + snapshot, + adapter, + textGeneration, + } satisfies ProviderInstance; + }), +}; diff --git a/apps/server/src/provider/Drivers/GeminiCliDriver.ts b/apps/server/src/provider/Drivers/GeminiCliDriver.ts new file mode 100644 index 00000000000..774883c14e7 --- /dev/null +++ b/apps/server/src/provider/Drivers/GeminiCliDriver.ts @@ -0,0 +1,138 @@ +/** + * GeminiCliDriver — `ProviderDriver` for the Google Gemini CLI runtime. + * + * Mirrors `ClaudeDriver` / `OpenCodeDriver`: a plain value whose `create()` + * returns one `ProviderInstance` bundling `snapshot` / `adapter` / + * `textGeneration` closures captured over the per-instance + * `GenericProviderSettings` payload (used as `GeminiCliSettings` until the + * settings schema is split out). + * + * Two instances with different `binaryPath`s spawn fully independent + * `gemini` subprocesses — there is no module-global session table or usage + * accumulator anymore. 
+ * + * @module provider/Drivers/GeminiCliDriver + */ +import { + GenericProviderSettings, + ProviderDriverKind, + type ServerProvider, +} from "@t3tools/contracts"; +import { Duration, Effect, FileSystem, Path, Schema, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { ServerConfig } from "../../config.ts"; +import { makeGeminiCliTextGeneration } from "../../textGeneration/GeminiCliTextGeneration.ts"; +import { ProviderDriverError } from "../Errors.ts"; +import { makeGeminiCliAdapter } from "../Layers/GeminiCliAdapter.ts"; +import { + checkGeminiCliStatus, + makePendingGeminiCliProvider, +} from "../Layers/GeminiCliProvider.ts"; +import { ProviderEventLoggers } from "../Layers/ProviderEventLoggers.ts"; +import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; +import { + defaultProviderContinuationIdentity, + type ProviderDriver, + type ProviderInstance, +} from "../ProviderDriver.ts"; +import type { ServerProviderDraft } from "../providerSnapshot.ts"; +import { mergeProviderInstanceEnvironment } from "../ProviderInstanceEnvironment.ts"; + +const DRIVER_KIND = ProviderDriverKind.make("geminiCli"); +const SNAPSHOT_REFRESH_INTERVAL = Duration.minutes(5); + +export type GeminiCliDriverEnv = + | ChildProcessSpawner.ChildProcessSpawner + | FileSystem.FileSystem + | Path.Path + | ProviderEventLoggers + | ServerConfig; + +const withInstanceIdentity = + (input: { + readonly instanceId: ProviderInstance["instanceId"]; + readonly displayName: string | undefined; + readonly accentColor: string | undefined; + readonly continuationGroupKey: string; + }) => + (snapshot: ServerProviderDraft): ServerProvider => ({ + ...snapshot, + instanceId: input.instanceId, + driver: DRIVER_KIND, + ...(input.displayName ? { displayName: input.displayName } : {}), + ...(input.accentColor ? 
{ accentColor: input.accentColor } : {}), + continuation: { groupKey: input.continuationGroupKey }, + }); + +export const GeminiCliDriver: ProviderDriver = { + driverKind: DRIVER_KIND, + metadata: { + displayName: "Gemini CLI", + supportsMultipleInstances: true, + }, + configSchema: GenericProviderSettings, + defaultConfig: (): GenericProviderSettings => Schema.decodeSync(GenericProviderSettings)({}), + create: ({ instanceId, displayName, accentColor, environment, enabled, config }) => + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + const path = yield* Path.Path; + const eventLoggers = yield* ProviderEventLoggers; + const processEnv = mergeProviderInstanceEnvironment(environment); + const continuationIdentity = defaultProviderContinuationIdentity({ + driverKind: DRIVER_KIND, + instanceId, + }); + const stampIdentity = withInstanceIdentity({ + instanceId, + displayName, + accentColor, + continuationGroupKey: continuationIdentity.continuationKey, + }); + const effectiveConfig = { ...config, enabled } satisfies GenericProviderSettings; + + const adapter = yield* makeGeminiCliAdapter(effectiveConfig, { + instanceId, + environment: processEnv, + ...(eventLoggers.native ? 
{ nativeEventLogger: eventLoggers.native } : {}), + }); + const textGeneration = yield* makeGeminiCliTextGeneration(effectiveConfig, processEnv); + + const checkProvider = checkGeminiCliStatus(effectiveConfig, processEnv).pipe( + Effect.map(stampIdentity), + Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), + Effect.provideService(Path.Path, path), + ); + + const snapshot = yield* makeManagedServerProvider({ + getSettings: Effect.succeed(effectiveConfig), + streamSettings: Stream.never, + haveSettingsChanged: () => false, + initialSnapshot: (settings) => stampIdentity(makePendingGeminiCliProvider(settings)), + checkProvider, + refreshInterval: SNAPSHOT_REFRESH_INTERVAL, + }).pipe( + Effect.mapError( + (cause) => + new ProviderDriverError({ + driver: DRIVER_KIND, + instanceId, + detail: `Failed to build Gemini CLI snapshot: ${cause.message ?? String(cause)}`, + cause, + }), + ), + ); + + return { + instanceId, + driverKind: DRIVER_KIND, + continuationIdentity, + displayName, + accentColor, + enabled, + snapshot, + adapter, + textGeneration, + } satisfies ProviderInstance; + }), +}; diff --git a/apps/server/src/provider/Drivers/KiloDriver.ts b/apps/server/src/provider/Drivers/KiloDriver.ts new file mode 100644 index 00000000000..dddba4ac28d --- /dev/null +++ b/apps/server/src/provider/Drivers/KiloDriver.ts @@ -0,0 +1,133 @@ +/** + * KiloDriver — `ProviderDriver` for the Kilo Code runtime. + * + * Mirrors `OpenCodeDriver`: a plain value whose `create()` bundles + * `snapshot` / `adapter` / `textGeneration` closures over the per-instance + * `KiloSettings` (currently `GenericProviderSettings`). Every instance + * spins up its own `KiloServerManager`, so two Kilo instances never share + * server processes, sessions, or runtime event queues. 
+ * + * @module provider/Drivers/KiloDriver + */ +import { + GenericProviderSettings, + ProviderDriverKind, + type ServerProvider, +} from "@t3tools/contracts"; +import { Duration, Effect, FileSystem, Path, Schema, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { ServerConfig } from "../../config.ts"; +import { makeKiloTextGeneration } from "../../textGeneration/KiloTextGeneration.ts"; +import { ProviderDriverError } from "../Errors.ts"; +import { makeKiloAdapter } from "../Layers/KiloAdapter.ts"; +import { + checkKiloProviderStatus, + makePendingKiloProvider, + type KiloSettings, +} from "../Layers/KiloProvider.ts"; +import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; +import { + defaultProviderContinuationIdentity, + type ProviderDriver, + type ProviderInstance, +} from "../ProviderDriver.ts"; +import type { ServerProviderDraft } from "../providerSnapshot.ts"; +import { mergeProviderInstanceEnvironment } from "../ProviderInstanceEnvironment.ts"; + +const DRIVER_KIND = ProviderDriverKind.make("kilo"); +const SNAPSHOT_REFRESH_INTERVAL = Duration.minutes(5); + +export type KiloDriverEnv = + | ChildProcessSpawner.ChildProcessSpawner + | FileSystem.FileSystem + | Path.Path + | ServerConfig; + +const withInstanceIdentity = + (input: { + readonly instanceId: ProviderInstance["instanceId"]; + readonly displayName: string | undefined; + readonly accentColor: string | undefined; + readonly continuationGroupKey: string; + }) => + (snapshot: ServerProviderDraft): ServerProvider => ({ + ...snapshot, + instanceId: input.instanceId, + driver: DRIVER_KIND, + ...(input.displayName ? { displayName: input.displayName } : {}), + ...(input.accentColor ? 
{ accentColor: input.accentColor } : {}), + continuation: { groupKey: input.continuationGroupKey }, + }); + +export const KiloDriver: ProviderDriver = { + driverKind: DRIVER_KIND, + metadata: { + displayName: "Kilo Code", + supportsMultipleInstances: true, + }, + configSchema: GenericProviderSettings, + defaultConfig: (): KiloSettings => Schema.decodeSync(GenericProviderSettings)({}), + create: ({ instanceId, displayName, accentColor, environment, enabled, config }) => + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const processEnv = mergeProviderInstanceEnvironment(environment); + const continuationIdentity = defaultProviderContinuationIdentity({ + driverKind: DRIVER_KIND, + instanceId, + }); + const stampIdentity = withInstanceIdentity({ + instanceId, + displayName, + accentColor, + continuationGroupKey: continuationIdentity.continuationKey, + }); + const effectiveConfig = { ...config, enabled } satisfies KiloSettings; + + const adapter = yield* makeKiloAdapter(effectiveConfig, { + instanceId, + environment: processEnv, + }); + const textGeneration = yield* makeKiloTextGeneration(effectiveConfig, processEnv); + + const checkProvider = checkKiloProviderStatus(effectiveConfig, processEnv).pipe( + Effect.map(stampIdentity), + Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), + Effect.provideService(FileSystem.FileSystem, fileSystem), + Effect.provideService(Path.Path, path), + ); + + const snapshot = yield* makeManagedServerProvider({ + getSettings: Effect.succeed(effectiveConfig), + streamSettings: Stream.never, + haveSettingsChanged: () => false, + initialSnapshot: (settings) => stampIdentity(makePendingKiloProvider(settings)), + checkProvider, + refreshInterval: SNAPSHOT_REFRESH_INTERVAL, + }).pipe( + Effect.mapError( + (cause) => + new ProviderDriverError({ + driver: DRIVER_KIND, + instanceId, + detail: 
`Failed to build Kilo snapshot: ${cause.message ?? String(cause)}`, + cause, + }), + ), + ); + + return { + instanceId, + driverKind: DRIVER_KIND, + continuationIdentity, + displayName, + accentColor, + enabled, + snapshot, + adapter, + textGeneration, + } satisfies ProviderInstance; + }), +}; diff --git a/apps/server/src/provider/Drivers/OpenCodeDriver.ts b/apps/server/src/provider/Drivers/OpenCodeDriver.ts new file mode 100644 index 00000000000..27f98a9830d --- /dev/null +++ b/apps/server/src/provider/Drivers/OpenCodeDriver.ts @@ -0,0 +1,135 @@ +/** + * OpenCodeDriver — `ProviderDriver` for the OpenCode runtime. + * + * Mirrors the Codex / Claude drivers: a plain value whose `create()` + * bundles `snapshot` / `adapter` / `textGeneration` closures over the + * per-instance `OpenCodeSettings`. + * + * Two instances with different `serverUrl`s therefore talk to independent + * OpenCode servers; when no `serverUrl` is set, the adapter + text-generation + * shares spin up their own scoped child processes, and those child + * processes are released when the registry scope closes. 
+ * + * @module provider/Drivers/OpenCodeDriver + */ +import { OpenCodeSettings, ProviderDriverKind, type ServerProvider } from "@t3tools/contracts"; +import { Duration, Effect, FileSystem, Path, Schema, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { makeOpenCodeTextGeneration } from "../../textGeneration/OpenCodeTextGeneration.ts"; +import { ServerConfig } from "../../config.ts"; +import { ProviderDriverError } from "../Errors.ts"; +import { makeOpenCodeAdapter } from "../Layers/OpenCodeAdapter.ts"; +import { + checkOpenCodeProviderStatus, + makePendingOpenCodeProvider, +} from "../Layers/OpenCodeProvider.ts"; +import { ProviderEventLoggers } from "../Layers/ProviderEventLoggers.ts"; +import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; +import { OpenCodeRuntime } from "../opencodeRuntime.ts"; +import { + defaultProviderContinuationIdentity, + type ProviderDriver, + type ProviderInstance, +} from "../ProviderDriver.ts"; +import type { ServerProviderDraft } from "../providerSnapshot.ts"; +import { mergeProviderInstanceEnvironment } from "../ProviderInstanceEnvironment.ts"; + +const DRIVER_KIND = ProviderDriverKind.make("opencode"); +const SNAPSHOT_REFRESH_INTERVAL = Duration.minutes(5); + +export type OpenCodeDriverEnv = + | ChildProcessSpawner.ChildProcessSpawner + | FileSystem.FileSystem + | OpenCodeRuntime + | Path.Path + | ProviderEventLoggers + | ServerConfig; + +const withInstanceIdentity = + (input: { + readonly instanceId: ProviderInstance["instanceId"]; + readonly displayName: string | undefined; + readonly accentColor: string | undefined; + readonly continuationGroupKey: string; + }) => + (snapshot: ServerProviderDraft): ServerProvider => ({ + ...snapshot, + instanceId: input.instanceId, + driver: DRIVER_KIND, + ...(input.displayName ? { displayName: input.displayName } : {}), + ...(input.accentColor ? 
{ accentColor: input.accentColor } : {}), + continuation: { groupKey: input.continuationGroupKey }, + }); + +export const OpenCodeDriver: ProviderDriver = { + driverKind: DRIVER_KIND, + metadata: { + displayName: "OpenCode", + supportsMultipleInstances: true, + }, + configSchema: OpenCodeSettings, + defaultConfig: (): OpenCodeSettings => Schema.decodeSync(OpenCodeSettings)({}), + create: ({ instanceId, displayName, accentColor, environment, enabled, config }) => + Effect.gen(function* () { + const openCodeRuntime = yield* OpenCodeRuntime; + const serverConfig = yield* ServerConfig; + const eventLoggers = yield* ProviderEventLoggers; + const processEnv = mergeProviderInstanceEnvironment(environment); + const continuationIdentity = defaultProviderContinuationIdentity({ + driverKind: DRIVER_KIND, + instanceId, + }); + const stampIdentity = withInstanceIdentity({ + instanceId, + displayName, + accentColor, + continuationGroupKey: continuationIdentity.continuationKey, + }); + const effectiveConfig = { ...config, enabled } satisfies OpenCodeSettings; + + const adapter = yield* makeOpenCodeAdapter(effectiveConfig, { + instanceId, + environment: processEnv, + ...(eventLoggers.native ? 
{ nativeEventLogger: eventLoggers.native } : {}), + }); + const textGeneration = yield* makeOpenCodeTextGeneration(effectiveConfig, processEnv); + + const checkProvider = checkOpenCodeProviderStatus( + effectiveConfig, + serverConfig.cwd, + processEnv, + ).pipe(Effect.map(stampIdentity), Effect.provideService(OpenCodeRuntime, openCodeRuntime)); + + const snapshot = yield* makeManagedServerProvider({ + getSettings: Effect.succeed(effectiveConfig), + streamSettings: Stream.never, + haveSettingsChanged: () => false, + initialSnapshot: (settings) => stampIdentity(makePendingOpenCodeProvider(settings)), + checkProvider, + refreshInterval: SNAPSHOT_REFRESH_INTERVAL, + }).pipe( + Effect.mapError( + (cause) => + new ProviderDriverError({ + driver: DRIVER_KIND, + instanceId, + detail: `Failed to build OpenCode snapshot: ${cause.message ?? String(cause)}`, + cause, + }), + ), + ); + + return { + instanceId, + driverKind: DRIVER_KIND, + continuationIdentity, + displayName, + accentColor, + enabled, + snapshot, + adapter, + textGeneration, + } satisfies ProviderInstance; + }), +}; diff --git a/apps/server/src/provider/Errors.ts b/apps/server/src/provider/Errors.ts index e4e46d37486..2bd926512ed 100644 --- a/apps/server/src/provider/Errors.ts +++ b/apps/server/src/provider/Errors.ts @@ -116,6 +116,46 @@ export class ProviderUnsupportedError extends Schema.TaggedErrorClass()( + "ProviderInstanceNotFoundError", + { + instanceId: Schema.String, + cause: Schema.optional(Schema.Defect), + }, +) { + override get message(): string { + return `No provider instance bound to id '${this.instanceId}'`; + } +} + +/** + * ProviderDriverError - A driver `create` call failed before producing an + * instance. Surfaced to the registry, which marks the offending entry as + * an "unavailable" shadow snapshot rather than crashing the server. 
+ */ +export class ProviderDriverError extends Schema.TaggedErrorClass()( + "ProviderDriverError", + { + driver: Schema.String, + instanceId: Schema.String, + detail: Schema.String, + cause: Schema.optional(Schema.Defect), + }, +) { + override get message(): string { + return `Provider driver '${this.driver}' failed to create instance '${this.instanceId}': ${this.detail}`; + } +} + /** * ProviderSessionNotFoundError - Provider-facing session not found. */ @@ -157,6 +197,7 @@ export type ProviderAdapterError = export type ProviderServiceError = | ProviderValidationError | ProviderUnsupportedError + | ProviderInstanceNotFoundError | ProviderSessionNotFoundError | ProviderSessionDirectoryPersistenceError | ProviderAdapterError diff --git a/apps/server/src/provider/Layers/AmpAdapter.test.ts b/apps/server/src/provider/Layers/AmpAdapter.test.ts index 447b04a449d..190b3b62960 100644 --- a/apps/server/src/provider/Layers/AmpAdapter.test.ts +++ b/apps/server/src/provider/Layers/AmpAdapter.test.ts @@ -3,6 +3,7 @@ import assert from "node:assert/strict"; import { ApprovalRequestId, EventId, + GenericProviderSettings, RuntimeItemId, ThreadId, TurnId, @@ -13,12 +14,10 @@ import { type ProviderUserInputAnswers, } from "@t3tools/contracts"; import { it, vi } from "@effect/vitest"; -import { Effect, Layer, Stream } from "effect"; +import { Effect, Schema, Stream } from "effect"; import { AmpServerManager } from "../../ampServerManager.ts"; -import { AmpAdapter } from "../Services/AmpAdapter.ts"; -import { makeAmpAdapterLive } from "./AmpAdapter.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +import { makeAmpAdapter } from "./AmpAdapter.ts"; const asThreadId = (value: string): ThreadId => ThreadId.make(value); const asTurnId = (value: string): TurnId => TurnId.make(value); @@ -105,81 +104,139 @@ class FakeAmpManager extends AmpServerManager { } } -const manager = new FakeAmpManager(); -const layer = it.layer( - makeAmpAdapterLive({ manager 
}).pipe(Layer.provideMerge(ServerSettingsService.layerTest())), +const enabledAmpSettings = Schema.decodeSync(GenericProviderSettings)({ + enabled: true, + binaryPath: "", + configDir: "", + customModels: [], +}); + +const disabledAmpSettings = Schema.decodeSync(GenericProviderSettings)({ + enabled: false, + binaryPath: "", + configDir: "", + customModels: [], +}); + +it.effect("AmpAdapter delegates session startup to the manager", () => + Effect.gen(function* () { + const manager = new FakeAmpManager(); + const adapter = yield* makeAmpAdapter(enabledAmpSettings, { manager }); + + const session = yield* adapter.startSession({ + threadId: asThreadId("thread-1"), + runtimeMode: "full-access", + }); + + assert.equal(session.provider, "amp"); + assert.equal(manager.startSessionImpl.mock.calls[0]?.[0], asThreadId("thread-1")); + }).pipe(Effect.scoped), ); -layer("AmpAdapterLive", (it) => { - it.effect("delegates session startup to the manager", () => - Effect.gen(function* () { - manager.startSessionImpl.mockClear(); - const adapter = yield* AmpAdapter; +it.effect("AmpAdapter rejects startSession when provider is disabled", () => + Effect.gen(function* () { + const manager = new FakeAmpManager(); + const adapter = yield* makeAmpAdapter(disabledAmpSettings, { manager }); - const session = yield* adapter.startSession({ - threadId: asThreadId("thread-1"), + const result = yield* adapter + .startSession({ + threadId: asThreadId("thread-disabled"), runtimeMode: "full-access", - }); + }) + .pipe(Effect.result); + + assert.equal(result._tag, "Failure"); + if (result._tag !== "Failure") { + return; + } + assert.equal(result.failure._tag, "ProviderAdapterValidationError"); + }).pipe(Effect.scoped), +); - assert.equal(session.provider, "amp"); - assert.equal(manager.startSessionImpl.mock.calls[0]?.[0], asThreadId("thread-1")); - }), - ); +it.effect("AmpAdapter rejects attachments until AMP attachment wiring exists", () => + Effect.gen(function* () { + const manager = new 
FakeAmpManager(); + const adapter = yield* makeAmpAdapter(enabledAmpSettings, { manager }); + + const result = yield* adapter + .sendTurn({ + threadId: asThreadId("thread-attachments"), + input: "hello", + attachments: [{ id: "attachment-1" }] as never, + }) + .pipe(Effect.result); + + assert.equal(result._tag, "Failure"); + if (result._tag !== "Failure") { + return; + } + assert.equal(result.failure._tag, "ProviderAdapterValidationError"); + }).pipe(Effect.scoped), +); - it.effect("rejects attachments until AMP attachment wiring exists", () => - Effect.gen(function* () { - const adapter = yield* AmpAdapter; - const result = yield* adapter - .sendTurn({ - threadId: asThreadId("thread-attachments"), - input: "hello", - attachments: [{ id: "attachment-1" }] as never, - }) - .pipe(Effect.result); - - assert.equal(result._tag, "Failure"); - if (result._tag !== "Failure") { - return; - } - assert.equal(result.failure._tag, "ProviderAdapterValidationError"); - }), - ); +it.effect("AmpAdapter rejects rollbackThread with non-positive numTurns", () => + Effect.gen(function* () { + const manager = new FakeAmpManager(); + const adapter = yield* makeAmpAdapter(enabledAmpSettings, { manager }); + + const result = yield* adapter + .rollbackThread(asThreadId("thread-rollback"), 0) + .pipe(Effect.result); + + assert.equal(result._tag, "Failure"); + if (result._tag !== "Failure") { + return; + } + assert.equal(result.failure._tag, "ProviderAdapterValidationError"); + }).pipe(Effect.scoped), +); - it.effect("forwards manager runtime events through the adapter stream", () => - Effect.gen(function* () { - const adapter = yield* AmpAdapter; - - const event = { - type: "content.delta", - eventId: asEventId("evt-amp-delta"), - provider: "amp", - createdAt: new Date().toISOString(), - threadId: asThreadId("thread-1"), - turnId: asTurnId("turn-1"), - itemId: asItemId("item-1"), - payload: { - streamKind: "assistant_text", - delta: "hello", - }, - } as unknown as ProviderRuntimeEvent; - - 
// Emit first — the event is buffered in the unbounded queue via the - // listener that was registered during layer construction. - manager.emit("event", event); - - // Now consume the head. Since the queue already has an item, this - // resolves immediately without a race condition. - const received = yield* Stream.runHead(adapter.streamEvents); - - assert.equal(received._tag, "Some"); - if (received._tag !== "Some") { - return; - } - assert.equal(received.value.type, "content.delta"); - if (received.value.type !== "content.delta") { - return; - } - assert.equal(received.value.payload.delta, "hello"); - }), - ); -}); +it.effect("AmpAdapter forwards interruptTurn calls to the manager", () => + Effect.gen(function* () { + const manager = new FakeAmpManager(); + const adapter = yield* makeAmpAdapter(enabledAmpSettings, { manager }); + + yield* adapter.interruptTurn(asThreadId("thread-interrupt")); + + assert.equal(manager.interruptTurnImpl.mock.calls.length, 1); + }).pipe(Effect.scoped), +); + +it.effect("AmpAdapter forwards manager runtime events through the adapter stream", () => + Effect.gen(function* () { + const manager = new FakeAmpManager(); + const adapter = yield* makeAmpAdapter(enabledAmpSettings, { manager }); + + const event = { + type: "content.delta", + eventId: asEventId("evt-amp-delta"), + provider: "amp", + createdAt: new Date().toISOString(), + threadId: asThreadId("thread-1"), + turnId: asTurnId("turn-1"), + itemId: asItemId("item-1"), + payload: { + streamKind: "assistant_text", + delta: "hello", + }, + } as unknown as ProviderRuntimeEvent; + + // Emit first — the event is buffered in the unbounded queue via the + // listener that was registered during adapter construction. + manager.emit("event", event); + + // Now consume the head. Since the queue already has an item, this + // resolves immediately without a race condition. 
+ const received = yield* Stream.runHead(adapter.streamEvents); + + assert.equal(received._tag, "Some"); + if (received._tag !== "Some") { + return; + } + assert.equal(received.value.type, "content.delta"); + if (received.value.type !== "content.delta") { + return; + } + assert.equal(received.value.payload.delta, "hello"); + }).pipe(Effect.scoped), +); diff --git a/apps/server/src/provider/Layers/AmpAdapter.ts b/apps/server/src/provider/Layers/AmpAdapter.ts index 5a703ccb8c5..4c46a4fda37 100644 --- a/apps/server/src/provider/Layers/AmpAdapter.ts +++ b/apps/server/src/provider/Layers/AmpAdapter.ts @@ -1,143 +1,150 @@ -import { type ProviderRuntimeEvent } from "@t3tools/contracts"; -import { Effect, Layer, Queue, Stream } from "effect"; +/** + * AmpAdapter — per-instance Amp provider adapter factory. + * + * Replaces the old `Layer.effect(AmpAdapter, ...)` singleton pattern with + * a `makeAmpAdapter(config, options)` factory that returns an + * `Effect` scoped to the caller. Each call yields an + * independent `AmpServerManager` whose lifetime is tied to the registry's + * scope; closing the scope tears down every spawned Amp child process. 
+ * + * @module AmpAdapter + */ +import { + ProviderDriverKind, + ProviderInstanceId, + type GenericProviderSettings, + type ProviderRuntimeEvent, +} from "@t3tools/contracts"; +import { Effect, Queue, Stream } from "effect"; import { AmpServerManager } from "../../ampServerManager.ts"; -import { ProviderAdapterProcessError, ProviderAdapterValidationError } from "../Errors.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; -import { AmpAdapter, type AmpAdapterShape } from "../Services/AmpAdapter.ts"; +import { ProviderAdapterValidationError, type ProviderAdapterError } from "../Errors.ts"; +import type { ProviderAdapterShape } from "../Services/ProviderAdapter.ts"; import { makeErrorHelpers } from "./ProviderAdapterUtils.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -const PROVIDER = "amp" as const; -const { toRequestError } = makeErrorHelpers(PROVIDER); +const PROVIDER = ProviderDriverKind.make("amp"); +const { toRequestError } = makeErrorHelpers("amp"); + +export interface AmpAdapterShape extends ProviderAdapterShape {} export interface AmpAdapterLiveOptions { + readonly instanceId?: ProviderInstanceId; + readonly environment?: NodeJS.ProcessEnv; + /** Optional pre-built manager (used by tests). */ readonly manager?: AmpServerManager; + /** Optional manager factory (used by tests). */ readonly makeManager?: () => AmpServerManager; } -export function makeAmpAdapterLive(options: AmpAdapterLiveOptions = {}) { - return Layer.effect( - AmpAdapter, - Effect.gen(function* () { - const manager = options.manager ?? options.makeManager?.() ?? new AmpServerManager(); - const runtimeEventQueue = yield* Queue.unbounded(); - const serverSettingsService = yield* ServerSettingsService; +export const makeAmpAdapter = Effect.fn("makeAmpAdapter")(function* ( + ampSettings: GenericProviderSettings, + options?: AmpAdapterLiveOptions, +) { + const manager = options?.manager ?? options?.makeManager?.() ?? 
new AmpServerManager(); + const runtimeEventQueue = yield* Queue.unbounded(); - yield* Effect.acquireRelease( - Effect.sync(() => { - const listener = (event: ProviderRuntimeEvent) => { - Effect.runFork(Queue.offer(runtimeEventQueue, event).pipe(Effect.asVoid)); - }; - manager.on("event", listener); - return listener; - }), - (listener) => - Effect.gen(function* () { - manager.off("event", listener); - manager.stopAll(); - yield* Queue.shutdown(runtimeEventQueue); - }), - ); + yield* Effect.acquireRelease( + Effect.sync(() => { + const listener = (event: ProviderRuntimeEvent) => { + Effect.runFork(Queue.offer(runtimeEventQueue, event).pipe(Effect.asVoid)); + }; + manager.on("event", listener); + return listener; + }), + (listener) => + Effect.gen(function* () { + manager.off("event", listener); + manager.stopAll(); + yield* Queue.shutdown(runtimeEventQueue); + }), + ); - const service = { - provider: PROVIDER, - capabilities: getProviderCapabilities(PROVIDER), - startSession: (input) => - Effect.gen(function* () { - const providerSettings = yield* serverSettingsService.getSettings.pipe( - Effect.map((s) => s.providers.amp), - Effect.mapError( - (error) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: error.message, - cause: error, - }), - ), - ); - if (!providerSettings.enabled) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "startSession", - issue: "AMP provider is disabled in server settings.", - }); - } - manager.binaryPath = providerSettings.binaryPath.trim() || undefined; - return yield* Effect.tryPromise({ - try: () => manager.startSession(input), - catch: (cause) => toRequestError(input.threadId, "session/start", cause), - }); - }), - sendTurn: (input) => { - if ((input.attachments?.length ?? 
0) > 0) { - return Effect.fail( - new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "sendTurn", - issue: "AMP attachments are not supported yet.", - }), - ); - } + // Configure the per-instance binary path on the manager up front. Settings + // are immutable for the lifetime of the instance — registries replace the + // instance whenever config changes. + manager.binaryPath = ampSettings.binaryPath.trim() || undefined; - return Effect.tryPromise({ - try: () => manager.sendTurn(input), - catch: (cause) => toRequestError(input.threadId, "session/prompt", cause), + const service: AmpAdapterShape = { + provider: PROVIDER, + capabilities: { sessionModelSwitch: "in-session" }, + startSession: (input) => + Effect.gen(function* () { + if (!ampSettings.enabled) { + return yield* new ProviderAdapterValidationError({ + provider: "amp", + operation: "startSession", + issue: "AMP provider is disabled in server settings.", }); - }, - interruptTurn: (threadId) => - Effect.tryPromise({ - try: () => manager.interruptTurn(threadId), - catch: (cause) => toRequestError(threadId, "session/interrupt", cause), - }), - respondToRequest: (threadId, requestId, decision) => - Effect.tryPromise({ - try: () => manager.respondToRequest(threadId, requestId, decision), - catch: (cause) => toRequestError(threadId, "permission/reply", cause), - }), - respondToUserInput: (threadId, requestId, answers) => - Effect.tryPromise({ - try: () => manager.respondToUserInput(threadId, requestId, answers), - catch: (cause) => toRequestError(threadId, "question/reply", cause), + } + return yield* Effect.tryPromise({ + try: () => manager.startSession(input), + catch: (cause) => toRequestError(input.threadId, "session/start", cause), + }); + }), + sendTurn: (input) => { + if ((input.attachments?.length ?? 
0) > 0) { + return Effect.fail( + new ProviderAdapterValidationError({ + provider: "amp", + operation: "sendTurn", + issue: "AMP attachments are not supported yet.", }), - stopSession: (threadId) => - Effect.sync(() => { - manager.stopSession(threadId); - }), - listSessions: () => Effect.sync(() => manager.listSessions()), - hasSession: (threadId) => Effect.sync(() => manager.hasSession(threadId)), - readThread: (threadId) => - Effect.tryPromise({ - try: () => manager.readThread(threadId), - catch: (cause) => toRequestError(threadId, "session/messages", cause), - }), - rollbackThread: (threadId, numTurns) => { - if (!Number.isInteger(numTurns) || numTurns < 1) { - return Effect.fail( - new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "rollbackThread", - issue: "numTurns must be an integer >= 1.", - }), - ); - } + ); + } - return Effect.tryPromise({ - try: () => manager.rollbackThread(threadId), - catch: (cause) => toRequestError(threadId, "session/revert", cause), - }); - }, - stopAll: () => - Effect.sync(() => { - manager.stopAll(); + return Effect.tryPromise({ + try: () => manager.sendTurn(input), + catch: (cause) => toRequestError(input.threadId, "session/prompt", cause), + }); + }, + interruptTurn: (threadId) => + Effect.tryPromise({ + try: () => manager.interruptTurn(threadId), + catch: (cause) => toRequestError(threadId, "session/interrupt", cause), + }), + respondToRequest: (threadId, requestId, decision) => + Effect.tryPromise({ + try: () => manager.respondToRequest(threadId, requestId, decision), + catch: (cause) => toRequestError(threadId, "permission/reply", cause), + }), + respondToUserInput: (threadId, requestId, answers) => + Effect.tryPromise({ + try: () => manager.respondToUserInput(threadId, requestId, answers), + catch: (cause) => toRequestError(threadId, "question/reply", cause), + }), + stopSession: (threadId) => + Effect.sync(() => { + manager.stopSession(threadId); + }), + listSessions: () => Effect.sync(() => 
manager.listSessions()), + hasSession: (threadId) => Effect.sync(() => manager.hasSession(threadId)), + readThread: (threadId) => + Effect.tryPromise({ + try: () => manager.readThread(threadId), + catch: (cause) => toRequestError(threadId, "session/messages", cause), + }), + rollbackThread: (threadId, numTurns) => { + if (!Number.isInteger(numTurns) || numTurns < 1) { + return Effect.fail( + new ProviderAdapterValidationError({ + provider: "amp", + operation: "rollbackThread", + issue: "numTurns must be an integer >= 1.", }), - streamEvents: Stream.fromQueue(runtimeEventQueue), - } satisfies AmpAdapterShape; + ); + } - return service; - }), - ); -} + return Effect.tryPromise({ + try: () => manager.rollbackThread(threadId), + catch: (cause) => toRequestError(threadId, "session/revert", cause), + }); + }, + stopAll: () => + Effect.sync(() => { + manager.stopAll(); + }), + streamEvents: Stream.fromQueue(runtimeEventQueue), + }; -export const AmpAdapterLive = makeAmpAdapterLive(); + return service; +}); diff --git a/apps/server/src/provider/Layers/AmpProvider.ts b/apps/server/src/provider/Layers/AmpProvider.ts new file mode 100644 index 00000000000..d6cf0ea94de --- /dev/null +++ b/apps/server/src/provider/Layers/AmpProvider.ts @@ -0,0 +1,230 @@ +/** + * AmpProvider — snapshot probe for the Amp CLI provider. + * + * Mirrors the Claude / Cursor / OpenCode provider modules: exposes a + * `checkAmpStatus` effect that runs `amp --version` to detect the binary + * and a `makePendingAmpProvider` initial-snapshot helper for the loading + * state surfaced before the first probe completes. 
+ * + * @module AmpProvider + */ +import { + ProviderDriverKind, + type GenericProviderSettings, + type ModelCapabilities, + type ServerProviderModel, +} from "@t3tools/contracts"; +import { Effect, Option, Path, Result } from "effect"; +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; +import { createModelCapabilities } from "@t3tools/shared/model"; + +import { + buildSelectOptionDescriptor, + buildServerProvider, + DEFAULT_TIMEOUT_MS, + detailFromResult, + isCommandMissingCause, + parseGenericCliVersion, + providerModelsFromSettings, + spawnAndCollect, + type ServerProviderDraft, +} from "../providerSnapshot.ts"; + +const PROVIDER = ProviderDriverKind.make("amp"); +const AMP_PRESENTATION = { + displayName: "Amp", + showInteractionModeToggle: true, +} as const; + +const DEFAULT_AMP_MODEL_CAPABILITIES: ModelCapabilities = createModelCapabilities({ + optionDescriptors: [], +}); + +const AMP_MODE_CAPABILITIES: ModelCapabilities = createModelCapabilities({ + optionDescriptors: [ + buildSelectOptionDescriptor({ + id: "mode", + label: "Mode", + options: [ + { value: "smart", label: "Smart", isDefault: true }, + { value: "rush", label: "Rush" }, + { value: "deep", label: "Deep" }, + { value: "free", label: "Free" }, + ], + }), + ], +}); + +const BUILT_IN_MODELS: ReadonlyArray = [ + { + slug: "smart", + name: "Amp Smart", + isCustom: false, + capabilities: AMP_MODE_CAPABILITIES, + }, +]; + +function defaultBinaryPath(settings: GenericProviderSettings): string { + const trimmed = settings.binaryPath.trim(); + return trimmed.length > 0 ? 
trimmed : "amp"; +} + +const runAmpCommand = Effect.fn("runAmpCommand")(function* ( + ampSettings: GenericProviderSettings, + args: ReadonlyArray, + environment: NodeJS.ProcessEnv = process.env, +) { + const binaryPath = defaultBinaryPath(ampSettings); + const command = ChildProcess.make(binaryPath, [...args], { + env: environment, + shell: process.platform === "win32", + }); + return yield* spawnAndCollect(binaryPath, command); +}); + +export const checkAmpStatus = Effect.fn("checkAmpStatus")(function* ( + ampSettings: GenericProviderSettings, + environment: NodeJS.ProcessEnv = process.env, +): Effect.fn.Return< + ServerProviderDraft, + never, + ChildProcessSpawner.ChildProcessSpawner | Path.Path +> { + const checkedAt = new Date().toISOString(); + const allModels = providerModelsFromSettings( + BUILT_IN_MODELS, + PROVIDER, + ampSettings.customModels, + DEFAULT_AMP_MODEL_CAPABILITIES, + ); + + if (!ampSettings.enabled) { + return buildServerProvider({ + presentation: AMP_PRESENTATION, + enabled: false, + checkedAt, + models: allModels, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Amp is disabled in T3 Code settings.", + }, + }); + } + + const versionProbe = yield* runAmpCommand(ampSettings, ["--version"], environment).pipe( + Effect.timeoutOption(DEFAULT_TIMEOUT_MS), + Effect.result, + ); + + if (Result.isFailure(versionProbe)) { + const error = versionProbe.failure; + return buildServerProvider({ + presentation: AMP_PRESENTATION, + enabled: ampSettings.enabled, + checkedAt, + models: allModels, + probe: { + installed: !isCommandMissingCause(error), + version: null, + status: "error", + auth: { status: "unknown" }, + message: isCommandMissingCause(error) + ? "Amp CLI (`amp`) is not installed or not on PATH." + : `Failed to execute Amp CLI health check: ${error instanceof Error ? 
error.message : String(error)}.`, + }, + }); + } + + if (Option.isNone(versionProbe.success)) { + return buildServerProvider({ + presentation: AMP_PRESENTATION, + enabled: ampSettings.enabled, + checkedAt, + models: allModels, + probe: { + installed: true, + version: null, + status: "error", + auth: { status: "unknown" }, + message: "Amp CLI is installed but failed to run. Timed out while running command.", + }, + }); + } + + const version = versionProbe.success.value; + const parsedVersion = parseGenericCliVersion(`${version.stdout}\n${version.stderr}`); + if (version.code !== 0) { + const detail = detailFromResult(version); + return buildServerProvider({ + presentation: AMP_PRESENTATION, + enabled: ampSettings.enabled, + checkedAt, + models: allModels, + probe: { + installed: true, + version: parsedVersion, + status: "error", + auth: { status: "unknown" }, + message: detail + ? `Amp CLI is installed but failed to run. ${detail}` + : "Amp CLI is installed but failed to run.", + }, + }); + } + + return buildServerProvider({ + presentation: AMP_PRESENTATION, + enabled: ampSettings.enabled, + checkedAt, + models: allModels, + probe: { + installed: true, + version: parsedVersion, + status: "ready", + auth: { status: "unknown" }, + }, + }); +}); + +export const makePendingAmpProvider = (ampSettings: GenericProviderSettings): ServerProviderDraft => { + const checkedAt = new Date().toISOString(); + const models = providerModelsFromSettings( + BUILT_IN_MODELS, + PROVIDER, + ampSettings.customModels, + DEFAULT_AMP_MODEL_CAPABILITIES, + ); + + if (!ampSettings.enabled) { + return buildServerProvider({ + presentation: AMP_PRESENTATION, + enabled: false, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Amp is disabled in T3 Code settings.", + }, + }); + } + + return buildServerProvider({ + presentation: AMP_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + installed: false, 
+ version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Amp provider status has not been checked in this session yet.", + }, + }); +}; diff --git a/apps/server/src/provider/Layers/ClaudeAdapter.test.ts b/apps/server/src/provider/Layers/ClaudeAdapter.test.ts index 2b53f21f1e2..4c71a016f32 100644 --- a/apps/server/src/provider/Layers/ClaudeAdapter.test.ts +++ b/apps/server/src/provider/Layers/ClaudeAdapter.test.ts @@ -12,20 +12,29 @@ import type { } from "@anthropic-ai/claude-agent-sdk"; import { ApprovalRequestId, + ClaudeSettings, + ProviderDriverKind, ProviderItemId, ProviderRuntimeEvent, type RuntimeMode, ThreadId, + ProviderInstanceId, } from "@t3tools/contracts"; +import { createModelSelection } from "@t3tools/shared/model"; import { assert, describe, it } from "@effect/vitest"; -import { Effect, Fiber, Layer, Random, Stream } from "effect"; +import { Context, Effect, Fiber, Layer, Random, Schema, Stream } from "effect"; import { attachmentRelativePath } from "../../attachmentStore.ts"; import { ServerConfig } from "../../config.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; import { ProviderAdapterValidationError } from "../Errors.ts"; -import { ClaudeAdapter } from "../Services/ClaudeAdapter.ts"; -import { makeClaudeAdapterLive, type ClaudeAdapterLiveOptions } from "./ClaudeAdapter.ts"; +import type { ClaudeAdapterShape } from "../Services/ClaudeAdapter.ts"; +import { makeClaudeAdapter, type ClaudeAdapterLiveOptions } from "./ClaudeAdapter.ts"; + +// Test-local service tag so the rest of the file can keep using `yield* ClaudeAdapter`. 
+class ClaudeAdapter extends Context.Service()( + "test/ClaudeAdapter", +) {} class FakeClaudeQuery implements AsyncIterable { private readonly queue: Array = []; @@ -136,6 +145,8 @@ function makeHarness(config?: { readonly nativeEventLogger?: ClaudeAdapterLiveOptions["nativeEventLogger"]; readonly cwd?: string; readonly baseDir?: string; + readonly claudeConfig?: Partial; + readonly instanceId?: ProviderInstanceId; }) { const query = new FakeClaudeQuery(); let createInput: @@ -146,6 +157,7 @@ function makeHarness(config?: { | undefined; const adapterOptions: ClaudeAdapterLiveOptions = { + ...(config?.instanceId ? { instanceId: config.instanceId } : {}), createQuery: (input) => { createInput = input; return query; @@ -163,7 +175,13 @@ function makeHarness(config?: { }; return { - layer: makeClaudeAdapterLive(adapterOptions).pipe( + layer: Layer.effect( + ClaudeAdapter, + Effect.gen(function* () { + const claudeConfig = Schema.decodeSync(ClaudeSettings)(config?.claudeConfig ?? {}); + return yield* makeClaudeAdapter(claudeConfig, adapterOptions); + }), + ).pipe( Layer.provideMerge( ServerConfig.layerTest( config?.cwd ?? 
"/tmp/claude-adapter-test", @@ -209,11 +227,10 @@ async function readFirstPromptText( if (next.done) { return undefined; } - const msg = next.value.message as any; - if (typeof msg.content === "string") { - return msg.content; + if (typeof next.value.message.content === "string") { + return next.value.message.content; } - const content = msg.content[0]; + const content = next.value.message.content[0]; if (!content || content.type !== "text") { return undefined; } @@ -247,7 +264,11 @@ describe("ClaudeAdapterLive", () => { return Effect.gen(function* () { const adapter = yield* ClaudeAdapter; const result = yield* adapter - .startSession({ threadId: THREAD_ID, provider: "codex", runtimeMode: "full-access" }) + .startSession({ + threadId: THREAD_ID, + provider: ProviderDriverKind.make("codex"), + runtimeMode: "full-access", + }) .pipe(Effect.result); assert.equal(result._tag, "Failure"); @@ -257,7 +278,7 @@ describe("ClaudeAdapterLive", () => { assert.deepEqual( result.failure, new ProviderAdapterValidationError({ - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), operation: "startSession", issue: "Expected provider 'claudeAgent' but received 'codex'.", }), @@ -274,7 +295,7 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -294,7 +315,7 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "approval-required", }); @@ -314,7 +335,7 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -333,14 +354,12 
@@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - effort: "max", - }, - }, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [{ id: "effort", value: "max" }], + ), runtimeMode: "full-access", }); @@ -352,15 +371,37 @@ describe("ClaudeAdapterLive", () => { ); }); + it.effect("runs Claude SDK sessions with the configured Claude HOME", () => { + const harness = makeHarness({ claudeConfig: { homePath: "~/.claude-work" } }); + return Effect.gen(function* () { + const adapter = yield* ClaudeAdapter; + yield* adapter.startSession({ + threadId: THREAD_ID, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + ), + runtimeMode: "full-access", + }); + + const createInput = harness.getLastCreateQueryInput(); + assert.equal(createInput?.options.env?.HOME, path.join(os.homedir(), ".claude-work")); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + it.effect("maps the Claude Opus 4.7 default effort to the SDK-supported max value", () => { const harness = makeHarness(); return Effect.gen(function* () { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-7", }, runtimeMode: "full-access", @@ -380,14 +421,12 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", - 
modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-7", - options: { - effort: "xhigh", - }, - }, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-7", + [{ id: "effort", value: "xhigh" }], + ), runtimeMode: "full-access", }); @@ -405,14 +444,12 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - effort: "max", - }, - }, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: "max" }], + ), runtimeMode: "full-access", }); @@ -430,14 +467,12 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", - modelSelection: { - provider: "claudeAgent", - model: "claude-haiku-4-5", - options: { - effort: "high", - }, - }, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-haiku-4-5", + [{ id: "effort", value: "high" }], + ), runtimeMode: "full-access", }); @@ -455,14 +490,12 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", - modelSelection: { - provider: "claudeAgent", - model: "claude-haiku-4-5", - options: { - thinking: false, - }, - }, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-haiku-4-5", + [{ id: "thinking", value: false }], + ), runtimeMode: "full-access", }); @@ -482,14 +515,12 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* 
ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - thinking: false, - }, - }, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "thinking", value: false }], + ), runtimeMode: "full-access", }); @@ -507,14 +538,12 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - fastMode: true, - }, - }, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [{ id: "fastMode", value: true }], + ), runtimeMode: "full-access", }); @@ -534,14 +563,12 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - fastMode: true, - }, - }, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "fastMode", value: true }], + ), runtimeMode: "full-access", }); @@ -559,14 +586,12 @@ describe("ClaudeAdapterLive", () => { const adapter = yield* ClaudeAdapter; const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - effort: "ultrathink", - }, - }, + provider: ProviderDriverKind.make("claudeAgent"), + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: 
"ultrathink" }], + ), runtimeMode: "full-access", }); @@ -574,13 +599,11 @@ describe("ClaudeAdapterLive", () => { threadId: session.threadId, input: "Investigate the edge cases", attachments: [], - modelSelection: { - provider: "claudeAgent", - model: "claude-sonnet-4-6", - options: { - effort: "ultrathink", - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: "ultrathink" }], + ), }); const createInput = harness.getLastCreateQueryInput(); @@ -625,7 +648,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -638,7 +661,7 @@ describe("ClaudeAdapterLive", () => { const createInput = harness.getLastCreateQueryInput(); const promptMessage = yield* Effect.promise(() => readFirstPromptMessage(createInput)); assert.isDefined(promptMessage); - assert.deepEqual((promptMessage?.message as any).content, [ + assert.deepEqual(promptMessage?.message.content, [ { type: "text", text: "What's in this image?", @@ -670,9 +693,9 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-sonnet-4-5", }, runtimeMode: "full-access", @@ -847,7 +870,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1026,7 +1049,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); 
@@ -1117,7 +1140,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1193,7 +1216,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1259,7 +1282,7 @@ describe("ClaudeAdapterLive", () => { yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1310,13 +1333,19 @@ describe("ClaudeAdapterLive", () => { it.effect("closes the previous session before replacing an existing thread session", () => { const queries: FakeClaudeQuery[] = []; - const layer = makeClaudeAdapterLive({ - createQuery: () => { - const query = new FakeClaudeQuery(); - queries.push(query); - return query; - }, - }).pipe( + const layer = Layer.effect( + ClaudeAdapter, + Effect.gen(function* () { + const claudeConfig = Schema.decodeSync(ClaudeSettings)({}); + return yield* makeClaudeAdapter(claudeConfig, { + createQuery: () => { + const query = new FakeClaudeQuery(); + queries.push(query); + return query; + }, + }); + }), + ).pipe( Layer.provideMerge(ServerConfig.layerTest("/tmp/claude-adapter-test", "/tmp")), Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge(NodeServices.layer), @@ -1332,13 +1361,13 @@ describe("ClaudeAdapterLive", () => { const firstSession = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); const secondSession = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", resumeCursor: 
firstSession.resumeCursor, }); @@ -1387,21 +1416,27 @@ describe("ClaudeAdapterLive", () => { let promptConsumerError: unknown = undefined; - const layer = makeClaudeAdapterLive({ - createQuery: (input) => { - // Simulate the SDK consuming the prompt iterable - (async () => { - try { - for await (const _message of input.prompt) { - /* SDK processes user messages */ - } - } catch (error) { - promptConsumerError = error; - } - })(); - return query; - }, - }).pipe( + const layer = Layer.effect( + ClaudeAdapter, + Effect.gen(function* () { + const claudeConfig = Schema.decodeSync(ClaudeSettings)({}); + return yield* makeClaudeAdapter(claudeConfig, { + createQuery: (input) => { + // Simulate the SDK consuming the prompt iterable + (async () => { + try { + for await (const _message of input.prompt) { + /* SDK processes user messages */ + } + } catch (error) { + promptConsumerError = error; + } + })(); + return query; + }, + }); + }), + ).pipe( Layer.provideMerge(ServerConfig.layerTest("/tmp/claude-adapter-test", "/tmp")), Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge(NodeServices.layer), @@ -1419,7 +1454,7 @@ describe("ClaudeAdapterLive", () => { yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1456,7 +1491,7 @@ describe("ClaudeAdapterLive", () => { yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1503,7 +1538,7 @@ describe("ClaudeAdapterLive", () => { yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1557,7 +1592,7 @@ describe("ClaudeAdapterLive", () => { yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: 
"full-access", }); @@ -1624,7 +1659,7 @@ describe("ClaudeAdapterLive", () => { yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1689,7 +1724,7 @@ describe("ClaudeAdapterLive", () => { yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1770,7 +1805,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -1861,7 +1896,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -2027,7 +2062,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -2096,7 +2131,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -2318,7 +2353,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); assert.equal(session.threadId, THREAD_ID); @@ -2391,7 +2426,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "approval-required", }); @@ -2500,7 +2535,7 @@ describe("ClaudeAdapterLive", () => { 
const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "approval-required", }); @@ -2573,7 +2608,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: RESUME_THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), resumeCursor: { threadId: "resume-thread-1", resume: "550e8400-e29b-41d4-a716-446655440000", @@ -2601,6 +2636,96 @@ describe("ClaudeAdapterLive", () => { ); }); + it.effect("preserves durable resume ids across Claude resume hooks", () => { + const harness = makeHarness(); + return Effect.gen(function* () { + const adapter = yield* ClaudeAdapter; + const durableSessionId = "550e8400-e29b-41d4-a716-446655440000"; + const transientHookSessionId = "7368d0c7-40a3-4d8a-bcc1-ac80c49f2719"; + + const runtimeEventsFiber = yield* Stream.take(adapter.streamEvents, 7).pipe( + Stream.runCollect, + Effect.forkChild, + ); + + yield* adapter.startSession({ + threadId: RESUME_THREAD_ID, + provider: ProviderDriverKind.make("claudeAgent"), + resumeCursor: { + threadId: RESUME_THREAD_ID, + resume: durableSessionId, + resumeSessionAt: "assistant-99", + turnCount: 3, + }, + runtimeMode: "full-access", + }); + + harness.query.emit({ + type: "system", + subtype: "hook_started", + hook_id: "resume-hook-1", + hook_name: "SessionStart:resume", + hook_event: "SessionStart", + session_id: transientHookSessionId, + uuid: "resume-hook-started", + } as unknown as SDKMessage); + + harness.query.emit({ + type: "system", + subtype: "hook_response", + hook_id: "resume-hook-1", + hook_name: "SessionStart:resume", + hook_event: "SessionStart", + output: "", + stdout: "", + stderr: "", + outcome: "success", + session_id: transientHookSessionId, + uuid: "resume-hook-response", + } as unknown as SDKMessage); + + harness.query.emit({ + type: "system", + subtype: "init", + apiKeySource: "none", + claude_code_version: 
"test", + cwd: "/tmp/claude-adapter-test", + tools: [], + mcp_servers: [], + model: "claude-sonnet-4-5", + permissionMode: "bypassPermissions", + slash_commands: [], + output_style: "default", + skills: [], + plugins: [], + session_id: durableSessionId, + uuid: "resume-init", + } as unknown as SDKMessage); + + const runtimeEvents = Array.from(yield* Fiber.join(runtimeEventsFiber)); + const threadStartedEvents = runtimeEvents.filter((event) => event.type === "thread.started"); + assert.equal(threadStartedEvents.length, 1); + const threadStarted = threadStartedEvents[0]; + assert.equal(threadStarted?.type, "thread.started"); + if (threadStarted?.type === "thread.started") { + assert.deepEqual(threadStarted.payload, { + providerThreadId: durableSessionId, + }); + } + + const activeSessions = yield* adapter.listSessions(); + const resumeCursor = activeSessions[0]?.resumeCursor as + | { + readonly resume?: string; + } + | undefined; + assert.equal(resumeCursor?.resume, durableSessionId); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + it.effect("uses an app-generated Claude session id for fresh sessions", () => { const harness = makeHarness(); return Effect.gen(function* () { @@ -2608,7 +2733,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -2642,7 +2767,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -2722,14 +2847,14 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); yield* 
adapter.sendTurn({ threadId: session.threadId, input: "hello", modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-6", }, attachments: [], @@ -2742,6 +2867,34 @@ describe("ClaudeAdapterLive", () => { ); }); + it.effect("updates model on sendTurn for the adapter's bound custom instance id", () => { + const customInstanceId = ProviderInstanceId.make("claude_openrouter"); + const harness = makeHarness({ instanceId: customInstanceId }); + return Effect.gen(function* () { + const adapter = yield* ClaudeAdapter; + + const session = yield* adapter.startSession({ + threadId: THREAD_ID, + provider: ProviderDriverKind.make("claudeAgent"), + runtimeMode: "full-access", + }); + yield* adapter.sendTurn({ + threadId: session.threadId, + input: "hello", + modelSelection: { + instanceId: customInstanceId, + model: "openai/gpt-5.5", + }, + attachments: [], + }); + + assert.deepEqual(harness.query.setModelCalls, ["openai/gpt-5.5"]); + }).pipe( + Effect.provideService(Random.Random, makeDeterministicRandomService()), + Effect.provide(harness.layer), + ); + }); + it.effect( "does not re-set the Claude model when the session already uses the same effective API model", () => { @@ -2749,13 +2902,13 @@ describe("ClaudeAdapterLive", () => { return Effect.gen(function* () { const adapter = yield* ClaudeAdapter; const modelSelection = { - provider: "claudeAgent" as const, + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-6", }; const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), modelSelection, runtimeMode: "full-access", }); @@ -2788,27 +2941,25 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); yield* adapter.sendTurn({ threadId: 
session.threadId, input: "hello", - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - contextWindow: "1m", - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [{ id: "contextWindow", value: "1m" }], + ), attachments: [], }); yield* adapter.sendTurn({ threadId: session.threadId, input: "hello again", modelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-opus-4-6", }, attachments: [], @@ -2828,7 +2979,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); yield* adapter.sendTurn({ @@ -2858,7 +3009,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode, }); @@ -2910,7 +3061,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); yield* adapter.sendTurn({ @@ -2933,7 +3084,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -2999,7 +3150,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -3071,7 +3222,7 @@ describe("ClaudeAdapterLive", () => { // Start session in approval-required mode so canUseTool fires. 
const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "approval-required", }); @@ -3145,6 +3296,9 @@ describe("ClaudeAdapterLive", () => { assert.equal(typeof requestId, "string"); assert.equal(requestedEvent.value.payload.questions.length, 1); assert.equal(requestedEvent.value.payload.questions[0]?.question, "Which framework?"); + // Regression for #2388: `id` must equal the full question text so the + // UI's draft-answer key matches what the SDK looks up downstream. + assert.equal(requestedEvent.value.payload.questions[0]?.id, "Which framework?"); assert.deepEqual(requestedEvent.value.providerRefs, { providerItemId: ProviderItemId.make("tool-ask-1"), }); @@ -3179,6 +3333,34 @@ describe("ClaudeAdapterLive", () => { assert.deepEqual(updatedInput.answers, { "Which framework?": "React" }); // Original questions should be passed through. assert.deepEqual(updatedInput.questions, askInput.questions); + + // Compatibility check for #2388: the answers shape we hand to the SDK + // must produce a non-empty rendered tool_result on BOTH SDK iteration + // patterns we have seen, so we don't regress the issue and we don't + // break users still on the older Claude CLI. + const sdkAnswers = updatedInput.answers as Record; + const sdkQuestions = updatedInput.questions as ReadonlyArray<{ + readonly question: string; + }>; + + // Claude CLI 2.1.119 — key-agnostic Object.entries iteration. Any key + // works here, but it must at least round-trip into a non-empty string. + const v119Rendered = Object.entries(sdkAnswers) + .map(([key, value]) => `"${key}"="${String(value)}"`) + .join(", "); + assert.equal(v119Rendered, '"Which framework?"="React"'); + + // Claude CLI 2.1.121 — lookup by full question text. This is the path + // that regressed in #2388 when the answers were keyed by `header`. 
+ const v121Rendered = sdkQuestions + .map(({ question }) => { + const answer = sdkAnswers[question]; + return answer === undefined ? null : `"${question}"="${String(answer)}"`; + }) + .filter((entry): entry is string => entry !== null) + .join(", "); + assert.notEqual(v121Rendered, "", "Expected non-empty SDK 2.1.121 tool_result (#2388)"); + assert.equal(v121Rendered, '"Which framework?"="React"'); }).pipe( Effect.provideService(Random.Random, makeDeterministicRandomService()), Effect.provide(harness.layer), @@ -3194,7 +3376,7 @@ describe("ClaudeAdapterLive", () => { // AskUserQuestion should still go through the user-input flow. const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); @@ -3260,7 +3442,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "approval-required", }); @@ -3347,7 +3529,7 @@ describe("ClaudeAdapterLive", () => { const session = yield* adapter.startSession({ threadId: THREAD_ID, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), runtimeMode: "full-access", }); const turn = yield* adapter.sendTurn({ diff --git a/apps/server/src/provider/Layers/ClaudeAdapter.ts b/apps/server/src/provider/Layers/ClaudeAdapter.ts index 3b32646ec40..556504d6cf4 100644 --- a/apps/server/src/provider/Layers/ClaudeAdapter.ts +++ b/apps/server/src/provider/Layers/ClaudeAdapter.ts @@ -17,88 +17,18 @@ import { type SDKResultMessage, type SettingSource, type SDKUserMessage, + type ModelUsage, } from "@anthropic-ai/claude-agent-sdk"; - -/** Inline types from SDK — not yet re-exported from the public entry. 
*/ -type ModelUsage = { - inputTokens: number; - outputTokens: number; - cacheReadInputTokens: number; - cacheCreationInputTokens: number; - webSearchRequests: number; - contextWindow: number; -}; -type NonNullableUsage = Record; - -/** Inline type aliases for SDK message subtypes that aren't re-exported publicly. */ -type SDKHookStartedMessage = { - type: "system"; - subtype: "hook_started"; - hook_id: string; - hook_name: string; - hook_event: string; - [k: string]: unknown; -}; -type SDKHookProgressMessage = { - type: "system"; - subtype: "hook_progress"; - hook_id: string; - output: string; - stdout: string; - stderr: string; - [k: string]: unknown; -}; -type SDKHookResponseMessage = { - type: "system"; - subtype: "hook_response"; - hook_id: string; - outcome: "error" | "cancelled" | "success"; - output: string; - stdout: string; - stderr: string; - exit_code?: number; - [k: string]: unknown; -}; -type SDKTaskStartedMessage = { - type: "system"; - subtype: "task_started"; - task_id: string; - description: string; - task_type?: string; - [k: string]: unknown; -}; -type SDKTaskProgressMessage = { - type: "system"; - subtype: "task_progress"; - task_id: string; - description: string; - summary?: string; - usage?: Record; - last_tool_name?: string; - [k: string]: unknown; -}; -type SDKTaskNotificationMessage = { - type: "system"; - subtype: "task_notification"; - task_id: string; - status: "completed" | "failed" | "stopped"; - summary?: string; - usage?: Record; - [k: string]: unknown; -}; -type SDKToolUseSummaryMessage = { - type: "tool_use_summary"; - summary: string; - preceding_tool_use_ids?: readonly string[]; - [k: string]: unknown; -}; import { parseCliArgs } from "@t3tools/shared/cliArgs"; import { ApprovalRequestId, type CanonicalItemType, type CanonicalRequestType, + type ClaudeSettings, EventId, type ProviderApprovalDecision, + ProviderDriverKind, + ProviderInstanceId, ProviderItemId, type ProviderRuntimeEvent, type ProviderRuntimeTurnStatus, @@ -113,9 
+43,14 @@ import { ThreadId, TurnId, type UserInputQuestion, - type ClaudeAgentEffort, } from "@t3tools/contracts"; -import { applyClaudePromptEffortPrefix, resolveEffort, trimOrNull } from "@t3tools/shared/model"; +import { + applyClaudePromptEffortPrefix, + getModelSelectionBooleanOptionValue, + getModelSelectionStringOptionValue, + getProviderOptionDescriptors, + resolvePromptInjectedEffort, +} from "@t3tools/shared/model"; import { Cause, DateTime, @@ -124,7 +59,7 @@ import { Exit, FileSystem, Fiber, - Layer, + Path, Queue, Random, Ref, @@ -133,8 +68,13 @@ import { import { resolveAttachmentPath } from "../../attachmentStore.ts"; import { ServerConfig } from "../../config.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -import { getClaudeModelCapabilities, resolveClaudeApiModelId } from "./ClaudeProvider.ts"; +import { makeClaudeEnvironment } from "../Drivers/ClaudeHome.ts"; +import { + getClaudeModelCapabilities, + normalizeClaudeCliEffort, + resolveClaudeApiModelId, + resolveClaudeEffort, +} from "./ClaudeProvider.ts"; import { ProviderAdapterProcessError, ProviderAdapterRequestError, @@ -143,11 +83,10 @@ import { ProviderAdapterValidationError, type ProviderAdapterError, } from "../Errors.ts"; -import { ClaudeAdapter, type ClaudeAdapterShape } from "../Services/ClaudeAdapter.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; +import { type ClaudeAdapterShape } from "../Services/ClaudeAdapter.ts"; import { type EventNdjsonLogger, makeEventNdjsonLogger } from "./EventNdjsonLogger.ts"; -const PROVIDER = "claudeAgent" as const; +const PROVIDER = ProviderDriverKind.make("claudeAgent"); type ClaudeTextStreamKind = Extract; type ClaudeToolResultStreamKind = Extract< RuntimeContentStreamKind, @@ -246,6 +185,8 @@ interface ClaudeQueryRuntime extends AsyncIterable { } export interface ClaudeAdapterLiveOptions { + readonly instanceId?: ProviderInstanceId; + readonly environment?: NodeJS.ProcessEnv; readonly 
createQuery?: (input: { readonly prompt: AsyncIterable; readonly options: ClaudeQueryOptions; @@ -297,19 +238,9 @@ function normalizeClaudeStreamMessages(cause: Cause.Cause): ReadonlyArray return squashed.length > 0 ? [squashed] : []; } -function getEffectiveClaudeAgentEffort( - effort: ClaudeAgentEffort | null | undefined, -): ClaudeSdkEffort | null { - if (!effort) { - return null; - } - if (effort === "ultrathink") { - return null; - } - if (effort === "xhigh") { - return "max"; - } - return effort; +function getEffectiveClaudeAgentEffort(effort: string | null | undefined): ClaudeSdkEffort | null { + const normalized = normalizeClaudeCliEffort(effort); + return normalized ? (normalized as ClaudeSdkEffort) : null; } function isClaudeInterruptedMessage(message: string): boolean { @@ -639,18 +570,19 @@ const CLAUDE_SETTING_SOURCES = [ "local", ] as const satisfies ReadonlyArray; -function buildPromptText(input: ProviderSendTurnInput): string { +function buildPromptText( + input: ProviderSendTurnInput, + boundInstanceId: ProviderInstanceId, +): string { const rawEffort = - input.modelSelection?.provider === "claudeAgent" ? input.modelSelection.options?.effort : null; + input.modelSelection?.instanceId === boundInstanceId + ? getModelSelectionStringOptionValue(input.modelSelection, "effort") + : null; const claudeModel = - input.modelSelection?.provider === "claudeAgent" ? input.modelSelection.model : undefined; + input.modelSelection?.instanceId === boundInstanceId ? input.modelSelection.model : undefined; const caps = getClaudeModelCapabilities(claudeModel); - // For prompt injection, we check if the raw effort is a prompt-injected level (e.g. "ultrathink"). - // resolveEffort strips prompt-injected values (returning the default instead), so we check the raw value directly. - const trimmedEffort = trimOrNull(rawEffort); - const promptEffort = - trimmedEffort && caps.promptInjectedEffortLevels.includes(trimmedEffort) ? 
trimmedEffort : null; + const promptEffort = resolvePromptInjectedEffort(caps, rawEffort); return applyClaudePromptEffortPrefix(input.input?.trim() ?? "", promptEffort); } @@ -663,7 +595,7 @@ function buildUserMessage(input: { parent_tool_use_id: null, message: { role: "user", - content: input.sdkContent as unknown as Array>, + content: input.sdkContent as unknown as SDKUserMessage["message"]["content"], }, } as SDKUserMessage; } @@ -687,9 +619,10 @@ const buildUserMessageEffect = Effect.fn("buildUserMessageEffect")(function* ( dependencies: { readonly fileSystem: FileSystem.FileSystem; readonly attachmentsDir: string; + readonly boundInstanceId: ProviderInstanceId; }, ) { - const text = buildPromptText(input); + const text = buildPromptText(input, dependencies.boundInstanceId); const sdkContent: Array> = []; if (text.length > 0) { @@ -1039,11 +972,17 @@ function sdkNativeItemId(message: SDKMessage): string | undefined { return undefined; } -const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( +export const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( + claudeSettings: ClaudeSettings, options?: ClaudeAdapterLiveOptions, ) { + const boundInstanceId = options?.instanceId ?? ProviderInstanceId.make("claudeAgent"); const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; const serverConfig = yield* ServerConfig; + const claudeEnvironment = yield* makeClaudeEnvironment(claudeSettings, options?.environment).pipe( + Effect.provideService(Path.Path, path), + ); const nativeEventLogger = options?.nativeEventLogger ?? 
(options?.nativeEventLogPath !== undefined @@ -1065,7 +1004,6 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( const sessions = new Map(); const runtimeEventQueue = yield* Queue.unbounded(); - const serverSettingsService = yield* ServerSettingsService; const nowIso = Effect.map(DateTime.now, DateTime.formatIso); const nextEventId = Effect.map(Random.nextUUIDv4, (id) => EventId.make(id)); @@ -1469,9 +1407,7 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( errorMessage?: string, result?: SDKResultMessage, ) { - const resultContextWindow = maxClaudeContextWindowFromModelUsage( - result?.modelUsage as Record | undefined, - ); + const resultContextWindow = maxClaudeContextWindowFromModelUsage(result?.modelUsage); if (resultContextWindow !== undefined) { context.lastKnownContextWindow = resultContextWindow; } @@ -1648,26 +1584,7 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( return; } - type StreamEvent = { - type: string; - delta: { - type: string; - text?: string; - thinking?: string; - partial_json?: string; - [k: string]: unknown; - }; - index: number; - content_block?: { - type: string; - id?: string; - name?: string; - input?: unknown; - [k: string]: unknown; - }; - [k: string]: unknown; - }; - const { event } = message as { event: StreamEvent }; + const { event } = message; if (event.type === "content_block_delta") { if ( @@ -1676,7 +1593,7 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( ) { const deltaText = event.delta.type === "text_delta" - ? (event.delta.text ?? "") + ? event.delta.text : typeof event.delta.thinking === "string" ? 
event.delta.thinking : ""; @@ -1823,9 +1740,6 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( if (event.type === "content_block_start") { const { index, content_block: block } = event; - if (!block) { - return; - } if (block.type === "text") { yield* ensureAssistantTextBlock(context, index, { fallbackText: extractContentBlockText(block), @@ -1840,13 +1754,13 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( return; } - const toolName = block.name ?? "unknown"; + const toolName = block.name; const itemType = classifyToolItemType(toolName); const toolInput = typeof block.input === "object" && block.input !== null ? (block.input as Record) : {}; - const itemId = block.id ?? ""; + const itemId = block.id; const detail = summarizeToolRequest(toolName, toolInput); const inputFingerprint = Object.keys(toolInput).length > 0 ? toolInputFingerprint(toolInput) : undefined; @@ -2118,13 +2032,7 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( } const status = turnStatusFromResult(message); - const errors = (message as { errors?: unknown[] }).errors; - const errorMessage = - message.subtype === "success" - ? undefined - : typeof errors?.[0] === "string" - ? errors[0] - : undefined; + const errorMessage = message.subtype === "success" ? undefined : message.errors[0]; if (status === "failed") { yield* emitRuntimeError(context, errorMessage ?? 
"Claude turn failed."); @@ -2188,69 +2096,58 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( }, }); return; - case "hook_started": { - const hookStarted = message as SDKHookStartedMessage; + case "hook_started": yield* offerRuntimeEvent({ ...base, type: "hook.started", payload: { - hookId: hookStarted.hook_id, - hookName: hookStarted.hook_name, - hookEvent: hookStarted.hook_event, + hookId: message.hook_id, + hookName: message.hook_name, + hookEvent: message.hook_event, }, }); return; - } - case "hook_progress": { - const hookProgress = message as SDKHookProgressMessage; + case "hook_progress": yield* offerRuntimeEvent({ ...base, type: "hook.progress", payload: { - hookId: hookProgress.hook_id, - output: hookProgress.output, - stdout: hookProgress.stdout, - stderr: hookProgress.stderr, + hookId: message.hook_id, + output: message.output, + stdout: message.stdout, + stderr: message.stderr, }, }); return; - } - case "hook_response": { - const hookResponse = message as SDKHookResponseMessage; + case "hook_response": yield* offerRuntimeEvent({ ...base, type: "hook.completed", payload: { - hookId: hookResponse.hook_id, - outcome: hookResponse.outcome, - output: hookResponse.output, - stdout: hookResponse.stdout, - stderr: hookResponse.stderr, - ...(typeof hookResponse.exit_code === "number" - ? { exitCode: hookResponse.exit_code } - : {}), + hookId: message.hook_id, + outcome: message.outcome, + output: message.output, + stdout: message.stdout, + stderr: message.stderr, + ...(typeof message.exit_code === "number" ? { exitCode: message.exit_code } : {}), }, }); return; - } - case "task_started": { - const taskStarted = message as SDKTaskStartedMessage; + case "task_started": yield* offerRuntimeEvent({ ...base, type: "task.started", payload: { - taskId: RuntimeTaskId.make(taskStarted.task_id), - description: taskStarted.description, - ...(taskStarted.task_type ? 
{ taskType: taskStarted.task_type } : {}), + taskId: RuntimeTaskId.make(message.task_id), + description: message.description, + ...(message.task_type ? { taskType: message.task_type } : {}), }, }); return; - } - case "task_progress": { - const taskProgress = message as SDKTaskProgressMessage; - if (taskProgress.usage) { + case "task_progress": + if (message.usage) { const normalizedUsage = normalizeClaudeTokenUsage( - taskProgress.usage, + message.usage, context.lastKnownContextWindow, ); if (normalizedUsage) { @@ -2271,20 +2168,18 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( ...base, type: "task.progress", payload: { - taskId: RuntimeTaskId.make(taskProgress.task_id), - description: taskProgress.description, - ...(taskProgress.summary ? { summary: taskProgress.summary } : {}), - ...(taskProgress.usage ? { usage: taskProgress.usage } : {}), - ...(taskProgress.last_tool_name ? { lastToolName: taskProgress.last_tool_name } : {}), + taskId: RuntimeTaskId.make(message.task_id), + description: message.description, + ...(message.summary ? { summary: message.summary } : {}), + ...(message.usage ? { usage: message.usage } : {}), + ...(message.last_tool_name ? { lastToolName: message.last_tool_name } : {}), }, }); return; - } - case "task_notification": { - const taskNotification = message as SDKTaskNotificationMessage; - if (taskNotification.usage) { + case "task_notification": + if (message.usage) { const normalizedUsage = normalizeClaudeTokenUsage( - taskNotification.usage, + message.usage, context.lastKnownContextWindow, ); if (normalizedUsage) { @@ -2305,14 +2200,13 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( ...base, type: "task.completed", payload: { - taskId: RuntimeTaskId.make(taskNotification.task_id), - status: taskNotification.status, - ...(taskNotification.summary ? { summary: taskNotification.summary } : {}), - ...(taskNotification.usage ? 
{ usage: taskNotification.usage } : {}), + taskId: RuntimeTaskId.make(message.task_id), + status: message.status, + ...(message.summary ? { summary: message.summary } : {}), + ...(message.usage ? { usage: message.usage } : {}), }, }); return; - } case "files_persisted": yield* offerRuntimeEvent({ ...base, @@ -2380,14 +2274,15 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( } if (message.type === "tool_use_summary") { - const toolSummary = message as SDKToolUseSummaryMessage; yield* offerRuntimeEvent({ ...base, type: "tool.summary", payload: { - summary: toolSummary.summary, - ...(toolSummary.preceding_tool_use_ids && toolSummary.preceding_tool_use_ids.length > 0 - ? { precedingToolUseIds: toolSummary.preceding_tool_use_ids } + summary: message.summary, + ...(message.preceding_tool_use_ids.length > 0 + ? { + precedingToolUseIds: message.preceding_tool_use_ids, + } : {}), }, }); @@ -2395,19 +2290,13 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( } if (message.type === "auth_status") { - const authMsg = message as { - isAuthenticating?: boolean; - output?: string; - error?: string; - [k: string]: unknown; - }; yield* offerRuntimeEvent({ ...base, type: "auth.status", payload: { - isAuthenticating: authMsg.isAuthenticating, - ...(authMsg.output ? { output: [authMsg.output] } : {}), - ...(authMsg.error ? { error: authMsg.error } : {}), + isAuthenticating: message.isAuthenticating, + output: message.output, + ...(message.error ? { error: message.error } : {}), }, }); return; @@ -2674,10 +2563,14 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( const requestId = ApprovalRequestId.make(yield* Random.nextUUIDv4); // Parse questions from the SDK's AskUserQuestion input. 
+ // `id` MUST equal the full question text — Claude SDK >= 2.1.121 looks + // up answers by question text in `mapToolResultToToolResultBlockParam`, + // so the key the UI uses to keep its draft answer must match the SDK's + // expected lookup key. See https://github.com/pingdotgg/t3code/issues/2388 const rawQuestions = Array.isArray(toolInput.questions) ? toolInput.questions : []; const questions: Array = rawQuestions.map( (q: Record, idx: number) => ({ - id: typeof q.header === "string" ? q.header : `q-${idx}`, + id: typeof q.question === "string" && q.question.length > 0 ? q.question : `q-${idx}`, header: typeof q.header === "string" ? q.header : `Question ${idx + 1}`, question: typeof q.question === "string" ? q.question : "", options: Array.isArray(q.options) @@ -2945,31 +2838,27 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( const canUseTool: CanUseTool = (toolName, toolInput, callbackOptions) => runPromise(canUseToolEffect(toolName, toolInput, callbackOptions)); - const claudeSettings = yield* serverSettingsService.getSettings.pipe( - Effect.map((settings) => settings.providers.claudeAgent), - Effect.mapError( - (error) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: error.message, - cause: error, - }), - ), - ); const claudeBinaryPath = claudeSettings.binaryPath; const extraArgs = parseCliArgs(claudeSettings.launchArgs).flags; const modelSelection = - input.modelSelection?.provider === "claudeAgent" ? input.modelSelection : undefined; + input.modelSelection?.instanceId === boundInstanceId ? input.modelSelection : undefined; const caps = getClaudeModelCapabilities(modelSelection?.model); + const descriptors = getProviderOptionDescriptors({ caps }); const apiModelId = modelSelection ? resolveClaudeApiModelId(modelSelection) : undefined; - const effort = (resolveEffort(caps, modelSelection?.options?.effort) ?? 
- null) as ClaudeAgentEffort | null; - const fastMode = modelSelection?.options?.fastMode === true && caps.supportsFastMode; - const thinking = - typeof modelSelection?.options?.thinking === "boolean" && caps.supportsThinkingToggle - ? modelSelection.options.thinking - : undefined; + const rawEffort = getModelSelectionStringOptionValue(modelSelection, "effort"); + const effort = resolveClaudeEffort(caps, rawEffort) ?? null; + const fastModeSupported = descriptors.some( + (descriptor) => descriptor.type === "boolean" && descriptor.id === "fastMode", + ); + const thinkingSupported = descriptors.some( + (descriptor) => descriptor.type === "boolean" && descriptor.id === "thinking", + ); + const fastMode = + getModelSelectionBooleanOptionValue(modelSelection, "fastMode") === true && + fastModeSupported; + const thinking = thinkingSupported + ? getModelSelectionBooleanOptionValue(modelSelection, "thinking") + : undefined; const effectiveEffort = getEffectiveClaudeAgentEffort(effort); const runtimeModeToPermission: Record = { "auto-accept-edits": "acceptEdits", @@ -2980,11 +2869,11 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( ...(typeof thinking === "boolean" ? { alwaysThinkingEnabled: thinking } : {}), ...(fastMode ? { fastMode: true } : {}), }; - const queryOptions: ClaudeQueryOptions = { ...(input.cwd ? { cwd: input.cwd } : {}), ...(apiModelId ? { model: apiModelId } : {}), pathToClaudeCodeExecutable: claudeBinaryPath, + systemPrompt: { type: "preset", preset: "claude_code" }, settingSources: [...CLAUDE_SETTING_SOURCES], // The SDK type lags the CLI here: Opus 4.7 accepts `xhigh` even though // the published `Options["effort"]` union currently stops at `max`. @@ -3002,7 +2891,7 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( ...(newSessionId ? { sessionId: newSessionId } : {}), includePartialMessages: true, canUseTool, - env: process.env, + env: claudeEnvironment, ...(input.cwd ? 
{ additionalDirectories: [input.cwd] } : {}), ...(Object.keys(extraArgs).length > 0 ? { extraArgs } : {}), }; @@ -3027,8 +2916,8 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( "claude.query.include_partial_messages": true, "claude.query.additional_directories": input.cwd ? [input.cwd] : [], "claude.query.setting_sources": [...CLAUDE_SETTING_SOURCES], - "claude.query.settings_keys": Object.keys(settings).sort(), - "claude.query.extra_args_count": Object.keys(extraArgs).length, + "claude.query.settings_json": JSON.stringify(settings), + "claude.query.extra_args_json": JSON.stringify(extraArgs), "claude.query.path_to_executable": claudeBinaryPath, }); @@ -3050,6 +2939,7 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( const session: ProviderSession = { threadId, provider: PROVIDER, + providerInstanceId: boundInstanceId, status: "ready", runtimeMode: input.runtimeMode, ...(input.cwd ? { cwd: input.cwd } : {}), @@ -3161,7 +3051,9 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( const sendTurn: ClaudeAdapterShape["sendTurn"] = Effect.fn("sendTurn")(function* (input) { const context = yield* requireSession(input.threadId); const modelSelection = - input.modelSelection?.provider === "claudeAgent" ? input.modelSelection : undefined; + input.modelSelection !== undefined && input.modelSelection.instanceId === boundInstanceId + ? 
input.modelSelection + : undefined; if (context.turnState) { // Auto-close a stale synthetic turn (from background agent responses @@ -3235,6 +3127,7 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( const message = yield* buildUserMessageEffect(input, { fileSystem, attachmentsDir: serverConfig.attachmentsDir, + boundInstanceId, }); yield* Queue.offer(context.promptQueue, { @@ -3353,7 +3246,9 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( return { provider: PROVIDER, - capabilities: getProviderCapabilities(PROVIDER), + capabilities: { + sessionModelSwitch: "in-session", + }, startSession, sendTurn, interruptTurn, @@ -3370,9 +3265,3 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( }, } satisfies ClaudeAdapterShape; }); - -export const ClaudeAdapterLive = Layer.effect(ClaudeAdapter, makeClaudeAdapter()); - -export function makeClaudeAdapterLive(options?: ClaudeAdapterLiveOptions) { - return Layer.effect(ClaudeAdapter, makeClaudeAdapter(options)); -} diff --git a/apps/server/src/provider/Layers/ClaudeProvider.ts b/apps/server/src/provider/Layers/ClaudeProvider.ts index d50f9761730..43505967002 100644 --- a/apps/server/src/provider/Layers/ClaudeProvider.ts +++ b/apps/server/src/provider/Layers/ClaudeProvider.ts @@ -1,52 +1,45 @@ -import type { - ClaudeSettings, - ClaudeModelSelection, - ModelCapabilities, - ServerProvider, - ServerProviderModel, - ServerProviderAuth, - ServerProviderSlashCommand, - ServerProviderState, +import { + type ClaudeSettings, + type ModelCapabilities, + type ModelSelection, + ProviderDriverKind, + type ServerProviderModel, + type ServerProviderSlashCommand, } from "@t3tools/contracts"; -import { Cache, Duration, Effect, Equal, Layer, Option, Result, Schema, Stream } from "effect"; +import { Effect, Option, Path, Result } from "effect"; import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; -import { decodeJsonResult } from "@t3tools/shared/schemaJson"; 
-import { query as claudeQuery, type SDKUserMessage } from "@anthropic-ai/claude-agent-sdk"; - -/** Inline type — not yet re-exported from the public SDK entry. */ -type ClaudeSlashCommand = { - name: string; - description: string; - argumentHint: string; -}; +import { + createModelCapabilities, + getModelSelectionStringOptionValue, + getProviderOptionCurrentValue, + getProviderOptionDescriptors, +} from "@t3tools/shared/model"; +import { + query as claudeQuery, + type SlashCommand as ClaudeSlashCommand, + type SDKUserMessage, +} from "@anthropic-ai/claude-agent-sdk"; import { + buildBooleanOptionDescriptor, + buildSelectOptionDescriptor, buildServerProvider, - AUTH_PROBE_TIMEOUT_MS, DEFAULT_TIMEOUT_MS, detailFromResult, - extractAuthBoolean, isCommandMissingCause, parseGenericCliVersion, providerModelsFromSettings, - collectStreamAsString, - type CommandResult, + spawnAndCollect, + type ServerProviderDraft, } from "../providerSnapshot.ts"; import { compareCliVersions } from "../cliVersion.ts"; -import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; -import { ClaudeProvider } from "../Services/ClaudeProvider.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -import { ServerSettingsError } from "@t3tools/contracts"; - -const DEFAULT_CLAUDE_MODEL_CAPABILITIES: ModelCapabilities = { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], -}; +import { makeClaudeEnvironment } from "../Drivers/ClaudeHome.ts"; -const PROVIDER = "claudeAgent" as const; +const DEFAULT_CLAUDE_MODEL_CAPABILITIES: ModelCapabilities = createModelCapabilities({ + optionDescriptors: [], +}); + +const PROVIDER = ProviderDriverKind.make("claudeAgent"); const CLAUDE_PRESENTATION = { displayName: "Claude", showInteractionModeToggle: true, @@ -57,93 +50,128 @@ const BUILT_IN_MODELS: ReadonlyArray = [ slug: "claude-opus-4-7", name: "Claude Opus 4.7", isCustom: 
false, - capabilities: { - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High" }, - { value: "xhigh", label: "Extra High", isDefault: true }, - { value: "max", label: "Max" }, - { value: "ultrathink", label: "Ultrathink" }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + buildSelectOptionDescriptor({ + id: "effort", + label: "Reasoning", + options: [ + { value: "low", label: "Low" }, + { value: "medium", label: "Medium" }, + { value: "high", label: "High" }, + { value: "xhigh", label: "Extra High", isDefault: true }, + { value: "max", label: "Max" }, + { value: "ultrathink", label: "Ultrathink" }, + ], + promptInjectedValues: ["ultrathink"], + }), + buildSelectOptionDescriptor({ + id: "contextWindow", + label: "Context Window", + options: [ + { value: "200k", label: "200k", isDefault: true }, + { value: "1m", label: "1M" }, + ], + }), ], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [ - { value: "200k", label: "200k", isDefault: true }, - { value: "1m", label: "1M" }, - ], - promptInjectedEffortLevels: ["ultrathink"], - } satisfies ModelCapabilities, + }), }, { slug: "claude-opus-4-6", name: "Claude Opus 4.6", isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High", isDefault: true }, - { value: "max", label: "Max" }, - { value: "ultrathink", label: "Ultrathink" }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + buildSelectOptionDescriptor({ + id: "effort", + label: "Reasoning", + options: [ + { value: "low", label: "Low" }, + { value: "medium", label: "Medium" }, + { value: "high", label: "High", isDefault: true }, + { value: "max", label: "Max" }, + { value: "ultrathink", label: "Ultrathink" }, + ], + promptInjectedValues: ["ultrathink"], + }), + buildBooleanOptionDescriptor({ + id: "fastMode", + 
label: "Fast Mode", + }), + buildSelectOptionDescriptor({ + id: "contextWindow", + label: "Context Window", + options: [ + { value: "200k", label: "200k", isDefault: true }, + { value: "1m", label: "1M" }, + ], + }), ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [ - { value: "200k", label: "200k", isDefault: true }, - { value: "1m", label: "1M" }, - ], - promptInjectedEffortLevels: ["ultrathink"], - } satisfies ModelCapabilities, + }), }, { slug: "claude-opus-4-5", name: "Claude Opus 4.5", isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High", isDefault: true }, - { value: "max", label: "Max" }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + buildSelectOptionDescriptor({ + id: "effort", + label: "Reasoning", + options: [ + { value: "low", label: "Low" }, + { value: "medium", label: "Medium" }, + { value: "high", label: "High", isDefault: true }, + { value: "max", label: "Max" }, + ], + }), + buildBooleanOptionDescriptor({ + id: "fastMode", + label: "Fast Mode", + }), ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - } satisfies ModelCapabilities, + }), }, { slug: "claude-sonnet-4-6", name: "Claude Sonnet 4.6", isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High", isDefault: true }, - { value: "ultrathink", label: "Ultrathink" }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + buildSelectOptionDescriptor({ + id: "effort", + label: "Reasoning", + options: [ + { value: "low", label: "Low" }, + { value: "medium", label: "Medium" }, + { value: "high", label: "High", isDefault: true }, + { value: "ultrathink", label: "Ultrathink" }, + ], + promptInjectedValues: ["ultrathink"], + }), + 
buildSelectOptionDescriptor({ + id: "contextWindow", + label: "Context Window", + options: [ + { value: "200k", label: "200k", isDefault: true }, + { value: "1m", label: "1M" }, + ], + }), ], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [ - { value: "200k", label: "200k", isDefault: true }, - { value: "1m", label: "1M" }, - ], - promptInjectedEffortLevels: ["ultrathink"], - } satisfies ModelCapabilities, + }), }, { slug: "claude-haiku-4-5", name: "Claude Haiku 4.5", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - } satisfies ModelCapabilities, + capabilities: createModelCapabilities({ + optionDescriptors: [ + buildBooleanOptionDescriptor({ + id: "thinking", + label: "Thinking", + }), + ], + }), }, ]; @@ -173,188 +201,46 @@ export function getClaudeModelCapabilities(model: string | null | undefined): Mo ); } -export function resolveClaudeApiModelId(modelSelection: ClaudeModelSelection): string { - switch (modelSelection.options?.contextWindow) { - case "1m": - return `${modelSelection.model}[1m]`; - default: - return modelSelection.model; - } -} -export function parseClaudeAuthStatusFromOutput(result: CommandResult): { - readonly status: Exclude; - readonly auth: Pick; - readonly message?: string; -} { - const lowerOutput = `${result.stdout}\n${result.stderr}`.toLowerCase(); - - if ( - lowerOutput.includes("unknown command") || - lowerOutput.includes("unrecognized command") || - lowerOutput.includes("unexpected argument") - ) { - return { - status: "warning", - auth: { status: "unknown" }, - message: - "Claude Agent authentication status command is unavailable in this version of Claude.", - }; - } - - if ( - lowerOutput.includes("not logged in") || - lowerOutput.includes("login required") || - lowerOutput.includes("authentication required") || - lowerOutput.includes("run `claude login`") || 
- lowerOutput.includes("run claude login") - ) { - return { - status: "error", - auth: { status: "unauthenticated" }, - message: "Claude is not authenticated. Run `claude auth login` and try again.", - }; - } - - const parsedAuth = (() => { - const trimmed = result.stdout.trim(); - if (!trimmed || (!trimmed.startsWith("{") && !trimmed.startsWith("["))) { - return { attemptedJsonParse: false as const, auth: undefined as boolean | undefined }; - } - try { - return { - attemptedJsonParse: true as const, - auth: extractAuthBoolean(JSON.parse(trimmed)), - }; - } catch { - return { attemptedJsonParse: false as const, auth: undefined as boolean | undefined }; - } - })(); - - if (parsedAuth.auth === true) { - return { status: "ready", auth: { status: "authenticated" } }; - } - if (parsedAuth.auth === false) { - return { - status: "error", - auth: { status: "unauthenticated" }, - message: "Claude is not authenticated. Run `claude auth login` and try again.", - }; - } - if (parsedAuth.attemptedJsonParse) { - return { - status: "warning", - auth: { status: "unknown" }, - message: - "Could not verify Claude authentication status from JSON output (missing auth marker).", - }; - } - if (result.code === 0) { - return { status: "ready", auth: { status: "authenticated" } }; - } - - const detail = detailFromResult(result); - return { - status: "warning", - auth: { status: "unknown" }, - message: detail - ? `Could not verify Claude authentication status. ${detail}` - : "Could not verify Claude authentication status.", - }; +export function resolveClaudeEffort( + caps: ModelCapabilities, + raw: string | null | undefined, +): string | undefined { + const descriptors = getProviderOptionDescriptors({ + caps, + ...(raw ? { selections: [{ id: "effort", value: raw }] } : {}), + }); + const effortDescriptor = descriptors.find((descriptor) => descriptor.id === "effort"); + const value = getProviderOptionCurrentValue(effortDescriptor); + return typeof value === "string" ? 
value : undefined; } -// ── Subscription type detection ───────────────────────────────────── -// -// The SDK probe returns typed `AccountInfo.subscriptionType` directly. -// This walker is a best-effort fallback for the `claude auth status` -// JSON output whose shape is not guaranteed. - -/** Keys that directly hold a subscription/plan identifier. */ -const SUBSCRIPTION_TYPE_KEYS = [ - "subscriptionType", - "subscription_type", - "plan", - "tier", - "planType", - "plan_type", -] as const; - -/** Keys whose value may be a nested object containing subscription info. */ -const SUBSCRIPTION_CONTAINER_KEYS = ["account", "subscription", "user", "billing"] as const; -const AUTH_METHOD_KEYS = ["authMethod", "auth_method"] as const; -const AUTH_METHOD_CONTAINER_KEYS = ["auth", "account", "session"] as const; - -/** Lift an unknown value into `Option` if it is a non-empty string. */ -const asNonEmptyString = (v: unknown): Option.Option => - typeof v === "string" && v.length > 0 ? Option.some(v) : Option.none(); - -/** Lift an unknown value into `Option` if it is a plain object. */ -const asRecord = (v: unknown): Option.Option> => - typeof v === "object" && v !== null && !globalThis.Array.isArray(v) - ? Option.some(v as Record) - : Option.none(); - /** - * Walk an unknown parsed JSON value looking for a subscription/plan - * identifier, returning the first match as an `Option`. + * Normalize a resolved Claude effort value into one suitable for the Claude + * CLI's `--effort` flag. + * + * Mirrors the mapping used when invoking the Claude Agent SDK + * ({@link getEffectiveClaudeAgentEffort} in ClaudeAdapter): the Opus 4.7 + * capability `"xhigh"` is rewritten to the accepted CLI value `"max"`, and + * `"ultrathink"` is filtered out because it is a prompt-prefix mode rather + * than a CLI-effort value. Returns `undefined` when no flag should be passed. 
*/ -function findSubscriptionType(value: unknown): Option.Option { - if (globalThis.Array.isArray(value)) { - return Option.firstSomeOf(value.map(findSubscriptionType)); +export function normalizeClaudeCliEffort(effort: string | null | undefined): string | undefined { + if (!effort || effort === "ultrathink") { + return undefined; } - - return asRecord(value).pipe( - Option.flatMap((record) => { - const direct = Option.firstSomeOf( - SUBSCRIPTION_TYPE_KEYS.map((key) => asNonEmptyString(record[key])), - ); - if (Option.isSome(direct)) return direct; - - return Option.firstSomeOf( - SUBSCRIPTION_CONTAINER_KEYS.map((key) => - asRecord(record[key]).pipe(Option.flatMap(findSubscriptionType)), - ), - ); - }), - ); -} - -function findAuthMethod(value: unknown): Option.Option { - if (globalThis.Array.isArray(value)) { - return Option.firstSomeOf(value.map(findAuthMethod)); + if (effort === "xhigh") { + return "max"; } - - return asRecord(value).pipe( - Option.flatMap((record) => { - const direct = Option.firstSomeOf( - AUTH_METHOD_KEYS.map((key) => asNonEmptyString(record[key])), - ); - if (Option.isSome(direct)) return direct; - - return Option.firstSomeOf( - AUTH_METHOD_CONTAINER_KEYS.map((key) => - asRecord(record[key]).pipe(Option.flatMap(findAuthMethod)), - ), - ); - }), - ); -} - -/** - * Try to extract a subscription type from the `claude auth status` JSON - * output. This is a zero-cost operation on data we already have. 
- */ -const decodeUnknownJson = decodeJsonResult(Schema.Unknown); - -function extractSubscriptionTypeFromOutput(result: CommandResult): string | undefined { - const parsed = decodeUnknownJson(result.stdout.trim()); - if (Result.isFailure(parsed)) return undefined; - return Option.getOrUndefined(findSubscriptionType(parsed.success)); + return effort; } -function extractClaudeAuthMethodFromOutput(result: CommandResult): string | undefined { - const parsed = decodeUnknownJson(result.stdout.trim()); - if (Result.isFailure(parsed)) return undefined; - return Option.getOrUndefined(findAuthMethod(parsed.success)); +export function resolveClaudeApiModelId(modelSelection: ModelSelection): string { + switch (getModelSelectionStringOptionValue(modelSelection, "contextWindow")) { + case "1m": + return `${modelSelection.model}[1m]`; + default: + return modelSelection.model; + } } function toTitleCaseWords(value: string): string { @@ -370,11 +256,27 @@ function claudeSubscriptionLabel(subscriptionType: string | undefined): string | if (!normalized) return undefined; switch (normalized) { + case "claudemaxsubscription": + return "Max"; + case "claudemax5xsubscription": + return "Max 5x"; + case "claudemax20xsubscription": + return "Max 20x"; + case "claudeenterprisesubscription": + return "Enterprise"; + case "claudeteamsubscription": + return "Team"; + case "claudeprosubscription": + return "Pro"; + case "claudefreesubscription": + return "Free"; case "max": case "maxplan": + return "Max"; case "max5": + return "Max 5x"; case "max20": - return "Max"; + return "Max 20x"; case "enterprise": return "Enterprise"; case "team": @@ -391,10 +293,33 @@ function claudeSubscriptionLabel(subscriptionType: string | undefined): string | function normalizeClaudeAuthMethod(authMethod: string | undefined): string | undefined { const normalized = authMethod?.toLowerCase().replace(/[\s_-]+/g, ""); if (!normalized) return undefined; - if (normalized === "apikey") return "apiKey"; + if ( + 
normalized === "apikey" || + normalized === "anthropicapikey" || + normalized === "anthropicauthtoken" + ) { + return "apiKey"; + } return undefined; } +function formatClaudeSubscriptionAuthLabel(subscriptionType: string): string { + const subscriptionLabel = + claudeSubscriptionLabel(subscriptionType) ?? toTitleCaseWords(subscriptionType); + const normalized = subscriptionLabel.toLowerCase().replace(/[\s_-]+/g, ""); + + if (normalized.startsWith("claude") && normalized.endsWith("subscription")) { + return subscriptionLabel; + } + if (normalized.startsWith("claude")) { + return `${subscriptionLabel} Subscription`; + } + if (normalized.endsWith("subscription")) { + return `Claude ${subscriptionLabel}`; + } + return `Claude ${subscriptionLabel} Subscription`; +} + function claudeAuthMetadata(input: { readonly subscriptionType: string | undefined; readonly authMethod: string | undefined; @@ -407,10 +332,9 @@ function claudeAuthMetadata(input: { } if (input.subscriptionType) { - const subscriptionLabel = claudeSubscriptionLabel(input.subscriptionType); return { type: input.subscriptionType, - label: `Claude ${subscriptionLabel ?? toTitleCaseWords(input.subscriptionType)} Subscription`, + label: formatClaudeSubscriptionAuthLabel(input.subscriptionType), }; } @@ -426,6 +350,13 @@ function nonEmptyProbeString(value: string): string | undefined { return candidate ? candidate : undefined; } +type ClaudeCapabilitiesProbe = { + readonly email: string | undefined; + readonly subscriptionType: string | undefined; + readonly tokenSource: string | undefined; + readonly slashCommands: ReadonlyArray; +}; + function parseClaudeInitializationCommands( commands: ReadonlyArray | undefined, ): ReadonlyArray { @@ -437,9 +368,7 @@ function parseClaudeInitializationCommands( } const description = nonEmptyProbeString(command.description); - const argumentHint = command.argumentHint - ? 
nonEmptyProbeString(command.argumentHint) - : undefined; + const argumentHint = nonEmptyProbeString(command.argumentHint); return [ { @@ -513,34 +442,46 @@ function waitForAbortSignal(signal: AbortSignal): Promise { * This is used as a fallback when `claude auth status` does not include * subscription type information. */ -const probeClaudeCapabilities = (binaryPath: string) => { +const probeClaudeCapabilities = ( + claudeSettings: ClaudeSettings, + environment: NodeJS.ProcessEnv = process.env, +) => { const abort = new AbortController(); - return Effect.tryPromise(async () => { - const q = claudeQuery({ - // Never yield — we only need initialization data, not a conversation. - // This prevents any prompt from reaching the Anthropic API. - // oxlint-disable-next-line require-yield - prompt: (async function* (): AsyncGenerator { - await waitForAbortSignal(abort.signal); - })(), - options: { - persistSession: false, - pathToClaudeCodeExecutable: binaryPath, - // @ts-expect-error SDK 0.2.77 types diverge under exactOptionalPropertyTypes - abortController: abort, - settingSources: ["user", "project", "local"], - allowedTools: [], - stderr: () => {}, - }, + return Effect.gen(function* () { + const claudeEnvironment = yield* makeClaudeEnvironment(claudeSettings, environment); + return yield* Effect.tryPromise(async () => { + const q = claudeQuery({ + // Never yield — we only need initialization data, not a conversation. + // This prevents any prompt from reaching the Anthropic API. 
+ // oxlint-disable-next-line require-yield + prompt: (async function* (): AsyncGenerator { + await waitForAbortSignal(abort.signal); + })(), + options: { + persistSession: false, + pathToClaudeCodeExecutable: claudeSettings.binaryPath, + abortController: abort, + settingSources: ["user", "project", "local"], + allowedTools: [], + env: claudeEnvironment, + stderr: () => {}, + }, + }); + const init = await q.initializationResult(); + const account = init.account as + | { + readonly email?: string; + readonly subscriptionType?: string; + readonly tokenSource?: string; + } + | undefined; + return { + email: account?.email, + subscriptionType: account?.subscriptionType, + tokenSource: account?.tokenSource, + slashCommands: parseClaudeInitializationCommands(init.commands), + } satisfies ClaudeCapabilitiesProbe; }); - const init = await q.initializationResult!(); - const account = (init as { account?: { subscriptionType?: string } }).account; - return { - subscriptionType: account?.subscriptionType, - slashCommands: parseClaudeInitializationCommands( - init.commands as ClaudeSlashCommand[] | undefined, - ), - }; }).pipe( Effect.ensuring( Effect.sync(() => { @@ -556,44 +497,30 @@ const probeClaudeCapabilities = (binaryPath: string) => { ); }; -const runClaudeCommand = (args: ReadonlyArray) => - Effect.gen(function* () { - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; - const claudeSettings = yield* Effect.service(ServerSettingsService).pipe( - Effect.flatMap((service) => service.getSettings), - Effect.map((settings) => settings.providers.claudeAgent), - ); - const command = ChildProcess.make(claudeSettings.binaryPath.trim() || "claude", [...args], { - shell: process.platform === "win32", - }); - - const child = yield* spawner.spawn(command); - const [stdout, stderr, exitCode] = yield* Effect.all( - [ - collectStreamAsString(child.stdout), - collectStreamAsString(child.stderr), - child.exitCode.pipe(Effect.map(Number)), - ], - { concurrency: "unbounded" 
}, - ); - - return { stdout, stderr, code: exitCode } satisfies CommandResult; - }).pipe(Effect.scoped); +const runClaudeCommand = Effect.fn("runClaudeCommand")(function* ( + claudeSettings: ClaudeSettings, + args: ReadonlyArray, + environment: NodeJS.ProcessEnv = process.env, +) { + const claudeEnvironment = yield* makeClaudeEnvironment(claudeSettings, environment); + const command = ChildProcess.make(claudeSettings.binaryPath, [...args], { + env: claudeEnvironment, + shell: process.platform === "win32", + }); + return yield* spawnAndCollect(claudeSettings.binaryPath, command); +}); export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")(function* ( - resolveSubscriptionType?: (binaryPath: string) => Effect.Effect, - resolveSlashCommands?: ( - binaryPath: string, - ) => Effect.Effect | undefined>, + claudeSettings: ClaudeSettings, + resolveCapabilities?: ( + claudeSettings: ClaudeSettings, + ) => Effect.Effect, + environment: NodeJS.ProcessEnv = process.env, ): Effect.fn.Return< - ServerProvider, - ServerSettingsError, - ChildProcessSpawner.ChildProcessSpawner | ServerSettingsService + ServerProviderDraft, + never, + ChildProcessSpawner.ChildProcessSpawner | Path.Path > { - const claudeSettings = yield* Effect.service(ServerSettingsService).pipe( - Effect.flatMap((service) => service.getSettings), - Effect.map((settings) => settings.providers.claudeAgent), - ); const checkedAt = new Date().toISOString(); const allModels = providerModelsFromSettings( BUILT_IN_MODELS, @@ -604,7 +531,6 @@ export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")( if (!claudeSettings.enabled) { return buildServerProvider({ - provider: PROVIDER, presentation: CLAUDE_PRESENTATION, enabled: false, checkedAt, @@ -619,7 +545,7 @@ export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")( }); } - const versionProbe = yield* runClaudeCommand(["--version"]).pipe( + const versionProbe = yield* runClaudeCommand(claudeSettings, 
["--version"], environment).pipe( Effect.timeoutOption(DEFAULT_TIMEOUT_MS), Effect.result, ); @@ -627,7 +553,6 @@ export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")( if (Result.isFailure(versionProbe)) { const error = versionProbe.failure; return buildServerProvider({ - provider: PROVIDER, presentation: CLAUDE_PRESENTATION, enabled: claudeSettings.enabled, checkedAt, @@ -646,7 +571,6 @@ export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")( if (Option.isNone(versionProbe.success)) { return buildServerProvider({ - provider: PROVIDER, presentation: CLAUDE_PRESENTATION, enabled: claudeSettings.enabled, checkedAt, @@ -667,7 +591,6 @@ export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")( if (version.code !== 0) { const detail = detailFromResult(version); return buildServerProvider({ - provider: PROVIDER, presentation: CLAUDE_PRESENTATION, enabled: claudeSettings.enabled, checkedAt, @@ -694,66 +617,14 @@ export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")( ? undefined : formatClaudeOpus47UpgradeMessage(parsedVersion); - const slashCommands = - (resolveSlashCommands - ? yield* resolveSlashCommands(claudeSettings.binaryPath).pipe( - Effect.orElseSucceed(() => undefined), - ) - : undefined) ?? []; + const capabilities = resolveCapabilities + ? yield* resolveCapabilities(claudeSettings).pipe(Effect.orElseSucceed(() => undefined)) + : undefined; + const slashCommands = capabilities?.slashCommands ?? []; const dedupedSlashCommands = dedupeSlashCommands(slashCommands); - // ── Auth check + subscription detection ──────────────────────────── - - const authProbe = yield* runClaudeCommand(["auth", "status"]).pipe( - Effect.timeoutOption(AUTH_PROBE_TIMEOUT_MS), - Effect.result, - ); - - // Determine subscription type from multiple sources (cheapest first): - // 1. `claude auth status` JSON output (may or may not contain it) - // 2. 
Cached SDK probe (spawns a Claude process on miss, reads - // `initializationResult()` for account metadata, then aborts - // immediately — no API tokens are consumed) - - let subscriptionType: string | undefined; - let authMethod: string | undefined; - - if (Result.isSuccess(authProbe) && Option.isSome(authProbe.success)) { - subscriptionType = extractSubscriptionTypeFromOutput(authProbe.success.value); - authMethod = extractClaudeAuthMethodFromOutput(authProbe.success.value); - } - - if (!subscriptionType && resolveSubscriptionType) { - subscriptionType = yield* resolveSubscriptionType(claudeSettings.binaryPath); - } - - // ── Handle auth results (same logic as before, adjusted models) ── - - if (Result.isFailure(authProbe)) { - const error = authProbe.failure; - return buildServerProvider({ - provider: PROVIDER, - presentation: CLAUDE_PRESENTATION, - enabled: claudeSettings.enabled, - checkedAt, - models, - slashCommands: dedupedSlashCommands, - probe: { - installed: true, - version: parsedVersion, - status: "warning", - auth: { status: "unknown" }, - message: - error instanceof Error - ? `Could not verify Claude authentication status: ${error.message}.` - : "Could not verify Claude authentication status.", - }, - }); - } - - if (Option.isNone(authProbe.success)) { + if (!capabilities) { return buildServerProvider({ - provider: PROVIDER, presentation: CLAUDE_PRESENTATION, enabled: claudeSettings.enabled, checkedAt, @@ -764,15 +635,16 @@ export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")( version: parsedVersion, status: "warning", auth: { status: "unknown" }, - message: "Could not verify Claude authentication status. 
Timed out while running command.", + message: "Could not verify Claude authentication status from initialization result.", }, }); } - const parsed = parseClaudeAuthStatusFromOutput(authProbe.success.value); - const authMetadata = claudeAuthMetadata({ subscriptionType, authMethod }); + const authMetadata = claudeAuthMetadata({ + subscriptionType: capabilities.subscriptionType, + authMethod: capabilities.tokenSource, + }); return buildServerProvider({ - provider: PROVIDER, presentation: CLAUDE_PRESENTATION, enabled: claudeSettings.enabled, checkedAt, @@ -781,21 +653,18 @@ export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")( probe: { installed: true, version: parsedVersion, - status: parsed.status, + status: "ready", auth: { - ...parsed.auth, + status: "authenticated", + ...(capabilities.email ? { email: capabilities.email } : {}), ...(authMetadata ? authMetadata : {}), }, - ...(parsed.message - ? { message: parsed.message } - : opus47UpgradeMessage - ? { message: opus47UpgradeMessage } - : {}), + ...(opus47UpgradeMessage ? 
{ message: opus47UpgradeMessage } : {}), }, }); }); -const makePendingClaudeProvider = (claudeSettings: ClaudeSettings): ServerProvider => { +export const makePendingClaudeProvider = (claudeSettings: ClaudeSettings): ServerProviderDraft => { const checkedAt = new Date().toISOString(); const models = providerModelsFromSettings( BUILT_IN_MODELS, @@ -806,7 +675,6 @@ const makePendingClaudeProvider = (claudeSettings: ClaudeSettings): ServerProvid if (!claudeSettings.enabled) { return buildServerProvider({ - provider: PROVIDER, presentation: CLAUDE_PRESENTATION, enabled: false, checkedAt, @@ -822,7 +690,6 @@ const makePendingClaudeProvider = (claudeSettings: ClaudeSettings): ServerProvid } return buildServerProvider({ - provider: PROVIDER, presentation: CLAUDE_PRESENTATION, enabled: true, checkedAt, @@ -837,43 +704,4 @@ const makePendingClaudeProvider = (claudeSettings: ClaudeSettings): ServerProvid }); }; -export const ClaudeProviderLive = Layer.effect( - ClaudeProvider, - Effect.gen(function* () { - const serverSettings = yield* ServerSettingsService; - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; - - const subscriptionProbeCache = yield* Cache.make({ - capacity: 1, - timeToLive: Duration.minutes(5), - lookup: (binaryPath: string) => probeClaudeCapabilities(binaryPath), - }); - - const checkProvider = checkClaudeProviderStatus( - (binaryPath) => - Cache.get(subscriptionProbeCache, binaryPath).pipe( - Effect.map((probe) => probe?.subscriptionType), - ), - (binaryPath) => - Cache.get(subscriptionProbeCache, binaryPath).pipe( - Effect.map((probe) => probe?.slashCommands), - ), - ).pipe( - Effect.provideService(ServerSettingsService, serverSettings), - Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), - ); - - return yield* makeManagedServerProvider({ - getSettings: serverSettings.getSettings.pipe( - Effect.map((settings) => settings.providers.claudeAgent), - Effect.orDie, - ), - streamSettings: serverSettings.streamChanges.pipe( 
- Stream.map((settings) => settings.providers.claudeAgent), - ), - haveSettingsChanged: (previous, next) => !Equal.equals(previous, next), - initialSnapshot: makePendingClaudeProvider, - checkProvider, - }); - }), -); +export { probeClaudeCapabilities }; diff --git a/apps/server/src/provider/Layers/CodexAdapter.test.ts b/apps/server/src/provider/Layers/CodexAdapter.test.ts index bd7b3bddaef..4df4fb5d32f 100644 --- a/apps/server/src/provider/Layers/CodexAdapter.test.ts +++ b/apps/server/src/provider/Layers/CodexAdapter.test.ts @@ -4,7 +4,10 @@ import os from "node:os"; import path from "node:path"; import { ApprovalRequestId, + CodexSettings, EventId, + ProviderDriverKind, + ProviderInstanceId, ProviderItemId, type ProviderApprovalDecision, type ProviderEvent, @@ -14,16 +17,17 @@ import { ThreadId, TurnId, } from "@t3tools/contracts"; +import { createModelSelection } from "@t3tools/shared/model"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { it, vi } from "@effect/vitest"; -import { Effect, Exit, Fiber, Layer, Option, Queue, Scope, Stream } from "effect"; +import { Context, Effect, Exit, Fiber, Layer, Option, Queue, Schema, Scope, Stream } from "effect"; import * as CodexErrors from "effect-codex-app-server/errors"; import { ServerConfig } from "../../config.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; import { ProviderAdapterValidationError } from "../Errors.ts"; -import { CodexAdapter } from "../Services/CodexAdapter.ts"; +import type { CodexAdapterShape } from "../Services/CodexAdapter.ts"; import { ProviderSessionDirectory } from "../Services/ProviderSessionDirectory.ts"; import { type CodexSessionRuntimeOptions, @@ -31,7 +35,12 @@ import { type CodexSessionRuntimeShape, type CodexThreadSnapshot, } from "./CodexSessionRuntime.ts"; -import { fetchCodexUsage, makeCodexAdapterLive } from "./CodexAdapter.ts"; +import { makeCodexAdapter } from "./CodexAdapter.ts"; + +// Test-local service tag so the rest of the 
file can keep using `yield* CodexAdapter`. +class CodexAdapter extends Context.Service()( + "test/CodexAdapter", +) {} const asThreadId = (value: string): ThreadId => ThreadId.make(value); const asTurnId = (value: string): TurnId => TurnId.make(value); @@ -44,7 +53,7 @@ class FakeCodexRuntime implements CodexSessionRuntimeShape { public readonly startImpl = vi.fn(() => Promise.resolve({ - provider: "codex" as const, + provider: ProviderDriverKind.make("codex"), status: "ready" as const, runtimeMode: this.options.runtimeMode, threadId: this.options.threadId, @@ -202,7 +211,15 @@ const providerSessionDirectoryTestLayer = Layer.succeed(ProviderSessionDirectory const validationRuntimeFactory = makeRuntimeFactory(); const validationLayer = it.layer( - makeCodexAdapterLive({ makeRuntime: validationRuntimeFactory.factory }).pipe( + Layer.effect( + CodexAdapter, + Effect.gen(function* () { + const codexConfig = Schema.decodeSync(CodexSettings)({}); + return yield* makeCodexAdapter(codexConfig, { + makeRuntime: validationRuntimeFactory.factory, + }); + }), + ).pipe( Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge(providerSessionDirectoryTestLayer), @@ -216,7 +233,7 @@ validationLayer("CodexAdapterLive validation", (it) => { const adapter = yield* CodexAdapter; const result = yield* adapter .startSession({ - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), threadId: asThreadId("thread-1"), runtimeMode: "full-access", }) @@ -226,7 +243,7 @@ validationLayer("CodexAdapterLive validation", (it) => { assert.deepStrictEqual( result.failure, new ProviderAdapterValidationError({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), operation: "startSession", issue: "Expected provider 'codex' but received 'claudeAgent'.", }), @@ -234,22 +251,17 @@ validationLayer("CodexAdapterLive validation", (it) => { 
assert.equal(validationRuntimeFactory.factory.mock.calls.length, 0); }), ); - it.effect("maps codex model options before starting a session", () => Effect.gen(function* () { validationRuntimeFactory.factory.mockClear(); const adapter = yield* CodexAdapter; yield* adapter.startSession({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), - modelSelection: { - provider: "codex", - model: "gpt-5.3-codex", - options: { - fastMode: true, - }, - }, + modelSelection: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.3-codex", [ + { id: "fastMode", value: true }, + ]), runtimeMode: "full-access", }); @@ -257,26 +269,26 @@ validationLayer("CodexAdapterLive validation", (it) => { binaryPath: "codex", cwd: process.cwd(), model: "gpt-5.3-codex", + providerInstanceId: ProviderInstanceId.make("codex"), serviceTier: "fast", threadId: asThreadId("thread-1"), runtimeMode: "full-access", }); }), ); - - it.effect("fetchCodexUsage returns empty usage after manager removal", () => - Effect.gen(function* () { - const usage = yield* Effect.promise(() => fetchCodexUsage()); - assert.equal(usage.provider, "codex"); - assert.equal(usage.quota, undefined); - assert.equal(usage.quotas, undefined); - }), - ); }); const sessionRuntimeFactory = makeRuntimeFactory(); const sessionErrorLayer = it.layer( - makeCodexAdapterLive({ makeRuntime: sessionRuntimeFactory.factory }).pipe( + Layer.effect( + CodexAdapter, + Effect.gen(function* () { + const codexConfig = Schema.decodeSync(CodexSettings)({}); + return yield* makeCodexAdapter(codexConfig, { + makeRuntime: sessionRuntimeFactory.factory, + }); + }), + ).pipe( Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge(providerSessionDirectoryTestLayer), @@ -307,7 +319,7 @@ sessionErrorLayer("CodexAdapterLive session errors", (it) => { Effect.gen(function* () { const adapter = yield* CodexAdapter; 
yield* adapter.startSession({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("sess-missing"), runtimeMode: "full-access", }); @@ -319,14 +331,10 @@ sessionErrorLayer("CodexAdapterLive session errors", (it) => { adapter.sendTurn({ threadId: asThreadId("sess-missing"), input: "hello", - modelSelection: { - provider: "codex", - model: "gpt-5.3-codex", - options: { - reasoningEffort: "high", - fastMode: true, - }, - }, + modelSelection: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.3-codex", [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: true }, + ]), attachments: [], }), ); @@ -339,11 +347,74 @@ sessionErrorLayer("CodexAdapterLive session errors", (it) => { }); }), ); + + it.effect("maps codex model options for the adapter's bound custom instance id", () => { + const customInstanceId = ProviderInstanceId.make("codex_personal"); + const customRuntimeFactory = makeRuntimeFactory(); + const customLayer = Layer.effect( + CodexAdapter, + Effect.gen(function* () { + const codexConfig = Schema.decodeSync(CodexSettings)({}); + return yield* makeCodexAdapter(codexConfig, { + instanceId: customInstanceId, + makeRuntime: customRuntimeFactory.factory, + }); + }), + ).pipe( + Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), + Layer.provideMerge(ServerSettingsService.layerTest()), + Layer.provideMerge(providerSessionDirectoryTestLayer), + Layer.provideMerge(NodeServices.layer), + ); + + return Effect.gen(function* () { + const adapter = yield* CodexAdapter; + yield* adapter.startSession({ + provider: ProviderDriverKind.make("codex"), + threadId: asThreadId("sess-custom-instance"), + runtimeMode: "full-access", + }); + const runtime = customRuntimeFactory.lastRuntime; + assert.ok(runtime); + runtime.sendTurnImpl.mockClear(); + + yield* Effect.ignore( + adapter.sendTurn({ + threadId: asThreadId("sess-custom-instance"), + input: "hello", + modelSelection: createModelSelection( + 
ProviderInstanceId.make("codex_personal"), + "gpt-5.3-codex", + [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: true }, + ], + ), + attachments: [], + }), + ); + + assert.deepStrictEqual(runtime.sendTurnImpl.mock.calls[0]?.[0], { + input: "hello", + model: "gpt-5.3-codex", + effort: "high", + serviceTier: "fast", + }); + }).pipe(Effect.provide(customLayer)); + }); }); const lifecycleRuntimeFactory = makeRuntimeFactory(); const lifecycleLayer = it.layer( - makeCodexAdapterLive({ makeRuntime: lifecycleRuntimeFactory.factory }).pipe( + Layer.effect( + CodexAdapter, + Effect.gen(function* () { + const codexConfig = Schema.decodeSync(CodexSettings)({}); + return yield* makeCodexAdapter(codexConfig, { + makeRuntime: lifecycleRuntimeFactory.factory, + }); + }), + ).pipe( Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge(providerSessionDirectoryTestLayer), @@ -355,7 +426,7 @@ function startLifecycleRuntime() { return Effect.gen(function* () { const adapter = yield* CodexAdapter; yield* adapter.startSession({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), runtimeMode: "full-access", }); @@ -374,7 +445,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { const event: ProviderEvent = { id: asEventId("evt-msg-complete"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), method: "item/completed", threadId: asThreadId("thread-1"), @@ -416,7 +487,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { const event: ProviderEvent = { id: asEventId("evt-plan-complete"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), method: "item/completed", threadId: asThreadId("thread-1"), @@ -457,7 +528,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => 
{ yield* runtime.emit({ id: asEventId("evt-plan-delta"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), method: "item/plan/delta", threadId: asThreadId("thread-1"), @@ -494,7 +565,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { const event: ProviderEvent = { id: asEventId("evt-session-closed"), kind: "session", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "session/closed", @@ -525,7 +596,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { yield* runtime.emit({ id: asEventId("evt-retryable-error"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "error", @@ -563,7 +634,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { yield* runtime.emit({ id: asEventId("evt-process-stderr"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "process/stderr", @@ -597,7 +668,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { yield* runtime.emit({ id: asEventId("evt-process-stderr-websocket"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "process/stderr", @@ -633,7 +704,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { const event: ProviderEvent = { id: asEventId("evt-request-resolved"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "serverRequest/resolved", @@ -668,7 +739,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { const event: ProviderEvent = { id: 
asEventId("evt-file-read-request-resolved"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "serverRequest/resolved", @@ -703,7 +774,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { const event: ProviderEvent = { id: asEventId("evt-user-input-empty"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "item/tool/requestUserInput/answered", @@ -743,7 +814,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { const event: ProviderEvent = { id: asEventId("evt-windows-sandbox-failed"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "windowsSandbox/setupCompleted", @@ -788,7 +859,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { yield* runtime.emit({ id: asEventId("evt-user-input-requested"), kind: "request", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "item/tool/requestUserInput", @@ -815,7 +886,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { yield* runtime.emit({ id: asEventId("evt-user-input-resolved"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), createdAt: new Date().toISOString(), method: "item/tool/requestUserInput/answered", @@ -855,7 +926,7 @@ lifecycleLayer("CodexAdapterLive lifecycle", (it) => { yield* runtime.emit({ id: asEventId("evt-codex-thread-token-usage-updated"), kind: "notification", - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-1"), turnId: asTurnId("turn-1"), createdAt: new Date().toISOString(), @@ -914,7 +985,15 @@ 
lifecycleLayer("CodexAdapterLive lifecycle", (it) => { const scopedLifecycleRuntimeFactory = makeScopedRuntimeFactory(); const scopedLifecycleLayer = it.layer( - makeCodexAdapterLive({ makeRuntime: scopedLifecycleRuntimeFactory.factory }).pipe( + Layer.effect( + CodexAdapter, + Effect.gen(function* () { + const codexConfig = Schema.decodeSync(CodexSettings)({}); + return yield* makeCodexAdapter(codexConfig, { + makeRuntime: scopedLifecycleRuntimeFactory.factory, + }); + }), + ).pipe( Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge(providerSessionDirectoryTestLayer), @@ -929,7 +1008,7 @@ scopedLifecycleLayer("CodexAdapterLive scoped lifecycle", (it) => { const adapter = yield* CodexAdapter; yield* adapter.startSession({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-stop"), runtimeMode: "full-access", }); @@ -950,7 +1029,15 @@ scopedLifecycleLayer("CodexAdapterLive scoped lifecycle", (it) => { const scopedFailureRuntimeFactory = makeScopedRuntimeFactory({ failConstruction: true }); const scopedFailureLayer = it.layer( - makeCodexAdapterLive({ makeRuntime: scopedFailureRuntimeFactory.factory }).pipe( + Layer.effect( + CodexAdapter, + Effect.gen(function* () { + const codexConfig = Schema.decodeSync(CodexSettings)({}); + return yield* makeCodexAdapter(codexConfig, { + makeRuntime: scopedFailureRuntimeFactory.factory, + }); + }), + ).pipe( Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge(providerSessionDirectoryTestLayer), @@ -966,7 +1053,7 @@ scopedFailureLayer("CodexAdapterLive scoped startup failure", (it) => { const result = yield* adapter .startSession({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-fail"), runtimeMode: "full-access", }) @@ -991,10 +1078,16 @@ 
it.effect("flushes managed native logs when the adapter layer shuts down", () =>
   let scopeClosed = false;
 
   try {
-    const layer = makeCodexAdapterLive({
-      makeRuntime: runtimeFactory.factory,
-      nativeEventLogPath: basePath,
-    }).pipe(
+    const layer = Layer.effect(
+      CodexAdapter,
+      Effect.gen(function* () {
+        const codexConfig = Schema.decodeSync(CodexSettings)({});
+        return yield* makeCodexAdapter(codexConfig, {
+          makeRuntime: runtimeFactory.factory,
+          nativeEventLogPath: basePath,
+        });
+      }),
+    ).pipe(
       Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())),
       Layer.provideMerge(ServerSettingsService.layerTest()),
       Layer.provideMerge(providerSessionDirectoryTestLayer),
@@ -1004,7 +1097,7 @@ it.effect("flushes managed native logs when the adapter layer shuts down", () =>
     const adapter = yield* Effect.service(CodexAdapter).pipe(Effect.provide(context));
 
     yield* adapter.startSession({
-      provider: "codex",
+      provider: ProviderDriverKind.make("codex"),
       threadId: asThreadId("thread-logger"),
       runtimeMode: "full-access",
     });
@@ -1016,7 +1109,7 @@ it.effect("flushes managed native logs when the adapter layer shuts down", () =>
     yield* runtime.emit({
       id: asEventId("evt-native-log"),
       kind: "notification",
-      provider: "codex",
+      provider: ProviderDriverKind.make("codex"),
       threadId: asThreadId("thread-logger"),
       createdAt: new Date().toISOString(),
       method: "process/stderr",
@@ -1030,7 +1123,7 @@ it.effect("flushes managed native logs when the adapter layer shuts down", () =>
     const threadLogPath = path.join(tempDir, "thread-logger.log");
     assert.equal(fs.existsSync(threadLogPath), true);
     const contents = fs.readFileSync(threadLogPath, "utf8");
-    assert.match(contents, /NATIVE: .*"message":"native flush test"/);
+    assert.match(contents, /NATIVE: .*"message":"native flush test"/);
     } finally {
       if (!scopeClosed) {
         yield* Scope.close(scope, Exit.void);
diff --git a/apps/server/src/provider/Layers/CodexAdapter.ts b/apps/server/src/provider/Layers/CodexAdapter.ts
index 
d65741e45f5..5186dc29627 100644 --- a/apps/server/src/provider/Layers/CodexAdapter.ts +++ b/apps/server/src/provider/Layers/CodexAdapter.ts @@ -10,7 +10,10 @@ import { type CanonicalItemType, type CanonicalRequestType, + type CodexSettings, + ProviderDriverKind, type ProviderEvent, + ProviderInstanceId, type ProviderRuntimeEvent, type ProviderRequestKind, type ThreadTokenUsageSnapshot, @@ -21,11 +24,16 @@ import { ThreadId, ProviderSendTurnInput, } from "@t3tools/contracts"; -import { Effect, Exit, Fiber, FileSystem, Layer, Queue, Schema, Scope, Stream } from "effect"; +import { Effect, Exit, Fiber, FileSystem, Queue, Schema, Scope, Stream } from "effect"; import { ChildProcessSpawner } from "effect/unstable/process"; import * as CodexErrors from "effect-codex-app-server/errors"; import * as EffectCodexSchema from "effect-codex-app-server/schema"; +import { + getModelSelectionBooleanOptionValue, + getModelSelectionStringOptionValue, +} from "@t3tools/shared/model"; + import { ProviderAdapterRequestError, ProviderAdapterProcessError, @@ -34,12 +42,9 @@ import { ProviderAdapterValidationError, type ProviderAdapterError, } from "../Errors.ts"; -import { CodexAdapter, type CodexAdapterShape } from "../Services/CodexAdapter.ts"; +import { type CodexAdapterShape } from "../Services/CodexAdapter.ts"; import { resolveAttachmentPath } from "../../attachmentStore.ts"; import { ServerConfig } from "../../config.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; -import type { ProviderUsageResult } from "@t3tools/contracts"; import { CodexResumeCursorSchema, CodexSessionRuntimeThreadIdMissingError, @@ -50,9 +55,11 @@ import { } from "./CodexSessionRuntime.ts"; import { type EventNdjsonLogger, makeEventNdjsonLogger } from "./EventNdjsonLogger.ts"; -const PROVIDER = "codex" as const; +const PROVIDER = ProviderDriverKind.make("codex"); export interface CodexAdapterLiveOptions { + 
readonly instanceId?: ProviderInstanceId;
+  readonly environment?: NodeJS.ProcessEnv;
   readonly makeRuntime?: (
     options: CodexSessionRuntimeOptions,
   ) => Effect.Effect<
@@ -317,15 +324,11 @@ function toUserInputQuestions(questions: ReadonlyArray
       .map((option) => {
         const label = trimText(option.label);
-          // Description is optional — keep label-only options rather than
-          // dropping them, otherwise Codex tool prompts that only provide
-          // labels (no per-option description) lose all choices and the
-          // surrounding `options.length === 0` guard rejects the question.
         const description = trimText(option.description);
         if (!label) {
           return undefined;
         }
         return description ? { label, description } : { label };
       })
       .filter((option) => option !== undefined) ?? [];
@@ -1319,9 +1322,20 @@ function mapToRuntimeEvents(
   return [];
 }
 
-const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* (
+/**
+ * Build a Codex provider adapter bound to a specific `CodexSettings` payload.
+ *
+ * The adapter is a captured closure over `codexConfig` — the `binaryPath` and
+ * `homePath` are read from that payload, not from `ServerSettingsService`.
+ * This is what makes multi-instance routing possible: each `ProviderInstance`
+ * in the registry owns its own closure with its own config, so two Codex
+ * instances with different `homePath`s cannot step on each other.
+ */
+export const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* (
+  codexConfig: CodexSettings,
   options?: CodexAdapterLiveOptions,
 ) {
+  const boundInstanceId = options?.instanceId ?? 
ProviderInstanceId.make("codex"); const fileSystem = yield* FileSystem.FileSystem; const childProcessSpawner = yield* ChildProcessSpawner.ChildProcessSpawner; const serverConfig = yield* Effect.service(ServerConfig); @@ -1334,7 +1348,6 @@ const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* ( : undefined); const managedNativeEventLogger = options?.nativeEventLogger === undefined ? nativeEventLogger : undefined; - const serverSettingsService = yield* ServerSettingsService; const runtimeEventQueue = yield* Queue.unbounded(); const sessions = new Map(); @@ -1354,31 +1367,22 @@ const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* ( yield* Effect.suspend(() => stopSessionInternal(existing)); } - const codexSettings = yield* serverSettingsService.getSettings.pipe( - Effect.map((settings) => settings.providers.codex), - Effect.mapError( - (error) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: error.message, - cause: error, - }), - ), - ); const runtimeInput: CodexSessionRuntimeOptions = { threadId: input.threadId, + providerInstanceId: boundInstanceId, cwd: input.cwd ?? process.cwd(), - binaryPath: codexSettings.binaryPath, - ...(codexSettings.homePath ? { homePath: codexSettings.homePath } : {}), + binaryPath: codexConfig.binaryPath, + ...(options?.environment ? { environment: options.environment } : {}), + ...(codexConfig.homePath ? { homePath: codexConfig.homePath } : {}), ...(Schema.is(CodexResumeCursorSchema)(input.resumeCursor) ? { resumeCursor: input.resumeCursor } : {}), runtimeMode: input.runtimeMode, - ...(input.modelSelection?.provider === "codex" + ...(input.modelSelection?.instanceId === boundInstanceId ? { model: input.modelSelection.model } : {}), - ...(input.modelSelection?.provider === "codex" && input.modelSelection.options?.fastMode + ...(input.modelSelection?.instanceId === boundInstanceId && + getModelSelectionBooleanOptionValue(input.modelSelection, "fastMode") === true ? 
{ serviceTier: "fast" } : {}),
     };
@@ -1402,10 +1406,6 @@ const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* (
       ),
     );
 
-    // Keep the Codex event pump in the session scope so it is
-    // interrupted automatically when the session's scope closes,
-    // rather than leaking into the surrounding `Effect.scoped` fiber
-    // which exits as soon as `startSession` returns.
     const eventFiber = yield* Stream.runForEach(runtime.events, (event) =>
       Effect.gen(function* () {
         yield* writeNativeEvent(event);
@@ -1421,7 +1421,7 @@ const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* (
         }
         yield* Queue.offerAll(runtimeEventQueue, runtimeEvents);
       }),
-    ).pipe(Effect.forkIn(sessionScope));
+    ).pipe(Effect.forkIn(sessionScope));
 
     const started = yield* runtime.start().pipe(
       Effect.mapError(
@@ -1495,19 +1495,26 @@ const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* (
     );
 
     const session = yield* requireSession(input.threadId);
+    const reasoningEffort =
+      input.modelSelection?.instanceId === boundInstanceId
+        ? getModelSelectionStringOptionValue(input.modelSelection, "reasoningEffort")
+        : undefined;
+    const fastMode =
+      input.modelSelection?.instanceId === boundInstanceId
+        ? getModelSelectionBooleanOptionValue(input.modelSelection, "fastMode")
+        : undefined;
     return yield* session.runtime
       .sendTurn({
         ...(input.input !== undefined ? { input: input.input } : {}),
-        ...(input.modelSelection?.provider === "codex"
+        ...(input.modelSelection?.instanceId === boundInstanceId
           ? { model: input.modelSelection.model }
           : {}),
-        ...(input.modelSelection?.provider === "codex" &&
-        input.modelSelection.options?.reasoningEffort !== undefined
-          ? { effort: input.modelSelection.options.reasoningEffort }
-          : {}),
-        ...(input.modelSelection?.provider === "codex" && input.modelSelection.options?.fastMode
-          ? { serviceTier: "fast" }
+        ...(reasoningEffort
+          ? {
+              effort: reasoningEffort as EffectCodexSchema.V2TurnStartParams__ReasoningEffort,
+            }
           : {}),
+        ...(fastMode === true ? 
{ serviceTier: "fast" } : {}), ...(input.interactionMode !== undefined ? { interactionMode: input.interactionMode } : {}), ...(codexAttachments.length > 0 ? { attachments: codexAttachments } : {}), }) @@ -1653,7 +1660,9 @@ const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* ( return { provider: PROVIDER, - capabilities: getProviderCapabilities(PROVIDER), + capabilities: { + sessionModelSwitch: "in-session", + }, startSession, sendTurn, interruptTurn, @@ -1671,18 +1680,9 @@ const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* ( } satisfies CodexAdapterShape; }); -/** - * Fetches Codex usage information. After upstream's effect-codex-app-server - * refactor (PR #1942), the manager-based rate limit readout is no longer - * available at the adapter layer. Returns an empty usage record until - * rate-limit events are surfaced through the new session runtime. - */ -export async function fetchCodexUsage(): Promise { - return { provider: "codex" }; -} - -export const CodexAdapterLive = Layer.effect(CodexAdapter, makeCodexAdapter()); - -export function makeCodexAdapterLive(options?: CodexAdapterLiveOptions) { - return Layer.effect(CodexAdapter, makeCodexAdapter(options)); -} +// NOTE: the old `CodexAdapterLive` / `makeCodexAdapterLive` singleton Layer +// exports have been removed as part of the per-instance-driver refactor. +// `makeCodexAdapter(codexConfig, options?)` is now invoked directly by +// `CodexDriver.create()` for each configured instance; downstream consumers +// (server bootstrap, integration harness, this module's tests) will be +// migrated to the registry in a follow-up pass. 
diff --git a/apps/server/src/provider/Layers/CodexProvider.ts b/apps/server/src/provider/Layers/CodexProvider.ts index 4dee22bdd3c..0917d842a6d 100644 --- a/apps/server/src/provider/Layers/CodexProvider.ts +++ b/apps/server/src/provider/Layers/CodexProvider.ts @@ -1,15 +1,4 @@ -import { - DateTime, - Duration, - Effect, - Equal, - Layer, - Option, - Result, - Schema, - Stream, - Types, -} from "effect"; +import { DateTime, Duration, Effect, Layer, Option, Result, Schema, Types } from "effect"; import { ChildProcessSpawner } from "effect/unstable/process"; import * as CodexClient from "effect-codex-app-server/client"; import * as CodexSchema from "effect-codex-app-server/schema"; @@ -25,16 +14,18 @@ import type { } from "@t3tools/contracts"; import { ServerSettingsError } from "@t3tools/contracts"; -import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; -import { buildServerProvider } from "../providerSnapshot.ts"; -import { CodexProvider } from "../Services/CodexProvider.ts"; -import { ServerConfig } from "../../config.ts"; +import { createModelCapabilities } from "@t3tools/shared/model"; + +import { buildServerProvider, type ServerProviderDraft } from "../providerSnapshot.ts"; import { expandHomePath } from "../../pathExpansion.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +import { scopedSafeTeardown } from "./scopedSafeTeardown.ts"; import packageJson from "../../../package.json" with { type: "json" }; -const PROVIDER = "codex" as const; const PROVIDER_PROBE_TIMEOUT_MS = 8_000; +const CODEX_PRESENTATION = { + displayName: "Codex", + showInteractionModeToggle: true, +} as const; export interface CodexAppServerProviderSnapshot { readonly account: CodexSchema.V2GetAccountResponse; @@ -85,20 +76,52 @@ function codexAccountAuthLabel(account: CodexSchema.V2GetAccountResponse["accoun } } +function codexAccountEmail(account: CodexSchema.V2GetAccountResponse["account"]) { + if (!account || account.type !== "chatgpt") return 
undefined; + return account.email; +} + function mapCodexModelCapabilities( model: CodexSchema.V2ModelListResponse__Model, ): ModelCapabilities { - return { - reasoningEffortLevels: model.supportedReasoningEfforts.map(({ reasoningEffort }) => ({ - value: reasoningEffort, - label: REASONING_EFFORT_LABELS[reasoningEffort], - ...(reasoningEffort === model.defaultReasoningEffort ? { isDefault: true } : {}), - })), - supportsFastMode: (model.additionalSpeedTiers ?? []).includes("fast"), - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }; + const reasoningOptions = model.supportedReasoningEfforts.map(({ reasoningEffort }) => + reasoningEffort === model.defaultReasoningEffort + ? { + id: reasoningEffort, + label: REASONING_EFFORT_LABELS[reasoningEffort], + isDefault: true, + } + : { + id: reasoningEffort, + label: REASONING_EFFORT_LABELS[reasoningEffort], + }, + ); + const defaultReasoning = reasoningOptions.find((option) => option.isDefault)?.id; + const supportsFastMode = (model.additionalSpeedTiers ?? []).includes("fast"); + return createModelCapabilities({ + optionDescriptors: [ + ...(reasoningOptions.length > 0 + ? [ + { + id: "reasoningEffort", + label: "Reasoning", + type: "select" as const, + options: reasoningOptions, + ...(defaultReasoning ? { currentValue: defaultReasoning } : {}), + }, + ] + : []), + ...(supportsFastMode + ? [ + { + id: "fastMode", + label: "Fast Mode", + type: "boolean" as const, + }, + ] + : []), + ], + }); } const toDisplayName = (model: CodexSchema.V2ModelListResponse__Model): string => { @@ -213,18 +236,33 @@ export function buildCodexInitializeParams(): CodexSchema.V1InitializeParams { }; } +// Wrapped with `scopedSafeTeardown("codex-probe")` rather than the usual +// `Effect.scoped` so that a defect from the `Layer.build` finalizer (e.g. +// `ChildProcess.kill` throwing because the `codex app-server` child exited +// early) cannot override a successful probe body. 
Without this guard the +// defect bubbles past `Effect.result` in `checkCodexProviderStatus`, dies +// `refreshOneSource`, and `providersRef` never receives the snapshot. const probeCodexAppServerProvider = Effect.fn("probeCodexAppServerProvider")(function* (input: { readonly binaryPath: string; readonly homePath?: string; readonly cwd: string; readonly customModels?: ReadonlyArray; + readonly environment?: NodeJS.ProcessEnv; }) { + // `~` is not shell-expanded when env vars are set via `child_process.spawn`, + // so `CODEX_HOME=~/.codex_work` would reach codex verbatim and trip + // "CODEX_HOME points to '~/.codex_work', but that path does not exist". + // Expand here for parity with `CodexTextGeneration`/`CodexSessionRuntime`. + const resolvedHomePath = input.homePath ? expandHomePath(input.homePath) : undefined; const clientContext = yield* Layer.build( CodexClient.layerCommand({ command: input.binaryPath, args: ["app-server"], cwd: input.cwd, - ...(input.homePath ? { env: { CODEX_HOME: expandHomePath(input.homePath) } } : {}), + env: { + ...(input.environment ?? process.env), + ...(resolvedHomePath ? { CODEX_HOME: resolvedHomePath } : {}), + }, }), ); const client = yield* Effect.service(CodexClient.CodexAppServerClient).pipe( @@ -273,7 +311,7 @@ const probeCodexAppServerProvider = Effect.fn("probeCodexAppServerProvider")(fun models: appendCustomCodexModels(models, input.customModels ?? 
[]), skills: parseCodexSkillsListResponse(skillsResponse, input.cwd), } satisfies CodexAppServerProviderSnapshot; -}, Effect.scoped); +}, scopedSafeTeardown("codex-probe")); const emptyCodexModelsFromSettings = (codexSettings: CodexSettings): ServerProvider["models"] => codexSettings.customModels @@ -286,13 +324,13 @@ const emptyCodexModelsFromSettings = (codexSettings: CodexSettings): ServerProvi capabilities: null, })); -const makePendingCodexProvider = (codexSettings: CodexSettings): ServerProvider => { +const makePendingCodexProvider = (codexSettings: CodexSettings): ServerProviderDraft => { const checkedAt = new Date().toISOString(); const models = emptyCodexModelsFromSettings(codexSettings); if (!codexSettings.enabled) { return buildServerProvider({ - provider: PROVIDER, + presentation: CODEX_PRESENTATION, enabled: false, checkedAt, models, @@ -308,7 +346,7 @@ const makePendingCodexProvider = (codexSettings: CodexSettings): ServerProvider } return buildServerProvider({ - provider: PROVIDER, + presentation: CODEX_PRESENTATION, enabled: true, checkedAt, models, @@ -329,10 +367,12 @@ function accountProbeStatus(account: CodexAppServerProviderSnapshot["account"]): readonly message?: string; } { const authLabel = codexAccountAuthLabel(account.account); + const authEmail = codexAccountEmail(account.account); const auth = { status: account.account ? ("authenticated" as const) : ("unknown" as const), ...(account.account?.type ? { type: account.account?.type } : {}), ...(authLabel ? { label: authLabel } : {}), + ...(authEmail ? 
{ email: authEmail } : {}), } satisfies ServerProvider["auth"]; if (account.account) { @@ -351,32 +391,30 @@ function accountProbeStatus(account: CodexAppServerProviderSnapshot["account"]): } export const checkCodexProviderStatus = Effect.fn("checkCodexProviderStatus")(function* ( + codexSettings: CodexSettings, probe: (input: { readonly binaryPath: string; readonly homePath?: string; readonly cwd: string; readonly customModels: ReadonlyArray; + readonly environment?: NodeJS.ProcessEnv; }) => Effect.Effect< CodexAppServerProviderSnapshot, CodexErrors.CodexAppServerError, ChildProcessSpawner.ChildProcessSpawner > = probeCodexAppServerProvider, + environment: NodeJS.ProcessEnv = process.env, ): Effect.fn.Return< - ServerProvider, + ServerProviderDraft, ServerSettingsError, - ServerSettingsService | ServerConfig | ChildProcessSpawner.ChildProcessSpawner + ChildProcessSpawner.ChildProcessSpawner > { - const codexSettings = yield* Effect.service(ServerSettingsService).pipe( - Effect.flatMap((service) => service.getSettings), - Effect.map((settings) => settings.providers.codex), - ); - const serverConfig = yield* Effect.service(ServerConfig); const checkedAt = DateTime.formatIso(yield* DateTime.now); const emptyModels = emptyCodexModelsFromSettings(codexSettings); if (!codexSettings.enabled) { return buildServerProvider({ - provider: PROVIDER, + presentation: CODEX_PRESENTATION, enabled: false, checkedAt, models: emptyModels, @@ -394,15 +432,16 @@ export const checkCodexProviderStatus = Effect.fn("checkCodexProviderStatus")(fu const probeResult = yield* probe({ binaryPath: codexSettings.binaryPath, homePath: codexSettings.homePath, - cwd: serverConfig.cwd, + cwd: process.cwd(), customModels: codexSettings.customModels, + environment, }).pipe(Effect.timeoutOption(Duration.millis(PROVIDER_PROBE_TIMEOUT_MS)), Effect.result); if (Result.isFailure(probeResult)) { const error = probeResult.failure; const installed = !Schema.is(CodexErrors.CodexAppServerSpawnError)(error); 
return buildServerProvider({ - provider: PROVIDER, + presentation: CODEX_PRESENTATION, enabled: codexSettings.enabled, checkedAt, models: emptyModels, @@ -421,7 +460,7 @@ export const checkCodexProviderStatus = Effect.fn("checkCodexProviderStatus")(fu if (Option.isNone(probeResult.success)) { return buildServerProvider({ - provider: PROVIDER, + presentation: CODEX_PRESENTATION, enabled: codexSettings.enabled, checkedAt, models: emptyModels, @@ -440,7 +479,7 @@ export const checkCodexProviderStatus = Effect.fn("checkCodexProviderStatus")(fu const accountStatus = accountProbeStatus(snapshot.account); return buildServerProvider({ - provider: PROVIDER, + presentation: CODEX_PRESENTATION, enabled: codexSettings.enabled, checkedAt, models: snapshot.models, @@ -455,30 +494,11 @@ export const checkCodexProviderStatus = Effect.fn("checkCodexProviderStatus")(fu }); }); -export const CodexProviderLive = Layer.effect( - CodexProvider, - Effect.gen(function* () { - const serverSettings = yield* ServerSettingsService; - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; - const serverConfig = yield* Effect.service(ServerConfig); - const checkProvider = checkCodexProviderStatus().pipe( - Effect.provideService(ServerSettingsService, serverSettings), - Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), - Effect.provideService(ServerConfig, serverConfig), - ); - - return yield* makeManagedServerProvider({ - getSettings: serverSettings.getSettings.pipe( - Effect.map((settings) => settings.providers.codex), - Effect.orDie, - ), - streamSettings: serverSettings.streamChanges.pipe( - Stream.map((settings) => settings.providers.codex), - ), - haveSettingsChanged: (previous, next) => !Equal.equals(previous, next), - initialSnapshot: makePendingCodexProvider, - checkProvider, - refreshInterval: Duration.minutes(5), - }); - }), -); +// NOTE: the singleton `CodexProviderLive` Layer has been removed as part of +// the per-instance-driver refactor. 
`CodexDriver.create()` builds a managed +// snapshot per instance (each with its own `CodexSettings`) and hands the +// resulting `ServerProviderShape` back as `ProviderInstance.snapshot`. +// +// The `makePendingCodexProvider` and `checkCodexProviderStatus` helpers are +// re-exported for use by `CodexDriver`. +export { makePendingCodexProvider }; diff --git a/apps/server/src/provider/Layers/CodexSessionRuntime.ts b/apps/server/src/provider/Layers/CodexSessionRuntime.ts index e45e1825ee2..4f9011fba04 100644 --- a/apps/server/src/provider/Layers/CodexSessionRuntime.ts +++ b/apps/server/src/provider/Layers/CodexSessionRuntime.ts @@ -1,10 +1,10 @@ -import { randomUUID } from "node:crypto"; - import { ApprovalRequestId, - DEFAULT_MODEL_BY_PROVIDER, + DEFAULT_MODEL, EventId, + ProviderDriverKind, ProviderItemId, + type ProviderInstanceId, type ProviderApprovalDecision, type ProviderEvent, type ProviderInteractionMode, @@ -17,7 +17,7 @@ import { TurnId, } from "@t3tools/contracts"; import { normalizeModelSlug } from "@t3tools/shared/model"; -import { Deferred, Effect, Exit, Layer, Queue, Ref, Scope, Schema, Stream } from "effect"; +import { Deferred, Effect, Exit, Layer, Queue, Ref, Scope, Random, Schema, Stream } from "effect"; import * as SchemaIssue from "effect/SchemaIssue"; import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; import * as CodexClient from "effect-codex-app-server/client"; @@ -26,13 +26,13 @@ import * as CodexRpc from "effect-codex-app-server/rpc"; import * as EffectCodexSchema from "effect-codex-app-server/schema"; import { buildCodexInitializeParams } from "./CodexProvider.ts"; +import { expandHomePath } from "../../pathExpansion.ts"; import { CODEX_DEFAULT_MODE_DEVELOPER_INSTRUCTIONS, CODEX_PLAN_MODE_DEVELOPER_INSTRUCTIONS, } from "../CodexDeveloperInstructions.ts"; -import { expandHomePath } from "../../pathExpansion.ts"; -const PROVIDER = "codex" as const; +const PROVIDER = ProviderDriverKind.make("codex"); const 
ANSI_ESCAPE_CHAR = String.fromCharCode(27); const ANSI_ESCAPE_REGEX = new RegExp(`${ANSI_ESCAPE_CHAR}\\[[0-9;]*m`, "g"); @@ -76,8 +76,10 @@ type CodexThreadItem = export interface CodexSessionRuntimeOptions { readonly threadId: ThreadId; + readonly providerInstanceId?: ProviderInstanceId; readonly binaryPath: string; readonly homePath?: string; + readonly environment?: NodeJS.ProcessEnv; readonly cwd: string; readonly runtimeMode: RuntimeMode; readonly model?: string; @@ -87,7 +89,10 @@ export interface CodexSessionRuntimeOptions { export interface CodexSessionRuntimeSendTurnInput { readonly input?: string; - readonly attachments?: ReadonlyArray<{ readonly type: "image"; readonly url: string }>; + readonly attachments?: ReadonlyArray<{ + readonly type: "image"; + readonly url: string; + }>; readonly model?: string; readonly serviceTier?: EffectCodexSchema.V2TurnStartParams__ServiceTier | undefined; readonly effort?: EffectCodexSchema.V2TurnStartParams__ReasoningEffort | undefined; @@ -303,7 +308,7 @@ function buildCodexCollaborationMode(input: { if (input.interactionMode === undefined) { return undefined; } - const model = normalizeCodexModelSlug(input.model) ?? DEFAULT_MODEL_BY_PROVIDER.codex; + const model = normalizeCodexModelSlug(input.model) ?? 
DEFAULT_MODEL; return { mode: input.interactionMode, settings: { @@ -321,7 +326,10 @@ export function buildTurnStartParams(input: { readonly threadId: string; readonly runtimeMode: RuntimeMode; readonly prompt?: string; - readonly attachments?: ReadonlyArray<{ readonly type: "image"; readonly url: string }>; + readonly attachments?: ReadonlyArray<{ + readonly type: "image"; + readonly url: string; + }>; readonly model?: string; readonly serviceTier?: EffectCodexSchema.V2TurnStartParams__ServiceTier; readonly effort?: EffectCodexSchema.V2TurnStartParams__ReasoningEffort; @@ -680,13 +688,19 @@ export const makeCodexSessionRuntime = ( const collabReceiverTurnsRef = yield* Ref.make(new Map()); const closedRef = yield* Ref.make(false); + // `~` is not shell-expanded when env vars are set via + // `child_process.spawn`; `expandHomePath` lets a configured + // `CODEX_HOME=~/.codex_work` reach codex as an absolute path. + const resolvedHomePath = options.homePath ? expandHomePath(options.homePath) : undefined; + const env = { + ...(options.environment ?? process.env), + ...(resolvedHomePath ? { CODEX_HOME: resolvedHomePath } : {}), + }; const child = yield* spawner .spawn( ChildProcess.make(options.binaryPath, ["app-server"], { cwd: options.cwd, - ...(options.homePath - ? { env: { ...process.env, CODEX_HOME: expandHomePath(options.homePath) } } - : {}), + env, shell: process.platform === "win32", }), ) @@ -712,6 +726,7 @@ export const makeCodexSessionRuntime = ( const initialSession = { provider: PROVIDER, + ...(options.providerInstanceId ? 
{ providerInstanceId: options.providerInstanceId } : {}), status: "connecting", runtimeMode: options.runtimeMode, cwd: options.cwd, @@ -725,13 +740,15 @@ export const makeCodexSessionRuntime = ( const offerEvent = (event: ProviderEvent) => Queue.offer(events, event).pipe(Effect.asVoid); const emitEvent = (event: Omit) => - offerEvent({ - id: EventId.make(randomUUID()), - provider: PROVIDER, - createdAt: new Date().toISOString(), - ...event, - }); - + Effect.flatMap(Random.nextUUIDv4, (id) => + offerEvent({ + id: EventId.make(id), + provider: PROVIDER, + ...(options.providerInstanceId ? { providerInstanceId: options.providerInstanceId } : {}), + createdAt: new Date().toISOString(), + ...event, + }), + ); const emitSessionEvent = (method: string, message: string) => emitEvent({ kind: "session", @@ -891,7 +908,7 @@ export const makeCodexSessionRuntime = ( yield* client.handleServerRequest("item/commandExecution/requestApproval", (payload) => Effect.gen(function* () { - const requestId = ApprovalRequestId.make(randomUUID()); + const requestId = ApprovalRequestId.make(yield* Random.nextUUIDv4); const turnId = TurnId.make(payload.turnId); const itemId = ProviderItemId.make(payload.itemId); const decision = yield* Deferred.make(); @@ -947,7 +964,7 @@ export const makeCodexSessionRuntime = ( yield* client.handleServerRequest("item/fileChange/requestApproval", (payload) => Effect.gen(function* () { - const requestId = ApprovalRequestId.make(randomUUID()); + const requestId = ApprovalRequestId.make(yield* Random.nextUUIDv4); const turnId = TurnId.make(payload.turnId); const itemId = ProviderItemId.make(payload.itemId); const decision = yield* Deferred.make(); @@ -1003,7 +1020,7 @@ export const makeCodexSessionRuntime = ( yield* client.handleServerRequest("item/tool/requestUserInput", (payload) => Effect.gen(function* () { - const requestId = ApprovalRequestId.make(randomUUID()); + const requestId = ApprovalRequestId.make(yield* Random.nextUUIDv4); const turnId = 
TurnId.make(payload.turnId); const itemId = ProviderItemId.make(payload.itemId); const answers = yield* Deferred.make(); diff --git a/apps/server/src/provider/Layers/CopilotAdapter.test.ts b/apps/server/src/provider/Layers/CopilotAdapter.test.ts index 36fa447fcff..4216a516857 100644 --- a/apps/server/src/provider/Layers/CopilotAdapter.test.ts +++ b/apps/server/src/provider/Layers/CopilotAdapter.test.ts @@ -1,6 +1,6 @@ import assert from "node:assert/strict"; -import { ThreadId } from "@t3tools/contracts"; +import { ProviderDriverKind, ThreadId } from "@t3tools/contracts"; import { type SessionEvent } from "@github/copilot-sdk"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { afterAll, it, vi } from "@effect/vitest"; @@ -142,7 +142,7 @@ modeLayer("CopilotAdapterLive interaction mode", (it) => { const adapter = yield* CopilotAdapter; const session = yield* adapter.startSession({ - provider: "copilot", + provider: ProviderDriverKind.make("copilot"), threadId: asThreadId("thread-mode"), runtimeMode: "full-access", }); @@ -196,7 +196,7 @@ planLayer("CopilotAdapterLive proposed plan events", (it) => { const adapter = yield* CopilotAdapter; const session = yield* adapter.startSession({ - provider: "copilot", + provider: ProviderDriverKind.make("copilot"), threadId: asThreadId("thread-plan"), runtimeMode: "full-access", }); diff --git a/apps/server/src/provider/Layers/CopilotAdapter.ts b/apps/server/src/provider/Layers/CopilotAdapter.ts index 5d00bf1e5a8..9195a936b0b 100644 --- a/apps/server/src/provider/Layers/CopilotAdapter.ts +++ b/apps/server/src/provider/Layers/CopilotAdapter.ts @@ -1,9 +1,36 @@ +/** + * CopilotAdapter — `ProviderAdapterShape` for the GitHub Copilot SDK runtime. + * + * Refactored from the legacy singleton-Layer adapter to the new + * `ProviderDriver` SPI. Exports `makeCopilotAdapter(config, options)` + * which returns an Effect that resolves to a fully-formed adapter shape + * ready for `ProviderInstance`. 
All per-session state, including turn + * tracking, lives inside this factory's closure — two instances of the + * Copilot driver therefore share zero mutable state. + * + * Two callers: + * 1. `CopilotDriver.create()` — production path; wraps the result into + * a `ProviderInstance`. + * 2. `makeCopilotAdapterLive(options)` — back-compat Layer that binds the + * adapter to the legacy `CopilotAdapter` Service tag for tests and + * the conformance suite. + * + * Critical invariants preserved from the legacy implementation: + * - Every spawn that may end up as a child process goes through + * `withSanitizedCopilotDesktopEnv` so the Electron desktop env vars + * (`ELECTRON_RUN_AS_NODE`, `ELECTRON_RENDERER_PORT`, `CLAUDECODE`) + * never leak into the spawned binary. + * - Per-session turn tracking (`currentTurnId`, `pendingCompletionTurnId`, + * etc.) is owned per `ActiveCopilotSession`, which itself lives inside + * the per-driver-instance `sessions` map. + */ import { randomUUID } from "node:crypto"; import { - type CodexReasoningEffort, EventId, type ProviderApprovalDecision, + ProviderDriverKind, + ProviderInstanceId, ProviderItemId, type ProviderRuntimeEvent, type ProviderSession, @@ -23,19 +50,24 @@ import type { PermissionRequestResult, SessionEvent, } from "@github/copilot-sdk"; + +/** + * The Copilot SDK's `ReasoningEffort` literal union, redeclared locally + * because it's not re-exported from `@github/copilot-sdk`'s package root. + * Kept in sync with `dist/types.d.ts` (`"low" | "medium" | "high" | "xhigh"`). 
+ */ +type CopilotReasoningEffort = "low" | "medium" | "high" | "xhigh"; import { Effect, Layer, Queue, Stream } from "effect"; import { resolveAttachmentPath } from "../../attachmentStore.ts"; import { ServerConfig } from "../../config.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; import { ProviderAdapterProcessError, ProviderAdapterRequestError, ProviderAdapterSessionNotFoundError, ProviderAdapterValidationError, } from "../Errors.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; -import { type EventNdjsonLogger } from "./EventNdjsonLogger.ts"; +import type { EventNdjsonLogger } from "./EventNdjsonLogger.ts"; import { assistantUsageFields, beginCopilotTurn, @@ -46,19 +78,24 @@ import { recordTurnUsage, type CopilotTurnTrackingState, } from "./copilotTurnTracking.ts"; -import { resolveBundledCopilotCliPath, withSanitizedCopilotDesktopEnv } from "./copilotCliPath.ts"; +import { + resolveBundledCopilotCliPath, + withSanitizedCopilotDesktopEnv, +} from "./copilotCliPath.ts"; import { CopilotAdapter, type CopilotAdapterShape } from "../Services/CopilotAdapter.ts"; import { toMessage } from "../toMessage.ts"; import type { ProviderThreadSnapshot, ProviderThreadTurnSnapshot, } from "../Services/ProviderAdapter.ts"; +import type { CopilotSettings } from "../Drivers/CopilotSettings.ts"; -const PROVIDER = "copilot" as const; +const PROVIDER = ProviderDriverKind.make("copilot"); const USER_INPUT_QUESTION_ID = "answer"; const USER_INPUT_QUESTION_HEADER = "Question"; export interface CopilotAdapterLiveOptions { + readonly instanceId?: ProviderInstanceId; readonly nativeEventLogger?: EventNdjsonLogger; readonly clientFactory?: (options: CopilotClientOptions) => CopilotClientHandle; } @@ -100,7 +137,7 @@ interface ActiveCopilotSession extends CopilotTurnTrackingState { cwd: string | undefined; configDir: string | undefined; model: string | undefined; - reasoningEffort: CodexReasoningEffort | undefined; + reasoningEffort: 
CopilotReasoningEffort | undefined; interactionMode: "default" | "plan" | undefined; updatedAt: string; lastError: string | undefined; @@ -194,15 +231,36 @@ function mapSupportedModelsById(models: ReadonlyArray) { return new Map(models.map((model) => [model.id, model])); } -function getCopilotReasoningEffort(modelOptions: unknown) { - const record = asRecord(modelOptions); - const copilot = asRecord(record?.copilot); - const reasoningEffort = normalizeString(copilot?.reasoningEffort); - return reasoningEffort === "low" || - reasoningEffort === "medium" || - reasoningEffort === "high" || - reasoningEffort === "xhigh" - ? reasoningEffort +/** + * Read the Copilot reasoning-effort selection from the model selection's + * options. Accepts the SDK's four legal values (`low`/`medium`/`high`/ + * `xhigh`) and returns `undefined` for anything else. + */ +function getCopilotReasoningEffort( + modelOptions: + | ReadonlyArray<{ readonly id: string; readonly value: string | boolean }> + | undefined, +): CopilotReasoningEffort | undefined { + if (!modelOptions) return undefined; + // Tolerate both shapes seen in the wild: a typed + // `ProviderOptionSelections` array (current) and an old + // `{ copilot: { reasoningEffort } }` envelope (legacy persisted state). + let raw: string | undefined; + for (const option of modelOptions) { + if (option.id === "reasoningEffort" && typeof option.value === "string") { + raw = option.value; + break; + } + } + if (!raw) { + // Legacy `{ copilot: { reasoningEffort } }` shape — only reachable if a + // caller hands us an unknown blob (not the typed contract). + const record = asRecord(modelOptions as unknown); + const copilot = asRecord(record?.copilot); + raw = normalizeString(copilot?.reasoningEffort); + } + return raw === "low" || raw === "medium" || raw === "high" || raw === "xhigh" + ? 
raw : undefined; } @@ -413,7 +471,7 @@ function createSessionRecord(input: { readonly cwd: string | undefined; readonly configDir: string | undefined; readonly model: string | undefined; - readonly reasoningEffort: CodexReasoningEffort | undefined; + readonly reasoningEffort: CopilotReasoningEffort | undefined; }): ActiveCopilotSession { return { client: input.client, @@ -441,914 +499,911 @@ function createSessionRecord(input: { }; } -const makeCopilotAdapter = (options?: CopilotAdapterLiveOptions) => - Effect.gen(function* () { - const serverConfig = yield* ServerConfig; - const serverSettingsService = yield* ServerSettingsService; - const nativeEventLogger = options?.nativeEventLogger; - const runtimeEventQueue = yield* Queue.unbounded(); - const sessions = new Map(); - - const emitRuntimeEvents = (events: ReadonlyArray) => - Effect.runPromise(Queue.offerAll(runtimeEventQueue, events).pipe(Effect.asVoid)).catch( - () => undefined, - ); - - const writeNativeEvent = (threadId: ThreadId, event: SessionEvent) => { - if (!nativeEventLogger) return Promise.resolve(); - return Effect.runPromise(nativeEventLogger.write(event, threadId)).catch(() => undefined); - }; +/** + * Per-instance Copilot adapter factory. + * + * `copilotSettings` is the typed config decoded by the registry. Two + * instances of the Copilot driver pass two independent `copilotSettings` + * payloads here and get two adapter shapes that share no mutable state + * (sessions map, runtime event queue, approval resolvers, turn tracking). + */ +export const makeCopilotAdapter = Effect.fn("makeCopilotAdapter")(function* ( + copilotSettings: CopilotSettings, + options?: CopilotAdapterLiveOptions, +) { + const _boundInstanceId = options?.instanceId ?? 
ProviderInstanceId.make("copilot"); + const serverConfig = yield* ServerConfig; + const nativeEventLogger = options?.nativeEventLogger; + const runtimeEventQueue = yield* Queue.unbounded(); + const sessions = new Map(); + + const emitRuntimeEvents = (events: ReadonlyArray) => + Effect.runPromise(Queue.offerAll(runtimeEventQueue, events).pipe(Effect.asVoid)).catch( + () => undefined, + ); - const currentSyntheticTurnId = (record: ActiveCopilotSession) => - completionTurnRefs(record).turnId ?? record.currentTurnId; + const writeNativeEvent = (threadId: ThreadId, event: SessionEvent) => { + if (!nativeEventLogger) return Promise.resolve(); + return Effect.runPromise(nativeEventLogger.write(event, threadId)).catch(() => undefined); + }; - const syncInteractionMode = ( - record: ActiveCopilotSession, - interactionMode: "default" | "plan", - ) => { - if (record.interactionMode === interactionMode) { - return Effect.void; - } - return Effect.tryPromise({ - try: async () => { - await record.session.rpc.mode.set({ - mode: toCopilotSessionMode(interactionMode), - }); - record.interactionMode = interactionMode; - }, - catch: (cause) => - new ProviderAdapterRequestError({ - provider: PROVIDER, - method: "session.mode.set", - detail: toMessage(cause, "Failed to switch GitHub Copilot interaction mode."), - cause, - }), - }); - }; + const currentSyntheticTurnId = (record: ActiveCopilotSession) => + completionTurnRefs(record).turnId ?? record.currentTurnId; - const emitLatestProposedPlan = (record: ActiveCopilotSession) => - Effect.tryPromise({ - try: () => record.session.rpc.plan.read(), - catch: (cause) => - new ProviderAdapterRequestError({ - provider: PROVIDER, - method: "session.plan.read", - detail: toMessage(cause, "Failed to read the GitHub Copilot plan."), - cause, - }), - }).pipe( - Effect.flatMap((plan) => { - const planMarkdown = trimToUndefined(plan.content ?? 
undefined); - if (!plan.exists || !planMarkdown) { - return Effect.void; - } - return Queue.offer( - runtimeEventQueue, - makeSyntheticEvent( - record.threadId, - "turn.proposed.completed", - { - planMarkdown, - }, - { turnId: currentSyntheticTurnId(record) }, - ), - ).pipe(Effect.asVoid); + const syncInteractionMode = ( + record: ActiveCopilotSession, + interactionMode: "default" | "plan", + ) => { + if (record.interactionMode === interactionMode) { + return Effect.void; + } + return Effect.tryPromise({ + try: async () => { + await record.session.rpc.mode.set({ + mode: toCopilotSessionMode(interactionMode), + }); + record.interactionMode = interactionMode; + }, + catch: (cause) => + new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "session.mode.set", + detail: toMessage(cause, "Failed to switch GitHub Copilot interaction mode."), + cause, }), - ); + }); + }; - const mapSessionEvent = ( - record: ActiveCopilotSession, - event: SessionEvent, - ): ReadonlyArray => { - const currentTurnId = record.currentTurnId; - const currentProviderTurnId = record.currentProviderTurnId; - const resolveOrchestrationTurnId = ( - providerTurnId: TurnId | undefined, - ): TurnId | undefined => { - if (providerTurnId && currentProviderTurnId && providerTurnId === currentProviderTurnId) { - return currentTurnId ?? providerTurnId; + const emitLatestProposedPlan = (record: ActiveCopilotSession) => + Effect.tryPromise({ + try: () => record.session.rpc.plan.read(), + catch: (cause) => + new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "session.plan.read", + detail: toMessage(cause, "Failed to read the GitHub Copilot plan."), + cause, + }), + }).pipe( + Effect.flatMap((plan) => { + const planMarkdown = trimToUndefined(plan.content ?? undefined); + if (!plan.exists || !planMarkdown) { + return Effect.void; } - return currentTurnId ?? 
providerTurnId; - }; - const base = (input?: { - readonly turnId?: TurnId | undefined; - readonly providerTurnId?: TurnId | undefined; - readonly itemId?: string | undefined; - readonly requestId?: string | undefined; - }) => - withRefs({ - threadId: record.threadId, - eventId: EventId.make(event.id), - createdAt: event.timestamp, - turnId: resolveOrchestrationTurnId(input?.providerTurnId ?? input?.turnId), - providerTurnId: input?.providerTurnId ?? input?.turnId, - itemId: input?.itemId, - requestId: input?.requestId, - rawMethod: event.type, - rawPayload: event, - }); - - switch (event.type) { - case "session.start": - case "session.resume": - return [ + return Queue.offer( + runtimeEventQueue, + makeSyntheticEvent( + record.threadId, + "turn.proposed.completed", { - ...base(), - type: "session.started", - payload: { - message: - event.type === "session.resume" - ? "Resumed GitHub Copilot session" - : "Started GitHub Copilot session", - resume: event.data, - }, + planMarkdown, }, - { - ...base(), - type: "thread.started", - payload: { - providerThreadId: - event.type === "session.start" ? event.data.sessionId : record.session.sessionId, - }, + { turnId: currentSyntheticTurnId(record) }, + ), + ).pipe(Effect.asVoid); + }), + ); + + const mapSessionEvent = ( + record: ActiveCopilotSession, + event: SessionEvent, + ): ReadonlyArray => { + const currentTurnId = record.currentTurnId; + const currentProviderTurnId = record.currentProviderTurnId; + const resolveOrchestrationTurnId = ( + providerTurnId: TurnId | undefined, + ): TurnId | undefined => { + if (providerTurnId && currentProviderTurnId && providerTurnId === currentProviderTurnId) { + return currentTurnId ?? providerTurnId; + } + return currentTurnId ?? 
providerTurnId; + }; + const base = (input?: { + readonly turnId?: TurnId | undefined; + readonly providerTurnId?: TurnId | undefined; + readonly itemId?: string | undefined; + readonly requestId?: string | undefined; + }) => + withRefs({ + threadId: record.threadId, + eventId: EventId.make(event.id), + createdAt: event.timestamp, + turnId: resolveOrchestrationTurnId(input?.providerTurnId ?? input?.turnId), + providerTurnId: input?.providerTurnId ?? input?.turnId, + itemId: input?.itemId, + requestId: input?.requestId, + rawMethod: event.type, + rawPayload: event, + }); + + switch (event.type) { + case "session.start": + case "session.resume": + return [ + { + ...base(), + type: "session.started", + payload: { + message: + event.type === "session.resume" + ? "Resumed GitHub Copilot session" + : "Started GitHub Copilot session", + resume: event.data, }, - ]; - case "session.info": - return [ - { - ...base(), - type: "runtime.warning", - payload: { - message: event.data.message, - detail: event.data, - }, + }, + { + ...base(), + type: "thread.started", + payload: { + providerThreadId: + event.type === "session.start" ? 
event.data.sessionId : record.session.sessionId, }, - ]; - case "session.warning": - return [ - { - ...base(), - type: "runtime.warning", - payload: { - message: event.data.message, - detail: event.data, - }, + }, + ]; + case "session.info": + return [ + { + ...base(), + type: "runtime.warning", + payload: { + message: event.data.message, + detail: event.data, }, - ]; - case "session.error": - return [ - { - ...base(), - type: "runtime.error", - payload: { - message: event.data.message, - class: "provider_error", - detail: event.data, - }, + }, + ]; + case "session.warning": + return [ + { + ...base(), + type: "runtime.warning", + payload: { + message: event.data.message, + detail: event.data, }, - { - ...base(), - type: "session.state.changed", - payload: { - state: "error", - reason: "session.error", - detail: event.data, - }, + }, + ]; + case "session.error": + return [ + { + ...base(), + type: "runtime.error", + payload: { + message: event.data.message, + class: "provider_error", + detail: event.data, }, - ]; - case "session.idle": { - const idleCompletionRefs = completionTurnRefs(record); - const idleCompletionEvents: ProviderRuntimeEvent[] = - idleCompletionRefs.turnId || idleCompletionRefs.providerTurnId - ? 
[ - { - ...base(idleCompletionRefs), - type: "turn.completed", - payload: { - state: "completed", - ...assistantUsageFields(record.pendingTurnUsage), - }, - } satisfies ProviderRuntimeEvent, - ] - : []; - return [ - ...idleCompletionEvents, - { - ...base(), - type: "session.state.changed", - payload: { - state: "ready", - reason: "session.idle", - }, + }, + { + ...base(), + type: "session.state.changed", + payload: { + state: "error", + reason: "session.error", + detail: event.data, }, - { - ...base(), - type: "thread.state.changed", - payload: { - state: "idle", - detail: event.data, - }, + }, + ]; + case "session.idle": { + const idleCompletionRefs = completionTurnRefs(record); + const idleCompletionEvents: ProviderRuntimeEvent[] = + idleCompletionRefs.turnId || idleCompletionRefs.providerTurnId + ? [ + { + ...base(idleCompletionRefs), + type: "turn.completed", + payload: { + state: "completed", + ...assistantUsageFields(record.pendingTurnUsage), + }, + } satisfies ProviderRuntimeEvent, + ] + : []; + return [ + ...idleCompletionEvents, + { + ...base(), + type: "session.state.changed", + payload: { + state: "ready", + reason: "session.idle", }, - ]; - } - case "session.title_changed": - return [ - { - ...base(), - type: "thread.metadata.updated", - payload: { - name: event.data.title, - metadata: event.data, - }, + }, + { + ...base(), + type: "thread.state.changed", + payload: { + state: "idle", + detail: event.data, }, - ]; - case "session.model_change": - return [ - { - ...base(), - type: "model.rerouted", - payload: { - fromModel: event.data.previousModel ?? 
"unknown", - toModel: event.data.newModel, - reason: "session.model_change", - }, + }, + ]; + } + case "session.title_changed": + return [ + { + ...base(), + type: "thread.metadata.updated", + payload: { + name: event.data.title, + metadata: event.data, }, - ]; - case "session.plan_changed": - return [ - { - ...base(), - type: "turn.plan.updated", - payload: { - explanation: `Plan ${event.data.operation}d`, - plan: [], - }, + }, + ]; + case "session.model_change": + return [ + { + ...base(), + type: "model.rerouted", + payload: { + fromModel: event.data.previousModel ?? "unknown", + toModel: event.data.newModel, + reason: "session.model_change", }, - ]; - case "session.workspace_file_changed": - return [ - { - ...base(), - type: "files.persisted", - payload: { - files: [ - { - filename: event.data.path, - fileId: event.data.path, - }, - ], - }, + }, + ]; + case "session.plan_changed": + return [ + { + ...base(), + type: "turn.plan.updated", + payload: { + explanation: `Plan ${event.data.operation}d`, + plan: [], }, - ]; - case "session.context_changed": - return [ - { - ...base(), - type: "thread.metadata.updated", - payload: { - metadata: event.data, - }, + }, + ]; + case "session.workspace_file_changed": + return [ + { + ...base(), + type: "files.persisted", + payload: { + files: [ + { + filename: event.data.path, + fileId: event.data.path, + }, + ], }, - ]; - case "session.usage_info": - return [ - { - ...base(), - type: "thread.token-usage.updated", - payload: { - usage: event.data, - }, + }, + ]; + case "session.context_changed": + return [ + { + ...base(), + type: "thread.metadata.updated", + payload: { + metadata: event.data, }, - ]; - case "session.task_complete": - return [ - { - ...base(), - type: "task.completed", - payload: { - taskId: toRuntimeTaskId(record.threadId) ?? RuntimeTaskId.make(record.threadId), - status: "completed", - ...(trimToUndefined(event.data.summary) ? 
{ summary: event.data.summary } : {}), + }, + ]; + case "session.usage_info": { + const usedTokens = Math.max(0, Math.floor(event.data.currentTokens)); + const tokenLimit = Math.floor(event.data.tokenLimit); + return [ + { + ...base(), + type: "thread.token-usage.updated", + payload: { + usage: { + usedTokens, + ...(tokenLimit > 0 ? { maxTokens: tokenLimit } : {}), }, }, - ]; - case "assistant.turn_start": - return [ - { - ...base({ providerTurnId: toTurnId(event.data.turnId) }), - type: "turn.started", - payload: record.model ? { model: record.model } : {}, + }, + ]; + } + case "session.task_complete": + return [ + { + ...base(), + type: "task.completed", + payload: { + taskId: toRuntimeTaskId(record.threadId) ?? RuntimeTaskId.make(record.threadId), + status: "completed", + ...(trimToUndefined(event.data.summary) ? { summary: event.data.summary } : {}), }, - { - ...base({ providerTurnId: toTurnId(event.data.turnId) }), - type: "session.state.changed", - payload: { - state: "running", - reason: "assistant.turn_start", - }, + }, + ]; + case "assistant.turn_start": + return [ + { + ...base({ providerTurnId: toTurnId(event.data.turnId) }), + type: "turn.started", + payload: record.model ? 
{ model: record.model } : {}, + }, + { + ...base({ providerTurnId: toTurnId(event.data.turnId) }), + type: "session.state.changed", + payload: { + state: "running", + reason: "assistant.turn_start", }, - ]; - case "assistant.reasoning": - return [ - { - ...base({ itemId: event.data.reasoningId }), - type: "item.completed", - payload: { - itemType: "reasoning", - status: "completed", - title: "Reasoning", - detail: trimToUndefined(event.data.content), - data: event.data, - }, + }, + ]; + case "assistant.reasoning": + return [ + { + ...base({ itemId: event.data.reasoningId }), + type: "item.completed", + payload: { + itemType: "reasoning", + status: "completed", + title: "Reasoning", + detail: trimToUndefined(event.data.content), + data: event.data, }, - ]; - case "assistant.reasoning_delta": - return [ - { - ...base({ itemId: event.data.reasoningId }), - type: "content.delta", - payload: { - streamKind: "reasoning_text", - delta: event.data.deltaContent, - }, + }, + ]; + case "assistant.reasoning_delta": + return [ + { + ...base({ itemId: event.data.reasoningId }), + type: "content.delta", + payload: { + streamKind: "reasoning_text", + delta: event.data.deltaContent, }, - ]; - case "assistant.message": - return [ - { - ...base({ itemId: event.data.messageId }), - type: "item.completed", - payload: { - itemType: "assistant_message", - status: "completed", - title: "Assistant message", - detail: trimToUndefined(event.data.content), - data: event.data, - }, + }, + ]; + case "assistant.message": + return [ + { + ...base({ itemId: event.data.messageId }), + type: "item.completed", + payload: { + itemType: "assistant_message", + status: "completed", + title: "Assistant message", + detail: trimToUndefined(event.data.content), + data: event.data, }, - ]; - case "assistant.message_delta": - return [ - { - ...base({ itemId: event.data.messageId }), - type: "content.delta", - payload: { - streamKind: "assistant_text", - delta: event.data.deltaContent, - }, + }, + ]; + case 
"assistant.message_delta": + return [ + { + ...base({ itemId: event.data.messageId }), + type: "content.delta", + payload: { + streamKind: "assistant_text", + delta: event.data.deltaContent, }, - ]; - case "assistant.turn_end": - return []; - case "assistant.usage": { - const completionRefs = completionTurnRefs(record); - const completionBase = - completionRefs.turnId || completionRefs.providerTurnId ? base(completionRefs) : base(); - return [ - { - ...completionBase, - type: "thread.token-usage.updated", - payload: { - usage: event.data, + }, + ]; + case "assistant.turn_end": + return []; + case "assistant.usage": { + const completionRefs = completionTurnRefs(record); + const completionBase = + completionRefs.turnId || completionRefs.providerTurnId ? base(completionRefs) : base(); + const inputTokens = event.data.inputTokens; + const outputTokens = event.data.outputTokens; + const cachedInputTokens = event.data.cacheReadTokens; + const durationMs = + event.data.duration !== undefined ? Math.max(0, Math.floor(event.data.duration)) : undefined; + const usedTokens = Math.max(0, (inputTokens ?? 0) + (outputTokens ?? 0)); + return [ + { + ...completionBase, + type: "thread.token-usage.updated", + payload: { + usage: { + usedTokens, + ...(inputTokens !== undefined ? { inputTokens } : {}), + ...(cachedInputTokens !== undefined ? { cachedInputTokens } : {}), + ...(outputTokens !== undefined ? { outputTokens } : {}), + ...(usedTokens > 0 ? { lastUsedTokens: usedTokens } : {}), + ...(inputTokens !== undefined ? { lastInputTokens: inputTokens } : {}), + ...(cachedInputTokens !== undefined + ? { lastCachedInputTokens: cachedInputTokens } + : {}), + ...(outputTokens !== undefined ? { lastOutputTokens: outputTokens } : {}), + ...(durationMs !== undefined ? { durationMs } : {}), }, }, - ]; - } - case "abort": { - const abortedTurnRefs = completionTurnRefs(record); - const abortedBase = - abortedTurnRefs.turnId || abortedTurnRefs.providerTurnId - ? 
base(abortedTurnRefs) - : base(); - return [ - { - ...abortedBase, - type: "turn.aborted", - payload: { - reason: event.data.reason, - }, + }, + ]; + } + case "abort": { + const abortedTurnRefs = completionTurnRefs(record); + const abortedBase = + abortedTurnRefs.turnId || abortedTurnRefs.providerTurnId + ? base(abortedTurnRefs) + : base(); + return [ + { + ...abortedBase, + type: "turn.aborted", + payload: { + reason: event.data.reason, }, - ]; - } - case "tool.execution_start": - return [ - { - ...base({ itemId: event.data.toolCallId }), - type: "item.started", - payload: { - itemType: itemTypeFromToolEvent(event), - status: "inProgress", - title: event.data.toolName ?? "Tool call", - ...(toolDetailFromEvent(event.data) - ? { detail: toolDetailFromEvent(event.data) } - : {}), - data: event.data, - }, + }, + ]; + } + case "tool.execution_start": + return [ + { + ...base({ itemId: event.data.toolCallId }), + type: "item.started", + payload: { + itemType: itemTypeFromToolEvent(event), + status: "inProgress", + title: event.data.toolName ?? "Tool call", + ...(toolDetailFromEvent(event.data) + ? 
{ detail: toolDetailFromEvent(event.data) } + : {}), + data: event.data, }, - ]; - case "tool.execution_progress": - return [ - { - ...base({ itemId: event.data.toolCallId }), - type: "tool.progress", - payload: { - toolUseId: event.data.toolCallId, - summary: event.data.progressMessage, - }, + }, + ]; + case "tool.execution_progress": + return [ + { + ...base({ itemId: event.data.toolCallId }), + type: "tool.progress", + payload: { + toolUseId: event.data.toolCallId, + summary: event.data.progressMessage, }, - ]; - case "tool.execution_partial_result": - return [ - { - ...base({ itemId: event.data.toolCallId }), - type: "tool.progress", - payload: { - toolUseId: event.data.toolCallId, - summary: event.data.partialOutput, - }, + }, + ]; + case "tool.execution_partial_result": + return [ + { + ...base({ itemId: event.data.toolCallId }), + type: "tool.progress", + payload: { + toolUseId: event.data.toolCallId, + summary: event.data.partialOutput, }, - ]; - case "tool.execution_complete": - return [ - { - ...base({ itemId: event.data.toolCallId }), - type: "item.completed", - payload: { - itemType: event.data.result?.contents?.some( - (content: { type: string }) => content.type === "terminal", - ) - ? "command_execution" - : "dynamic_tool_call", - status: event.data.success ? "completed" : "failed", - title: record.toolTitlesByCallId.get(event.data.toolCallId) ?? "Tool call", - ...(trimToUndefined(event.data.result?.content) - ? { detail: event.data.result?.content } - : {}), - data: event.data, - }, + }, + ]; + case "tool.execution_complete": + return [ + { + ...base({ itemId: event.data.toolCallId }), + type: "item.completed", + payload: { + itemType: event.data.result?.contents?.some( + (content: { type: string }) => content.type === "terminal", + ) + ? "command_execution" + : "dynamic_tool_call", + status: event.data.success ? "completed" : "failed", + title: record.toolTitlesByCallId.get(event.data.toolCallId) ?? 
"Tool call", + ...(trimToUndefined(event.data.result?.content) + ? { detail: event.data.result?.content } + : {}), + data: event.data, }, - ...(trimToUndefined(event.data.result?.content) - ? [ - { - ...base({ itemId: event.data.toolCallId }), - type: "tool.summary" as const, - payload: { - summary: event.data.result?.content ?? "", - precedingToolUseIds: [event.data.toolCallId], - }, + }, + ...(trimToUndefined(event.data.result?.content) + ? [ + { + ...base({ itemId: event.data.toolCallId }), + type: "tool.summary" as const, + payload: { + summary: event.data.result?.content ?? "", + precedingToolUseIds: [event.data.toolCallId], }, - ] - : []), - ]; - case "skill.invoked": - return [ - { - ...base(), - type: "task.progress", - payload: { - taskId: toRuntimeTaskId(event.data.name) ?? RuntimeTaskId.make(event.data.name), - description: `Invoked skill ${event.data.name}`, - }, + }, + ] + : []), + ]; + case "skill.invoked": + return [ + { + ...base(), + type: "task.progress", + payload: { + taskId: toRuntimeTaskId(event.data.name) ?? RuntimeTaskId.make(event.data.name), + description: `Invoked skill ${event.data.name}`, }, - ]; - case "subagent.started": - return [ - { - ...base(), - type: "task.started", - payload: { - taskId: - toRuntimeTaskId(event.data.toolCallId) ?? - RuntimeTaskId.make(event.data.toolCallId), - description: trimToUndefined(event.data.agentDescription), - taskType: "subagent", - }, + }, + ]; + case "subagent.started": + return [ + { + ...base(), + type: "task.started", + payload: { + taskId: + toRuntimeTaskId(event.data.toolCallId) ?? + RuntimeTaskId.make(event.data.toolCallId), + description: trimToUndefined(event.data.agentDescription), + taskType: "subagent", }, - ]; - case "subagent.completed": - return [ - { - ...base(), - type: "task.completed", - payload: { - taskId: - toRuntimeTaskId(event.data.toolCallId) ?? - RuntimeTaskId.make(event.data.toolCallId), - status: "completed", - ...(trimToUndefined(event.data.agentDisplayName) - ? 
{ summary: event.data.agentDisplayName } - : {}), - }, + }, + ]; + case "subagent.completed": + return [ + { + ...base(), + type: "task.completed", + payload: { + taskId: + toRuntimeTaskId(event.data.toolCallId) ?? + RuntimeTaskId.make(event.data.toolCallId), + status: "completed", + ...(trimToUndefined(event.data.agentDisplayName) + ? { summary: event.data.agentDisplayName } + : {}), }, - ]; - case "subagent.failed": - return [ - { - ...base(), - type: "task.completed", - payload: { - taskId: - toRuntimeTaskId(event.data.toolCallId) ?? - RuntimeTaskId.make(event.data.toolCallId), - status: "failed", - ...(trimToUndefined(event.data.error) ? { summary: event.data.error } : {}), - }, + }, + ]; + case "subagent.failed": + return [ + { + ...base(), + type: "task.completed", + payload: { + taskId: + toRuntimeTaskId(event.data.toolCallId) ?? + RuntimeTaskId.make(event.data.toolCallId), + status: "failed", + ...(trimToUndefined(event.data.error) ? { summary: event.data.error } : {}), }, - ]; - default: - return []; - } - }; + }, + ]; + default: + return []; + } + }; - const createInteractionHandlers = ( - threadId: ThreadId, - getCurrentTurnId: () => TurnId | undefined, - getRuntimeMode: () => ProviderSession["runtimeMode"], - pendingApprovalResolvers: Map, - pendingUserInputResolvers: Map, - ) => { - const onPermissionRequest = (request: PermissionRequest) => - getRuntimeMode() === "full-access" - ? Promise.resolve({ kind: "approved" }) - : new Promise((resolve) => { - const requestId = `copilot-approval-${randomUUID()}`; - const turnId = getCurrentTurnId(); - pendingApprovalResolvers.set(requestId, { - requestType: requestTypeFromPermissionRequest(request), - turnId, - resolve, - }); - void emitRuntimeEvents([ - makeSyntheticEvent( - threadId, - "request.opened", - { - requestType: requestTypeFromPermissionRequest(request), - ...(requestDetailFromPermissionRequest(request) - ? 
{ detail: requestDetailFromPermissionRequest(request) } - : {}), - args: request, - }, - { requestId, turnId }, - ), - ]); + const createInteractionHandlers = ( + threadId: ThreadId, + getCurrentTurnId: () => TurnId | undefined, + getRuntimeMode: () => ProviderSession["runtimeMode"], + pendingApprovalResolvers: Map, + pendingUserInputResolvers: Map, + ) => { + const onPermissionRequest = (request: PermissionRequest) => + getRuntimeMode() === "full-access" + ? Promise.resolve({ kind: "approved" }) + : new Promise((resolve) => { + const requestId = `copilot-approval-${randomUUID()}`; + const turnId = getCurrentTurnId(); + pendingApprovalResolvers.set(requestId, { + requestType: requestTypeFromPermissionRequest(request), + turnId, + resolve, }); - - const onUserInputRequest = (request: CopilotUserInputRequest) => - new Promise((resolve) => { - const requestId = `copilot-user-input-${randomUUID()}`; - const turnId = getCurrentTurnId(); - pendingUserInputResolvers.set(requestId, { - request, - turnId, - resolve, + void emitRuntimeEvents([ + makeSyntheticEvent( + threadId, + "request.opened", + { + requestType: requestTypeFromPermissionRequest(request), + ...(requestDetailFromPermissionRequest(request) + ? { detail: requestDetailFromPermissionRequest(request) } + : {}), + args: request, + }, + { requestId, turnId }, + ), + ]); }); - void emitRuntimeEvents([ - makeSyntheticEvent( - threadId, - "user-input.requested", - { - questions: [ - { - id: USER_INPUT_QUESTION_ID, - header: USER_INPUT_QUESTION_HEADER, - question: request.question, - options: (request.choices ?? 
[]).map((choice: string) => ({ - label: choice, - description: choice, - })), - }, - ], - }, - { requestId, turnId }, - ), - ]); + + const onUserInputRequest = (request: CopilotUserInputRequest) => + new Promise((resolve) => { + const requestId = `copilot-user-input-${randomUUID()}`; + const turnId = getCurrentTurnId(); + pendingUserInputResolvers.set(requestId, { + request, + turnId, + resolve, }); + void emitRuntimeEvents([ + makeSyntheticEvent( + threadId, + "user-input.requested", + { + questions: [ + { + id: USER_INPUT_QUESTION_ID, + header: USER_INPUT_QUESTION_HEADER, + question: request.question, + options: (request.choices ?? []).map((choice: string) => ({ + label: choice, + description: choice, + })), + }, + ], + }, + { requestId, turnId }, + ), + ]); + }); - return { - onPermissionRequest, - onUserInputRequest, - }; + return { + onPermissionRequest, + onUserInputRequest, }; + }; - const validateSessionConfiguration = (input: { - readonly client: CopilotClientHandle; - readonly threadId: ThreadId; - readonly model: string | undefined; - readonly reasoningEffort: CodexReasoningEffort | undefined; - }) => - Effect.gen(function* () { - if (!input.model && !input.reasoningEffort) { - return; - } + const validateSessionConfiguration = (input: { + readonly client: CopilotClientHandle; + readonly threadId: ThreadId; + readonly model: string | undefined; + readonly reasoningEffort: CopilotReasoningEffort | undefined; + }) => + Effect.gen(function* () { + if (!input.model && !input.reasoningEffort) { + return; + } + + yield* Effect.tryPromise({ + try: () => withSanitizedCopilotDesktopEnv(() => input.client.start()), + catch: (cause) => + new ProviderAdapterProcessError({ + provider: PROVIDER, + threadId: input.threadId, + detail: toMessage(cause, "Failed to start GitHub Copilot client."), + cause, + }), + }); + const supportedModels = mapSupportedModelsById( yield* Effect.tryPromise({ - try: () => withSanitizedCopilotDesktopEnv(() => input.client.start()), + try: 
() => withSanitizedCopilotDesktopEnv(() => input.client.listModels()), catch: (cause) => new ProviderAdapterProcessError({ provider: PROVIDER, threadId: input.threadId, - detail: toMessage(cause, "Failed to start GitHub Copilot client."), + detail: toMessage(cause, "Failed to load GitHub Copilot model metadata."), cause, }), + }), + ); + const selectedModel = input.model ? supportedModels.get(input.model) : undefined; + + if (input.model && !selectedModel) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "session.model", + issue: `GitHub Copilot model '${input.model}' is not available in the current Copilot runtime.`, }); + } - const supportedModels = mapSupportedModelsById( - yield* Effect.tryPromise({ - try: () => withSanitizedCopilotDesktopEnv(() => input.client.listModels()), - catch: (cause) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: toMessage(cause, "Failed to load GitHub Copilot model metadata."), - cause, - }), - }), - ); - const selectedModel = input.model ? supportedModels.get(input.model) : undefined; + if (!input.reasoningEffort) { + return; + } - if (input.model && !selectedModel) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "session.model", - issue: `GitHub Copilot model '${input.model}' is not available in the current Copilot runtime.`, - }); - } + if (!selectedModel) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "session.reasoningEffort", + issue: + "GitHub Copilot reasoning effort requires an explicit supported model selection.", + }); + } - if (!input.reasoningEffort) { - return; - } + const supportedReasoningEfforts = selectedModel.supportedReasoningEfforts ?? 
[]; + if (supportedReasoningEfforts.length === 0) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "session.reasoningEffort", + issue: `GitHub Copilot model '${selectedModel.id}' does not support reasoning effort configuration.`, + }); + } - if (!selectedModel) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "session.reasoningEffort", - issue: - "GitHub Copilot reasoning effort requires an explicit supported model selection.", - }); - } + if (!supportedReasoningEfforts.includes(input.reasoningEffort)) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "session.reasoningEffort", + issue: `GitHub Copilot model '${selectedModel.id}' does not support reasoning effort '${input.reasoningEffort}'.`, + }); + } + }); - const supportedReasoningEfforts = selectedModel.supportedReasoningEfforts ?? []; - if (supportedReasoningEfforts.length === 0) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "session.reasoningEffort", - issue: `GitHub Copilot model '${selectedModel.id}' does not support reasoning effort configuration.`, - }); + const reconfigureSession = ( + record: ActiveCopilotSession, + input: { + readonly model: string | undefined; + readonly reasoningEffort: CopilotReasoningEffort | undefined; + }, + ) => + Effect.tryPromise({ + try: async () => { + const sessionId = record.session.sessionId; + const previousSession = record.session; + const previousUnsubscribe = record.unsubscribe; + previousUnsubscribe(); + // Best-effort teardown -- must not block new session creation + try { + await previousSession.destroy(); + } catch { + // ignored } - if (!supportedReasoningEfforts.includes(input.reasoningEffort)) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "session.reasoningEffort", - issue: `GitHub Copilot model '${selectedModel.id}' does not support reasoning effort 
'${input.reasoningEffort}'.`, - }); - } - }); + const handlers = createInteractionHandlers( + record.threadId, + () => record.currentTurnId, + () => record.runtimeMode, + record.pendingApprovalResolvers, + record.pendingUserInputResolvers, + ); + const nextSession = await withSanitizedCopilotDesktopEnv(() => + record.client.resumeSession(sessionId, { + ...handlers, + ...(input.model ? { model: input.model } : {}), + ...(input.reasoningEffort ? { reasoningEffort: input.reasoningEffort } : {}), + ...(record.cwd ? { workingDirectory: record.cwd } : {}), + ...(record.configDir ? { configDir: record.configDir } : {}), + streaming: true, + }), + ); - const reconfigureSession = ( - record: ActiveCopilotSession, - input: { - readonly model: string | undefined; - readonly reasoningEffort: CodexReasoningEffort | undefined; + record.session = nextSession; + record.interactionMode = undefined; + record.model = input.model; + record.reasoningEffort = input.reasoningEffort; + record.updatedAt = new Date().toISOString(); + record.unsubscribe = nextSession.on((event) => { + handleSessionEvent(record, event); + }); }, - ) => - Effect.tryPromise({ - try: async () => { - const sessionId = record.session.sessionId; - const previousSession = record.session; - const previousUnsubscribe = record.unsubscribe; - previousUnsubscribe(); - // Best-effort teardown -- must not block new session creation - try { - await previousSession.destroy(); - } catch { - // ignored - } + catch: (cause) => + new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "session.reconfigure", + detail: toMessage(cause, "Failed to reconfigure GitHub Copilot session."), + cause, + }), + }); - const handlers = createInteractionHandlers( - record.threadId, - () => record.currentTurnId, - () => record.runtimeMode, - record.pendingApprovalResolvers, - record.pendingUserInputResolvers, - ); - const nextSession = await withSanitizedCopilotDesktopEnv(() => - record.client.resumeSession(sessionId, { - ...handlers, 
- ...(input.model ? { model: input.model } : {}), - ...(input.reasoningEffort ? { reasoningEffort: input.reasoningEffort } : {}), - ...(record.cwd ? { workingDirectory: record.cwd } : {}), - ...(record.configDir ? { configDir: record.configDir } : {}), - streaming: true, - }), - ); + const handleSessionEvent = (record: ActiveCopilotSession, event: SessionEvent) => { + record.updatedAt = event.timestamp; + if (event.type === "assistant.turn_start") { + beginCopilotTurn(record, TurnId.make(event.data.turnId)); + } + if (event.type === "assistant.usage") { + recordTurnUsage(record, event.data); + } + if (event.type === "session.error") { + record.lastError = event.data.message; + } + if (event.type === "session.model_change") { + record.model = event.data.newModel; + } + if (event.type === "session.mode_changed") { + record.interactionMode = toInteractionMode(event.data.newMode); + } + if (event.type === "tool.execution_start" && trimToUndefined(event.data.toolName)) { + record.toolTitlesByCallId.set(event.data.toolCallId, trimToUndefined(event.data.toolName)!); + } - record.session = nextSession; - record.interactionMode = undefined; - record.model = input.model; - record.reasoningEffort = input.reasoningEffort; - record.updatedAt = new Date().toISOString(); - record.unsubscribe = nextSession.on((event) => { - handleSessionEvent(record, event); - }); - }, - catch: (cause) => - new ProviderAdapterRequestError({ - provider: PROVIDER, - method: "session.reconfigure", - detail: toMessage(cause, "Failed to reconfigure GitHub Copilot session."), - cause, - }), + void writeNativeEvent(record.threadId, event); + const runtimeEvents = mapSessionEvent(record, event); + if (runtimeEvents.length > 0) { + void emitRuntimeEvents(runtimeEvents); + } + if (event.type === "session.plan_changed" && event.data.operation !== "delete") { + void Effect.runPromise(emitLatestProposedPlan(record)).catch((cause) => { + void emitRuntimeEvents([ + makeSyntheticEvent( + record.threadId, + 
"runtime.warning", + { + message: "Failed to read GitHub Copilot plan.", + detail: toMessage(cause, "Failed to read GitHub Copilot plan."), + }, + { turnId: currentSyntheticTurnId(record) }, + ), + ]); }); + } + if (event.type === "tool.execution_complete") { + record.toolTitlesByCallId.delete(event.data.toolCallId); + } + if (event.type === "assistant.turn_end") { + markTurnAwaitingCompletion(record); + } + if (event.type === "abort" || event.type === "session.idle") { + clearTurnTracking(record); + } + }; - const handleSessionEvent = (record: ActiveCopilotSession, event: SessionEvent) => { - record.updatedAt = event.timestamp; - if (event.type === "assistant.turn_start") { - beginCopilotTurn(record, TurnId.make(event.data.turnId)); - } - if (event.type === "assistant.usage") { - recordTurnUsage(record, event.data); - } - if (event.type === "session.error") { - record.lastError = event.data.message; - } - if (event.type === "session.model_change") { - record.model = event.data.newModel; - } - if (event.type === "session.mode_changed") { - record.interactionMode = toInteractionMode(event.data.newMode); - } - if (event.type === "tool.execution_start" && trimToUndefined(event.data.toolName)) { - record.toolTitlesByCallId.set(event.data.toolCallId, trimToUndefined(event.data.toolName)!); - } - - void writeNativeEvent(record.threadId, event); - const runtimeEvents = mapSessionEvent(record, event); - if (runtimeEvents.length > 0) { - void emitRuntimeEvents(runtimeEvents); - } - if (event.type === "session.plan_changed" && event.data.operation !== "delete") { - void Effect.runPromise(emitLatestProposedPlan(record)).catch((cause) => { - void emitRuntimeEvents([ - makeSyntheticEvent( - record.threadId, - "runtime.warning", - { - message: "Failed to read GitHub Copilot plan.", - detail: toMessage(cause, "Failed to read GitHub Copilot plan."), - }, - { turnId: currentSyntheticTurnId(record) }, - ), - ]); - }); - } - if (event.type === "tool.execution_complete") { - 
record.toolTitlesByCallId.delete(event.data.toolCallId); - } - if (event.type === "assistant.turn_end") { - markTurnAwaitingCompletion(record); - } - if (event.type === "abort" || event.type === "session.idle") { - clearTurnTracking(record); - } - }; + const getSessionRecord = (threadId: ThreadId) => { + const record = sessions.get(threadId); + if (!record) { + return Effect.fail( + new ProviderAdapterSessionNotFoundError({ provider: PROVIDER, threadId }), + ); + } + return Effect.succeed(record); + }; - const getSessionRecord = (threadId: ThreadId) => { - const record = sessions.get(threadId); - if (!record) { - return Effect.fail( - new ProviderAdapterSessionNotFoundError({ provider: PROVIDER, threadId }), - ); - } - return Effect.succeed(record); - }; + const stopRecord = async (record: ActiveCopilotSession) => { + record.unsubscribe(); + try { + await record.session.destroy(); + } catch { + // best effort + } + try { + await record.client.stop(); + } catch { + // best effort + } + void emitRuntimeEvents([ + makeSyntheticEvent(record.threadId, "session.exited", { + reason: "Session stopped", + exitKind: "graceful", + }), + ]); + for (const pending of record.pendingApprovalResolvers.values()) { + pending.resolve({ kind: "denied-interactively-by-user" }); + } + record.pendingApprovalResolvers.clear(); + for (const pending of record.pendingUserInputResolvers.values()) { + pending.resolve({ answer: "", wasFreeform: true }); + } + record.pendingUserInputResolvers.clear(); + sessions.delete(record.threadId); + }; - const stopRecord = async (record: ActiveCopilotSession) => { - record.unsubscribe(); - try { - await record.session.destroy(); - } catch { - // best effort - } - try { - await record.client.stop(); - } catch { - // best effort - } - void emitRuntimeEvents([ - makeSyntheticEvent(record.threadId, "session.exited", { - reason: "Session stopped", - exitKind: "graceful", - }), - ]); - for (const pending of record.pendingApprovalResolvers.values()) { - 
pending.resolve({ kind: "denied-interactively-by-user" }); + const startSession: CopilotAdapterShape["startSession"] = (input) => + Effect.gen(function* () { + const existing = sessions.get(input.threadId); + if (existing) { + return { + provider: PROVIDER, + status: "ready", + runtimeMode: existing.runtimeMode, + ...(existing.cwd ? { cwd: existing.cwd } : {}), + ...(existing.model ? { model: existing.model } : {}), + threadId: input.threadId, + resumeCursor: existing.session.sessionId, + createdAt: existing.createdAt, + updatedAt: existing.updatedAt, + ...(existing.lastError ? { lastError: existing.lastError } : {}), + } satisfies ProviderSession; } - record.pendingApprovalResolvers.clear(); - for (const pending of record.pendingUserInputResolvers.values()) { - pending.resolve({ answer: "", wasFreeform: true }); + if (!copilotSettings.enabled) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "startSession", + issue: "GitHub Copilot provider is disabled in server settings.", + }); } - record.pendingUserInputResolvers.clear(); - sessions.delete(record.threadId); - }; - - /** Resolved CLI path from server settings; updated on each startSession call. 
*/ - let resolvedCliPath: string | undefined; - - const startSession: CopilotAdapterShape["startSession"] = (input) => - Effect.gen(function* () { - if (input.provider !== undefined && input.provider !== PROVIDER) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "startSession", - issue: `Expected provider '${PROVIDER}', received '${input.provider}'.`, - }); - } - - const copilotSettings = yield* serverSettingsService.getSettings.pipe( - Effect.map((s) => s.providers.copilot), - Effect.mapError( - (error) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: error.message, - cause: error, - }), - ), - ); - const existing = sessions.get(input.threadId); - if (existing) { - return { - provider: PROVIDER, - status: "ready", - runtimeMode: existing.runtimeMode, - ...(existing.cwd ? { cwd: existing.cwd } : {}), - ...(existing.model ? { model: existing.model } : {}), - threadId: input.threadId, - resumeCursor: existing.session.sessionId, - createdAt: existing.createdAt, - updatedAt: existing.updatedAt, - ...(existing.lastError ? { lastError: existing.lastError } : {}), - } satisfies ProviderSession; - } - if (!copilotSettings.enabled) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "startSession", - issue: "Copilot provider is disabled in server settings.", - }); - } - const settingsBinaryPath = copilotSettings.binaryPath.trim(); - const cliPath = settingsBinaryPath || resolveBundledCopilotCliPath(); - resolvedCliPath = cliPath; - const configDir = trimToUndefined(copilotSettings.configDir); - const resumeSessionId = extractResumeSessionId(input.resumeCursor); - const clientOptions: CopilotClientOptions = { - ...(cliPath ? { cliPath } : {}), - ...(input.cwd ? 
{ cwd: input.cwd } : {}), - logLevel: "error", - }; - const { CopilotClient } = yield* Effect.promise(() => import("@github/copilot-sdk")); - const client = options?.clientFactory?.(clientOptions) ?? new CopilotClient(clientOptions); - const pendingApprovalResolvers = new Map(); - const pendingUserInputResolvers = new Map(); - const copilotOptions = - input.modelSelection?.provider === "copilot" ? input.modelSelection.options : undefined; - const model = input.modelSelection?.model; - const reasoningEffort = getCopilotReasoningEffort(copilotOptions); - let sessionRecord: ActiveCopilotSession | undefined; - const handlers = createInteractionHandlers( - input.threadId, - () => sessionRecord?.currentTurnId, - () => sessionRecord?.runtimeMode ?? input.runtimeMode, - pendingApprovalResolvers, - pendingUserInputResolvers, - ); + const settingsBinaryPath = copilotSettings.binaryPath.trim(); + const cliPath = settingsBinaryPath || resolveBundledCopilotCliPath(); + const configDir = trimToUndefined(copilotSettings.configDir); + const resumeSessionId = extractResumeSessionId(input.resumeCursor); + const clientOptions: CopilotClientOptions = { + ...(cliPath ? { cliPath } : {}), + ...(input.cwd ? { cwd: input.cwd } : {}), + logLevel: "error", + }; + const { CopilotClient } = yield* Effect.promise(() => import("@github/copilot-sdk")); + const client = options?.clientFactory?.(clientOptions) ?? new CopilotClient(clientOptions); + const pendingApprovalResolvers = new Map(); + const pendingUserInputResolvers = new Map(); + const model = input.modelSelection?.model; + const reasoningEffort = getCopilotReasoningEffort(input.modelSelection?.options); + let sessionRecord: ActiveCopilotSession | undefined; + const handlers = createInteractionHandlers( + input.threadId, + () => sessionRecord?.currentTurnId, + () => sessionRecord?.runtimeMode ?? 
input.runtimeMode, + pendingApprovalResolvers, + pendingUserInputResolvers, + ); - yield* validateSessionConfiguration({ - client, - threadId: input.threadId, - model, - reasoningEffort, - }); + yield* validateSessionConfiguration({ + client, + threadId: input.threadId, + model, + reasoningEffort, + }); - const session = yield* Effect.tryPromise({ - try: async () => { - if (resumeSessionId) { - return withSanitizedCopilotDesktopEnv(() => - client.resumeSession(resumeSessionId, { - ...handlers, - ...(model ? { model } : {}), - ...(reasoningEffort ? { reasoningEffort } : {}), - ...(input.cwd ? { workingDirectory: input.cwd } : {}), - ...(configDir ? { configDir } : {}), - streaming: true, - }), - ); - } + const session = yield* Effect.tryPromise({ + try: async () => { + if (resumeSessionId) { return withSanitizedCopilotDesktopEnv(() => - client.createSession({ + client.resumeSession(resumeSessionId, { ...handlers, ...(model ? { model } : {}), ...(reasoningEffort ? { reasoningEffort } : {}), @@ -1357,356 +1412,404 @@ const makeCopilotAdapter = (options?: CopilotAdapterLiveOptions) => streaming: true, }), ); - }, - catch: (cause) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: toMessage(cause, "Failed to start GitHub Copilot session."), - cause, - }), - }).pipe(Effect.tapError(() => Effect.promise(() => client.stop().catch(() => undefined)))); - - const record = createSessionRecord({ - threadId: input.threadId, - client, - session, - runtimeMode: input.runtimeMode, - pendingApprovalResolvers, - pendingUserInputResolvers, - cwd: input.cwd, - configDir, - model, - reasoningEffort, - }); - const unsubscribe = session.on((event: unknown) => { - handleSessionEvent(record, event); - }); - record.unsubscribe = unsubscribe; - sessionRecord = record; - sessions.set(input.threadId, record); - - yield* Queue.offerAll(runtimeEventQueue, [ - makeSyntheticEvent(input.threadId, "session.started", { - message: resumeSessionId - ? 
"Resumed GitHub Copilot session" - : "Started GitHub Copilot session", - resume: { sessionId: session.sessionId }, - }), - makeSyntheticEvent(input.threadId, "session.configured", { - config: { - ...(input.cwd ? { cwd: input.cwd } : {}), + } + return withSanitizedCopilotDesktopEnv(() => + client.createSession({ + ...handlers, ...(model ? { model } : {}), ...(reasoningEffort ? { reasoningEffort } : {}), + ...(input.cwd ? { workingDirectory: input.cwd } : {}), ...(configDir ? { configDir } : {}), streaming: true, - }, - }), - makeSyntheticEvent(input.threadId, "thread.started", { - providerThreadId: session.sessionId, - }), - makeSyntheticEvent(input.threadId, "session.state.changed", { - state: "ready", - reason: "session.started", + }), + ); + }, + catch: (cause) => + new ProviderAdapterProcessError({ + provider: PROVIDER, + threadId: input.threadId, + detail: toMessage(cause, "Failed to start GitHub Copilot session."), + cause, }), - ]); - - return { - provider: PROVIDER, - status: "ready", - runtimeMode: input.runtimeMode, - ...(input.cwd ? { cwd: input.cwd } : {}), - ...(model ? { model } : {}), - threadId: input.threadId, - resumeCursor: session.sessionId, - createdAt: record.createdAt, - updatedAt: record.updatedAt, - } satisfies ProviderSession; + }).pipe(Effect.tapError(() => Effect.promise(() => client.stop().catch(() => undefined)))); + + const record = createSessionRecord({ + threadId: input.threadId, + client, + session, + runtimeMode: input.runtimeMode, + pendingApprovalResolvers, + pendingUserInputResolvers, + cwd: input.cwd, + configDir, + model, + reasoningEffort, }); + const unsubscribe = session.on((event: unknown) => { + handleSessionEvent(record, event as SessionEvent); + }); + record.unsubscribe = unsubscribe; + sessionRecord = record; + sessions.set(input.threadId, record); + + yield* Queue.offerAll(runtimeEventQueue, [ + makeSyntheticEvent(input.threadId, "session.started", { + message: resumeSessionId + ? 
"Resumed GitHub Copilot session" + : "Started GitHub Copilot session", + resume: { sessionId: session.sessionId }, + }), + makeSyntheticEvent(input.threadId, "session.configured", { + config: { + ...(input.cwd ? { cwd: input.cwd } : {}), + ...(model ? { model } : {}), + ...(reasoningEffort ? { reasoningEffort } : {}), + ...(configDir ? { configDir } : {}), + streaming: true, + }, + }), + makeSyntheticEvent(input.threadId, "thread.started", { + providerThreadId: session.sessionId, + }), + makeSyntheticEvent(input.threadId, "session.state.changed", { + state: "ready", + reason: "session.started", + }), + ]); - const sendTurn: CopilotAdapterShape["sendTurn"] = (input) => - Effect.gen(function* () { - const record = yield* getSessionRecord(input.threadId); - const turnCopilotOptions = - input.modelSelection?.provider === "copilot" ? input.modelSelection.options : undefined; - const turnModel = input.modelSelection?.model; - const explicitReasoningEffort = getCopilotReasoningEffort(turnCopilotOptions); - const nextModel = turnModel ?? record.model; - const nextReasoningEffort = - explicitReasoningEffort !== undefined - ? explicitReasoningEffort - : turnModel && turnModel !== record.model - ? undefined - : record.reasoningEffort; - const attachments = yield* Effect.forEach(input.attachments ?? [], (attachment) => { - const attachmentPath = resolveAttachmentPath({ - attachmentsDir: serverConfig.attachmentsDir, - attachment, - }); - if (!attachmentPath) { - return Effect.fail( - new ProviderAdapterRequestError({ - provider: PROVIDER, - method: "session.send", - detail: `Invalid attachment id '${attachment.id}'.`, - }), - ); - } - return Effect.succeed({ - type: "file" as const, - path: attachmentPath, - displayName: attachment.name, - }); - }); + return { + provider: PROVIDER, + status: "ready", + runtimeMode: input.runtimeMode, + ...(input.cwd ? { cwd: input.cwd } : {}), + ...(model ? 
{ model } : {}), + threadId: input.threadId, + resumeCursor: session.sessionId, + createdAt: record.createdAt, + updatedAt: record.updatedAt, + } satisfies ProviderSession; + }); - yield* validateSessionConfiguration({ - client: record.client, - threadId: input.threadId, - model: nextModel, - reasoningEffort: nextReasoningEffort, + const sendTurn: CopilotAdapterShape["sendTurn"] = (input) => + Effect.gen(function* () { + const record = yield* getSessionRecord(input.threadId); + const turnModel = input.modelSelection?.model; + const explicitReasoningEffort = getCopilotReasoningEffort(input.modelSelection?.options); + const nextModel = turnModel ?? record.model; + const nextReasoningEffort = + explicitReasoningEffort !== undefined + ? explicitReasoningEffort + : turnModel && turnModel !== record.model + ? undefined + : record.reasoningEffort; + const attachments = yield* Effect.forEach(input.attachments ?? [], (attachment) => { + const attachmentPath = resolveAttachmentPath({ + attachmentsDir: serverConfig.attachmentsDir, + attachment, }); - - if (nextModel !== record.model || nextReasoningEffort !== record.reasoningEffort) { - yield* reconfigureSession(record, { - model: nextModel, - reasoningEffort: nextReasoningEffort, - }); - } - - const interactionMode = input.interactionMode ?? record.interactionMode ?? "default"; - yield* syncInteractionMode(record, interactionMode); - - const turnId = TurnId.make(`copilot-turn-${randomUUID()}`); - record.pendingTurnIds.push(turnId); - record.currentTurnId = turnId; - record.currentProviderTurnId = undefined; - - yield* Effect.tryPromise({ - try: () => - record.session.send({ - prompt: input.input ?? "", - ...(attachments.length > 0 ? 
{ attachments } : {}), - mode: "immediate", - }), - catch: (cause) => + if (!attachmentPath) { + return Effect.fail( new ProviderAdapterRequestError({ provider: PROVIDER, method: "session.send", - detail: toMessage(cause, "Failed to send GitHub Copilot turn."), - cause, - }), - }).pipe( - Effect.tapError(() => - Effect.sync(() => { - record.pendingTurnIds = record.pendingTurnIds.filter( - (candidate) => candidate !== turnId, - ); - if (record.currentTurnId === turnId) { - record.currentTurnId = undefined; - } + detail: `Invalid attachment id '${attachment.id}'.`, }), - ), - ); - - record.updatedAt = new Date().toISOString(); + ); + } + return Effect.succeed({ + type: "file" as const, + path: attachmentPath, + displayName: attachment.name, + }); + }); - return { - threadId: input.threadId, - turnId, - resumeCursor: record.session.sessionId, - } satisfies ProviderTurnStartResult; + yield* validateSessionConfiguration({ + client: record.client, + threadId: input.threadId, + model: nextModel, + reasoningEffort: nextReasoningEffort, }); - const interruptTurn: CopilotAdapterShape["interruptTurn"] = (threadId) => - Effect.gen(function* () { - const record = yield* getSessionRecord(threadId); - yield* Effect.tryPromise({ - try: () => record.session.abort(), - catch: (cause) => - new ProviderAdapterRequestError({ - provider: PROVIDER, - method: "session.abort", - detail: toMessage(cause, "Failed to interrupt GitHub Copilot turn."), - cause, - }), + if (nextModel !== record.model || nextReasoningEffort !== record.reasoningEffort) { + yield* reconfigureSession(record, { + model: nextModel, + reasoningEffort: nextReasoningEffort, }); - }); + } - const respondToRequest: CopilotAdapterShape["respondToRequest"] = ( - threadId, - requestId, - decision, - ) => - Effect.gen(function* () { - const record = yield* getSessionRecord(threadId); - const pending = record.pendingApprovalResolvers.get(requestId); - if (!pending) { - return yield* new ProviderAdapterRequestError({ + const 
interactionMode = input.interactionMode ?? record.interactionMode ?? "default"; + yield* syncInteractionMode(record, interactionMode); + + const turnId = TurnId.make(`copilot-turn-${randomUUID()}`); + record.pendingTurnIds.push(turnId); + record.currentTurnId = turnId; + record.currentProviderTurnId = undefined; + + yield* Effect.tryPromise({ + try: () => + record.session.send({ + prompt: input.input ?? "", + ...(attachments.length > 0 ? { attachments } : {}), + mode: "immediate", + }), + catch: (cause) => + new ProviderAdapterRequestError({ provider: PROVIDER, - method: "session.permission.respond", - detail: `Unknown pending GitHub Copilot approval request '${requestId}'.`, - }); - } - record.pendingApprovalResolvers.delete(requestId); - pending.resolve(approvalDecisionToPermissionResult(decision)); - yield* Queue.offer( - runtimeEventQueue, - makeSyntheticEvent( - threadId, - "request.resolved", - { - requestType: pending.requestType, - decision, - resolution: approvalDecisionToPermissionResult(decision), - }, - { requestId, turnId: pending.turnId }, - ), - ); - }); + method: "session.send", + detail: toMessage(cause, "Failed to send GitHub Copilot turn."), + cause, + }), + }).pipe( + Effect.tapError(() => + Effect.sync(() => { + record.pendingTurnIds = record.pendingTurnIds.filter( + (candidate) => candidate !== turnId, + ); + if (record.currentTurnId === turnId) { + record.currentTurnId = undefined; + } + }), + ), + ); - const respondToUserInput: CopilotAdapterShape["respondToUserInput"] = ( - threadId, - requestId, - answers, - ) => - Effect.gen(function* () { - const record = yield* getSessionRecord(threadId); - const pending = record.pendingUserInputResolvers.get(requestId); - if (!pending) { - return yield* new ProviderAdapterRequestError({ + record.updatedAt = new Date().toISOString(); + + return { + threadId: input.threadId, + turnId, + resumeCursor: record.session.sessionId, + } satisfies ProviderTurnStartResult; + }); + + const interruptTurn: 
CopilotAdapterShape["interruptTurn"] = (threadId) => + Effect.gen(function* () { + const record = yield* getSessionRecord(threadId); + yield* Effect.tryPromise({ + try: () => record.session.abort(), + catch: (cause) => + new ProviderAdapterRequestError({ provider: PROVIDER, - method: "session.userInput.respond", - detail: `Unknown pending GitHub Copilot user-input request '${requestId}'.`, - }); - } - record.pendingUserInputResolvers.delete(requestId); - pending.resolve(resolveUserInputAnswer(pending, answers)); - yield* Queue.offer( - runtimeEventQueue, - makeSyntheticEvent( - threadId, - "user-input.resolved", - { - answers, - }, - { requestId, turnId: pending.turnId }, - ), - ); + method: "session.abort", + detail: toMessage(cause, "Failed to interrupt GitHub Copilot turn."), + cause, + }), }); + }); - const stopSession: CopilotAdapterShape["stopSession"] = (threadId) => - Effect.gen(function* () { - const record = yield* getSessionRecord(threadId); - yield* Effect.tryPromise({ - try: async () => { - await stopRecord(record); - }, - catch: (cause) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId, - detail: toMessage(cause, "Failed to stop GitHub Copilot session."), - cause, - }), + const respondToRequest: CopilotAdapterShape["respondToRequest"] = ( + threadId, + requestId, + decision, + ) => + Effect.gen(function* () { + const record = yield* getSessionRecord(threadId); + const pending = record.pendingApprovalResolvers.get(requestId); + if (!pending) { + return yield* new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "session.permission.respond", + detail: `Unknown pending GitHub Copilot approval request '${requestId}'.`, }); - }); - - const listSessions: CopilotAdapterShape["listSessions"] = () => - Effect.sync(() => - Array.from(sessions.values()).map((record) => - Object.assign( - { - provider: PROVIDER, - status: record.currentTurnId ? 
"running" : "ready", - runtimeMode: record.runtimeMode, - threadId: record.threadId, - resumeCursor: record.session.sessionId, - createdAt: record.createdAt, - updatedAt: record.updatedAt, - } as ProviderSession, - record.cwd ? { cwd: record.cwd } : undefined, - record.model ? { model: record.model } : undefined, - record.currentTurnId ? { activeTurnId: record.currentTurnId } : undefined, - record.lastError ? { lastError: record.lastError } : undefined, - ), + } + record.pendingApprovalResolvers.delete(requestId); + pending.resolve(approvalDecisionToPermissionResult(decision)); + yield* Queue.offer( + runtimeEventQueue, + makeSyntheticEvent( + threadId, + "request.resolved", + { + requestType: pending.requestType, + decision, + resolution: approvalDecisionToPermissionResult(decision), + }, + { requestId, turnId: pending.turnId }, ), ); + }); - const hasSession: CopilotAdapterShape["hasSession"] = (threadId) => - Effect.sync(() => sessions.has(threadId)); - - const readThread: CopilotAdapterShape["readThread"] = (threadId) => - Effect.gen(function* () { - const record = yield* getSessionRecord(threadId); - return yield* Effect.tryPromise({ - try: async () => { - const messages = await record.session.getMessages(); - return mapHistoryToTurns(threadId, messages); - }, - catch: (cause) => - new ProviderAdapterRequestError({ - provider: PROVIDER, - method: "session.getMessages", - detail: toMessage(cause, "Failed to read GitHub Copilot thread history."), - cause, - }), - }); - }); - - const rollbackThread: CopilotAdapterShape["rollbackThread"] = (_threadId) => - Effect.fail( - new ProviderAdapterRequestError({ + const respondToUserInput: CopilotAdapterShape["respondToUserInput"] = ( + threadId, + requestId, + answers, + ) => + Effect.gen(function* () { + const record = yield* getSessionRecord(threadId); + const pending = record.pendingUserInputResolvers.get(requestId); + if (!pending) { + return yield* new ProviderAdapterRequestError({ provider: PROVIDER, - method: 
"thread.rollback", - detail: - "GitHub Copilot SDK does not expose a supported conversation rollback API for existing sessions.", - }), + method: "session.userInput.respond", + detail: `Unknown pending GitHub Copilot user-input request '${requestId}'.`, + }); + } + record.pendingUserInputResolvers.delete(requestId); + pending.resolve(resolveUserInputAnswer(pending, answers)); + yield* Queue.offer( + runtimeEventQueue, + makeSyntheticEvent( + threadId, + "user-input.resolved", + { + answers, + }, + { requestId, turnId: pending.turnId }, + ), ); + }); - const stopAll: CopilotAdapterShape["stopAll"] = () => - Effect.tryPromise({ + const stopSession: CopilotAdapterShape["stopSession"] = (threadId) => + Effect.gen(function* () { + const record = yield* getSessionRecord(threadId); + yield* Effect.tryPromise({ try: async () => { - await Promise.all(Array.from(sessions.values()).map((record) => stopRecord(record))); + await stopRecord(record); }, catch: (cause) => new ProviderAdapterProcessError({ provider: PROVIDER, - threadId: ThreadId.make("_all"), - detail: toMessage(cause, "Failed to stop GitHub Copilot sessions."), + threadId, + detail: toMessage(cause, "Failed to stop GitHub Copilot session."), cause, }), }); + }); - yield* Effect.addFinalizer(() => - Effect.forEach( - sessions, - ([, record]) => Effect.promise(() => stopRecord(record).catch(() => undefined)), - { discard: true }, - ).pipe(Effect.tap(() => Queue.shutdown(runtimeEventQueue))), + const listSessions: CopilotAdapterShape["listSessions"] = () => + Effect.sync(() => + Array.from(sessions.values()).map((record) => + Object.assign( + { + provider: PROVIDER, + status: record.currentTurnId ? "running" : "ready", + runtimeMode: record.runtimeMode, + threadId: record.threadId, + resumeCursor: record.session.sessionId, + createdAt: record.createdAt, + updatedAt: record.updatedAt, + } as ProviderSession, + record.cwd ? { cwd: record.cwd } : undefined, + record.model ? 
{ model: record.model } : undefined, + record.currentTurnId ? { activeTurnId: record.currentTurnId } : undefined, + record.lastError ? { lastError: record.lastError } : undefined, + ), + ), ); - return { - provider: PROVIDER, - capabilities: getProviderCapabilities(PROVIDER), - startSession, - sendTurn, - interruptTurn, - respondToRequest, - respondToUserInput, - stopSession, - listSessions, - hasSession, - readThread, - rollbackThread, - stopAll, - streamEvents: Stream.fromQueue(runtimeEventQueue), - } satisfies CopilotAdapterShape; - }); - -export const CopilotAdapterLive = Layer.effect(CopilotAdapter, makeCopilotAdapter()); + const hasSession: CopilotAdapterShape["hasSession"] = (threadId) => + Effect.sync(() => sessions.has(threadId)); + + const readThread: CopilotAdapterShape["readThread"] = (threadId) => + Effect.gen(function* () { + const record = yield* getSessionRecord(threadId); + return yield* Effect.tryPromise({ + try: async () => { + const messages = await record.session.getMessages(); + return mapHistoryToTurns(threadId, messages); + }, + catch: (cause) => + new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "session.getMessages", + detail: toMessage(cause, "Failed to read GitHub Copilot thread history."), + cause, + }), + }); + }); + + const rollbackThread: CopilotAdapterShape["rollbackThread"] = (_threadId) => + Effect.fail( + new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "thread.rollback", + detail: + "GitHub Copilot SDK does not expose a supported conversation rollback API for existing sessions.", + }), + ); + + const stopAll: CopilotAdapterShape["stopAll"] = () => + Effect.tryPromise({ + try: async () => { + await Promise.all(Array.from(sessions.values()).map((record) => stopRecord(record))); + }, + catch: (cause) => + new ProviderAdapterProcessError({ + provider: PROVIDER, + threadId: ThreadId.make("_all"), + detail: toMessage(cause, "Failed to stop GitHub Copilot sessions."), + cause, + }), + }); + yield* 
Effect.addFinalizer(() => + Effect.forEach( + sessions, + ([, record]) => Effect.promise(() => stopRecord(record).catch(() => undefined)), + { discard: true }, + ).pipe(Effect.tap(() => Queue.shutdown(runtimeEventQueue))), + ); + + return { + provider: PROVIDER, + capabilities: { sessionModelSwitch: "in-session" }, + startSession, + sendTurn, + interruptTurn, + respondToRequest, + respondToUserInput, + stopSession, + listSessions, + hasSession, + readThread, + rollbackThread, + stopAll, + streamEvents: Stream.fromQueue(runtimeEventQueue), + } satisfies CopilotAdapterShape; +}); + +/** + * Back-compat Layer: binds `makeCopilotAdapter` to the legacy + * `CopilotAdapter` Service tag. Kept so the conformance test, the desktop + * boot graph, and any other consumers that still resolve adapters through + * Context can keep working until they migrate to driver-bundled instances. + * + * Reads `copilotSettings` from `ServerSettingsService` so the legacy + * single-instance path continues to follow the persisted server settings. + */ export function makeCopilotAdapterLive(options?: CopilotAdapterLiveOptions) { - return Layer.effect(CopilotAdapter, makeCopilotAdapter(options)); + return Layer.effect( + CopilotAdapter, + Effect.gen(function* () { + // Lazy-import to avoid a hard dependency on ServerSettingsService inside + // the per-instance `makeCopilotAdapter` factory (drivers pass typed + // config directly). + const { ServerSettingsService } = yield* Effect.promise( + () => import("../../serverSettings.ts"), + ); + const serverSettingsService = yield* ServerSettingsService; + const settings = yield* serverSettingsService.getSettings.pipe( + Effect.map((s) => s.providers.copilot), + Effect.orElseSucceed( + () => + ({ + enabled: true, + binaryPath: "", + configDir: "", + customModels: [] as ReadonlyArray, + }) as const, + ), + ); + // The contracts schema is `GenericProviderSettings`; coerce its shape + // into our local `CopilotSettings` view (same fields). 
+ const copilotSettings = { + enabled: settings.enabled, + binaryPath: settings.binaryPath, + configDir: settings.configDir, + customModels: settings.customModels, + } satisfies CopilotSettings; + return yield* makeCopilotAdapter(copilotSettings, options); + }), + ); } // ── Dynamic model discovery & usage (consumed by wsServer) ───────── @@ -1718,7 +1821,6 @@ export async function fetchCopilotModels(overrideCliPath?: string): Promise | null> { try { const { CopilotClient } = await import("@github/copilot-sdk"); - const { resolveBundledCopilotCliPath } = await import("./copilotCliPath.ts"); const cliPath = overrideCliPath?.trim() || resolveBundledCopilotCliPath(); const client = new CopilotClient({ ...(cliPath ? { cliPath } : {}), @@ -1756,7 +1858,6 @@ export async function fetchCopilotUsage(overrideCliPath?: string): Promise<{ }> { try { const { CopilotClient } = await import("@github/copilot-sdk"); - const { resolveBundledCopilotCliPath } = await import("./copilotCliPath.ts"); const cliPath = overrideCliPath?.trim() || resolveBundledCopilotCliPath(); const client = new CopilotClient({ ...(cliPath ? { cliPath } : {}), diff --git a/apps/server/src/provider/Layers/CopilotProvider.ts b/apps/server/src/provider/Layers/CopilotProvider.ts new file mode 100644 index 00000000000..dc72eb4010a --- /dev/null +++ b/apps/server/src/provider/Layers/CopilotProvider.ts @@ -0,0 +1,306 @@ +/** + * CopilotProvider — snapshot probe for the GitHub Copilot driver. + * + * Mirrors the shape of `ClaudeProvider` / `OpenCodeProvider`: exports a + * `checkCopilotProviderStatus` Effect that probes the resolved binary + * (version) and best-effort auth state via the Copilot SDK, plus a + * `makePendingCopilotProvider` snapshot used until the first probe lands. + * + * The probe purposefully tolerates failure — Copilot is often optional and + * a missing CLI / unauthenticated SDK should still produce a valid (but + * not-ready) snapshot rather than tear the driver down. 
+ */ +import { + type ModelCapabilities, + ProviderDriverKind, + type ServerProviderModel, +} from "@t3tools/contracts"; +import { Data, Effect, Option, Result } from "effect"; +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; +import { createModelCapabilities } from "@t3tools/shared/model"; + +import { + buildServerProvider, + DEFAULT_TIMEOUT_MS, + detailFromResult, + isCommandMissingCause, + parseGenericCliVersion, + providerModelsFromSettings, + spawnAndCollect, + type ServerProviderDraft, +} from "../providerSnapshot.ts"; +import { + resolveBundledCopilotCliPath, + withSanitizedCopilotDesktopEnv, +} from "./copilotCliPath.ts"; +import type { CopilotSettings } from "../Drivers/CopilotSettings.ts"; + +const PROVIDER = ProviderDriverKind.make("copilot"); + +const COPILOT_PRESENTATION = { + displayName: "GitHub Copilot", + showInteractionModeToggle: true, +} as const; + +const DEFAULT_COPILOT_MODEL_CAPABILITIES: ModelCapabilities = createModelCapabilities({ + optionDescriptors: [], +}); + +/** + * Resolve the binary path the runtime would actually invoke for the given + * settings. An explicit `binaryPath` always wins; otherwise we fall back + * to the bundled CLI (Electron desktop builds ship one per platform). 
+ */ +function resolveCopilotBinaryPath(settings: CopilotSettings): string | undefined { + const explicit = settings.binaryPath.trim(); + if (explicit.length > 0) { + return explicit; + } + return resolveBundledCopilotCliPath(); +} + +const runCopilotVersionCommand = Effect.fn("runCopilotVersionCommand")(function* ( + binaryPath: string, + environment: NodeJS.ProcessEnv, +) { + const command = ChildProcess.make(binaryPath, ["--version"], { + env: environment, + shell: process.platform === "win32", + }); + return yield* spawnAndCollect(binaryPath, command); +}); + +interface CopilotAuthProbeResult { + readonly authenticated: boolean; + readonly login?: string; + readonly detail?: string; +} + +/** + * Best-effort SDK probe: starts a transient `CopilotClient` against the + * resolved binary just long enough to enumerate models. A non-empty model + * list implies an authenticated GitHub account; failure (any kind) is + * folded back into "unknown auth" without surfacing as a driver error. + * + * Uses `withSanitizedCopilotDesktopEnv` so the Electron host environment + * (`ELECTRON_RUN_AS_NODE` etc.) doesn't leak into the spawned binary. + */ +class CopilotAuthProbeError extends Data.TaggedError("CopilotAuthProbeError")<{ + readonly message: string; + readonly cause?: unknown; +}> {} + +const probeCopilotAuth = (binaryPath: string | undefined): Effect.Effect => + Effect.tryPromise({ + try: async (): Promise => { + const { CopilotClient } = await import("@github/copilot-sdk"); + const client = new CopilotClient({ + ...(binaryPath ? 
{ cliPath: binaryPath } : {}), + logLevel: "error", + }); + try { + await withSanitizedCopilotDesktopEnv(() => client.start()); + const models = await withSanitizedCopilotDesktopEnv(() => + client.listModels().catch(() => undefined), + ); + const authenticated = !!(models && models.length > 0); + return { authenticated }; + } finally { + await client.stop().catch(() => undefined); + } + }, + catch: (cause) => + new CopilotAuthProbeError({ + message: cause instanceof Error ? cause.message : String(cause), + cause, + }), + }).pipe( + Effect.timeoutOption("8 seconds"), + Effect.result, + Effect.map((result): CopilotAuthProbeResult => { + if (Result.isFailure(result)) { + return { + authenticated: false, + detail: result.failure.message, + }; + } + return Option.isSome(result.success) + ? result.success.value + : { authenticated: false, detail: "Copilot SDK probe timed out." }; + }), + ); + +export const makePendingCopilotProvider = (settings: CopilotSettings): ServerProviderDraft => { + const checkedAt = new Date().toISOString(); + const models = providerModelsFromSettings( + [], + PROVIDER, + settings.customModels, + DEFAULT_COPILOT_MODEL_CAPABILITIES, + ); + + if (!settings.enabled) { + return buildServerProvider({ + presentation: COPILOT_PRESENTATION, + enabled: false, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "GitHub Copilot is disabled in T3 Code settings.", + }, + }); + } + + return buildServerProvider({ + presentation: COPILOT_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "GitHub Copilot status has not been checked in this session yet.", + }, + }); +}; + +export const checkCopilotProviderStatus = Effect.fn("checkCopilotProviderStatus")(function* ( + settings: CopilotSettings, + environment: NodeJS.ProcessEnv = process.env, +): Effect.fn.Return { + const 
checkedAt = new Date().toISOString(); + const models = providerModelsFromSettings( + [], + PROVIDER, + settings.customModels, + DEFAULT_COPILOT_MODEL_CAPABILITIES, + ); + + if (!settings.enabled) { + return buildServerProvider({ + presentation: COPILOT_PRESENTATION, + enabled: false, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "GitHub Copilot is disabled in T3 Code settings.", + }, + }); + } + + const binaryPath = resolveCopilotBinaryPath(settings); + if (!binaryPath) { + return buildServerProvider({ + presentation: COPILOT_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "error", + auth: { status: "unknown" }, + message: + "GitHub Copilot CLI is not installed and no binary path is configured. " + + "Install the GitHub Copilot CLI or set a binary path in settings.", + }, + }); + } + + const versionProbe = yield* runCopilotVersionCommand(binaryPath, environment).pipe( + Effect.timeoutOption(DEFAULT_TIMEOUT_MS), + Effect.result, + ); + + if (Result.isFailure(versionProbe)) { + const error = versionProbe.failure; + return buildServerProvider({ + presentation: COPILOT_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + installed: !isCommandMissingCause(error), + version: null, + status: "error", + auth: { status: "unknown" }, + message: isCommandMissingCause(error) + ? "GitHub Copilot CLI (`copilot`) is not installed or not on PATH." + : `Failed to execute GitHub Copilot CLI health check: ${error instanceof Error ? 
error.message : String(error)}.`, + }, + }); + } + + if (Option.isNone(versionProbe.success)) { + return buildServerProvider({ + presentation: COPILOT_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + installed: true, + version: null, + status: "error", + auth: { status: "unknown" }, + message: "GitHub Copilot CLI is installed but timed out while reporting its version.", + }, + }); + } + + const versionResult = versionProbe.success.value; + const parsedVersion = parseGenericCliVersion(`${versionResult.stdout}\n${versionResult.stderr}`); + if (versionResult.code !== 0) { + const detail = detailFromResult(versionResult); + return buildServerProvider({ + presentation: COPILOT_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + installed: true, + version: parsedVersion, + status: "error", + auth: { status: "unknown" }, + message: detail + ? `GitHub Copilot CLI is installed but failed to run. ${detail}` + : "GitHub Copilot CLI is installed but failed to run.", + }, + }); + } + + const auth = yield* probeCopilotAuth(binaryPath); + + return buildServerProvider({ + presentation: COPILOT_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + installed: true, + version: parsedVersion, + status: auth.authenticated ? "ready" : "warning", + auth: auth.authenticated + ? { status: "authenticated", type: "github" } + : { status: "unknown" }, + ...(auth.authenticated + ? {} + : { + message: + auth.detail ?? + "GitHub Copilot CLI is installed but no signed-in account was detected. 
Run `copilot auth login`.", + }), + }, + }); +}); + +export type { ServerProviderModel }; diff --git a/apps/server/src/provider/Layers/CursorAdapter.test.ts b/apps/server/src/provider/Layers/CursorAdapter.test.ts index e6bbd7569a4..375eec049a2 100644 --- a/apps/server/src/provider/Layers/CursorAdapter.test.ts +++ b/apps/server/src/provider/Layers/CursorAdapter.test.ts @@ -5,14 +5,27 @@ import { fileURLToPath } from "node:url"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { assert, it } from "@effect/vitest"; -import { Deferred, Effect, Fiber, Layer, Stream } from "effect"; - -import { ApprovalRequestId, type ProviderRuntimeEvent, ThreadId } from "@t3tools/contracts"; +import { Context, Deferred, Effect, Fiber, Layer, Schema, Stream } from "effect"; +import { createModelSelection } from "@t3tools/shared/model"; + +import { + ApprovalRequestId, + CursorSettings, + ProviderDriverKind, + type ProviderRuntimeEvent, + ThreadId, + ProviderInstanceId, +} from "@t3tools/contracts"; import { ServerConfig } from "../../config.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; -import { CursorAdapter } from "../Services/CursorAdapter.ts"; -import { makeCursorAdapterLive } from "./CursorAdapter.ts"; +import type { CursorAdapterShape } from "../Services/CursorAdapter.ts"; +import { makeCursorAdapter } from "./CursorAdapter.ts"; + +// Test-local service tag so the rest of the file can keep using `yield* CursorAdapter`. +class CursorAdapter extends Context.Service()( + "test/CursorAdapter", +) {} const __dirname = path.dirname(fileURLToPath(import.meta.url)); const mockAgentPath = path.join(__dirname, "../../../scripts/acp-mock-agent.ts"); @@ -90,8 +103,31 @@ async function waitForFileContent(filePath: string, attempts = 40) { throw new Error(`Timed out waiting for file content at ${filePath}`); } +// Tests mutate `ServerSettingsService` mid-flight (e.g. setting +// `providers.cursor.binaryPath` to a mock ACP wrapper). 
The adapter +// captures `cursorSettings` once at construction, so without a resolver +// the mutation is invisible — sessions would spawn the constructor's +// (empty) binary path. Wiring `resolveSettings` through +// `ServerSettingsService.getSettings` makes each session read the latest +// snapshot, matching the old "always read live" behavior that these +// tests assumed. +const makeResolveCursorSettings = Effect.gen(function* () { + const serverSettings = yield* ServerSettingsService; + return serverSettings.getSettings.pipe( + Effect.map((snapshot) => snapshot.providers.cursor), + Effect.orDie, + ); +}); + const cursorAdapterTestLayer = it.layer( - makeCursorAdapterLive().pipe( + Layer.effect( + CursorAdapter, + Effect.gen(function* () { + const cursorConfig = Schema.decodeSync(CursorSettings)({}); + const resolveSettings = yield* makeResolveCursorSettings; + return yield* makeCursorAdapter(cursorConfig, { resolveSettings }); + }), + ).pipe( Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge( ServerConfig.layerTest(process.cwd(), { @@ -119,10 +155,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { const session = yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); assert.equal(session.provider, "cursor"); @@ -204,10 +240,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); yield* adapter.stopSession(threadId); @@ -243,17 +279,17 @@ 
cursorAdapterTestLayer("CursorAdapterLive", (it) => { [ adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }), adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }), ], { concurrency: "unbounded" }, @@ -275,7 +311,7 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { const result = yield* adapter .startSession({ threadId: ThreadId.make("bad-provider"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), cwd: process.cwd(), runtimeMode: "full-access", }) @@ -301,10 +337,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "composer-2" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "composer-2" }, }); yield* adapter.sendTurn({ @@ -357,19 +393,15 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { providers: { cursor: { binaryPath: wrapperPath } }, }); - const modelSelection = { - provider: "cursor" as const, - model: "gpt-5.4", - options: { - reasoning: "xhigh" as const, - contextWindow: "1m", - fastMode: true, - }, - }; + const modelSelection = createModelSelection(ProviderInstanceId.make("cursor"), "gpt-5.4", [ + { id: "reasoning", value: "xhigh" }, + { id: "contextWindow", value: "1m" }, + { id: "fastMode", value: true }, + ]); yield* adapter.startSession({ threadId, - provider: "cursor", + provider: 
ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", modelSelection, @@ -463,10 +495,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { const program = Effect.gen(function* () { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "approval-required", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); const turn = yield* adapter.sendTurn({ @@ -563,7 +595,14 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { ); }).pipe( Effect.provide( - makeCursorAdapterLive().pipe( + Layer.effect( + CursorAdapter, + Effect.gen(function* () { + const cursorConfig = Schema.decodeSync(CursorSettings)({}); + const resolveSettings = yield* makeResolveCursorSettings; + return yield* makeCursorAdapter(cursorConfig, { resolveSettings }); + }), + ).pipe( Layer.provideMerge(ServerSettingsService.layerTest()), Layer.provideMerge( ServerConfig.layerTest(process.cwd(), { @@ -618,10 +657,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); const turn = yield* adapter.sendTurn({ @@ -717,10 +756,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); const turn = yield* adapter.sendTurn({ @@ -839,10 +878,10 @@ 
cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "approval-required", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); const sendTurnFiber = yield* adapter @@ -909,10 +948,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "approval-required", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); const sendTurnFiber = yield* adapter @@ -952,10 +991,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); const sendTurnFiber = yield* adapter @@ -995,10 +1034,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); const sendTurnFiber = yield* adapter @@ -1038,10 +1077,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "default" }, + modelSelection: { 
instanceId: ProviderInstanceId.make("cursor"), model: "default" }, }); const firstEvents = Array.from(yield* Fiber.join(firstConsumer)); @@ -1076,10 +1115,10 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "composer-2" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "composer-2" }, }); yield* adapter.sendTurn({ @@ -1092,7 +1131,9 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { threadId, input: "second turn after switching model", attachments: [], - modelSelection: { provider: "cursor", model: "composer-2", options: { fastMode: true } }, + modelSelection: createModelSelection(ProviderInstanceId.make("cursor"), "composer-2", [ + { id: "fastMode", value: true }, + ]), }); const argvRuns = yield* Effect.promise(() => readArgvLog(argvLogPath)); @@ -1137,24 +1178,28 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.startSession({ threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), cwd: process.cwd(), runtimeMode: "full-access", - modelSelection: { provider: "cursor", model: "composer-2" }, + modelSelection: { instanceId: ProviderInstanceId.make("cursor"), model: "composer-2" }, }); yield* adapter.sendTurn({ threadId, input: "first turn with fast mode", attachments: [], - modelSelection: { provider: "cursor", model: "composer-2", options: { fastMode: true } }, + modelSelection: createModelSelection(ProviderInstanceId.make("cursor"), "composer-2", [ + { id: "fastMode", value: true }, + ]), }); yield* adapter.sendTurn({ threadId, input: "second turn without fast mode", attachments: [], - modelSelection: { provider: "cursor", model: "composer-2", options: { fastMode: false } }, + modelSelection: createModelSelection(ProviderInstanceId.make("cursor"), "composer-2", [ + { id: 
"fastMode", value: false }, + ]), }); const requests = yield* Effect.promise(() => readJsonLines(requestLogPath)); @@ -1171,4 +1216,91 @@ cursorAdapterTestLayer("CursorAdapterLive", (it) => { yield* adapter.stopSession(threadId); }), ); + + it.effect( + "applies fast mode on the first turn when modelSelection uses a non-default instance id", + () => { + const customInstanceId = ProviderInstanceId.make("cursor_secondary"); + // Custom-instance cases can't share the suite-level `CursorAdapter` + // layer because that one binds `instanceId: "cursor"`. We build a + // fresh layer graph — including a fresh `ServerSettingsService` — so + // mid-test `updateSettings` calls target the same service instance the + // adapter's `resolveSettings` reads from, and so the outer + // `yield* ServerSettingsService` sees the same snapshot as well. + const customAdapterLayer = Layer.effect( + CursorAdapter, + Effect.gen(function* () { + const cursorConfig = Schema.decodeSync(CursorSettings)({}); + const resolveSettings = yield* makeResolveCursorSettings; + return yield* makeCursorAdapter(cursorConfig, { + instanceId: customInstanceId, + resolveSettings, + }); + }), + ).pipe( + Layer.provideMerge(ServerSettingsService.layerTest()), + Layer.provideMerge( + ServerConfig.layerTest(process.cwd(), { + prefix: "t3code-cursor-adapter-custom-instance-", + }), + ), + Layer.provideMerge(NodeServices.layer), + ); + + return Effect.gen(function* () { + const adapter = yield* CursorAdapter; + const serverSettings = yield* ServerSettingsService; + const threadId = ThreadId.make("cursor-fast-mode-custom-instance"); + const tempDir = yield* Effect.promise(() => mkdtemp(path.join(os.tmpdir(), "cursor-acp-"))); + const requestLogPath = path.join(tempDir, "requests.ndjson"); + const argvLogPath = path.join(tempDir, "argv.txt"); + yield* Effect.promise(() => writeFile(requestLogPath, "", "utf8")); + const wrapperPath = yield* Effect.promise(() => + makeProbeWrapper(requestLogPath, argvLogPath), + ); + 
yield* serverSettings.updateSettings({ + providers: { cursor: { binaryPath: wrapperPath } }, + }); + + yield* adapter.startSession({ + threadId, + provider: ProviderDriverKind.make("cursor"), + cwd: process.cwd(), + runtimeMode: "full-access", + modelSelection: { + instanceId: customInstanceId, + model: "composer-2", + }, + }); + + yield* adapter.sendTurn({ + threadId, + input: "first turn with fast mode", + attachments: [], + modelSelection: { + ...createModelSelection(ProviderInstanceId.make("cursor"), "composer-2", [ + { id: "fastMode", value: true }, + ]), + instanceId: customInstanceId, + }, + }); + + const requests = yield* Effect.promise(() => readJsonLines(requestLogPath)); + const fastConfigRequests = requests.filter( + (entry) => + entry.method === "session/set_config_option" && + (entry.params as Record | undefined)?.configId === "fast", + ); + assert.isAbove( + fastConfigRequests.length, + 0, + "fast mode should apply when instance id matches the adapter binding", + ); + const lastFastConfig = fastConfigRequests[fastConfigRequests.length - 1]; + assert.equal((lastFastConfig?.params as Record)?.value, "true"); + + yield* adapter.stopSession(threadId); + }).pipe(Effect.provide(customAdapterLayer)); + }, + ); }); diff --git a/apps/server/src/provider/Layers/CursorAdapter.ts b/apps/server/src/provider/Layers/CursorAdapter.ts index 03e12a174a1..34d1221b022 100644 --- a/apps/server/src/provider/Layers/CursorAdapter.ts +++ b/apps/server/src/provider/Layers/CursorAdapter.ts @@ -7,13 +7,16 @@ import * as nodePath from "node:path"; import { ApprovalRequestId, - type CursorModelOptions, + type CursorSettings, + type ProviderOptionSelection, EventId, type ProviderApprovalDecision, type ProviderInteractionMode, type ProviderRuntimeEvent, type ProviderSession, type ProviderUserInputAnswers, + ProviderDriverKind, + ProviderInstanceId, RuntimeRequestId, type RuntimeMode, type ThreadId, @@ -26,7 +29,6 @@ import { Exit, Fiber, FileSystem, - Layer, Option, PubSub, Random, 
@@ -40,7 +42,6 @@ import type * as EffectAcpSchema from "effect-acp/schema"; import { resolveAttachmentPath } from "../../attachmentStore.ts"; import { ServerConfig } from "../../config.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; import { ProviderAdapterProcessError, ProviderAdapterRequestError, @@ -72,20 +73,37 @@ import { extractPlanMarkdown, extractTodosAsPlan, } from "../acp/CursorAcpExtension.ts"; -import { CursorAdapter, type CursorAdapterShape } from "../Services/CursorAdapter.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; +import { type CursorAdapterShape } from "../Services/CursorAdapter.ts"; import { resolveCursorAcpBaseModelId } from "./CursorProvider.ts"; import { type EventNdjsonLogger, makeEventNdjsonLogger } from "./EventNdjsonLogger.ts"; -const PROVIDER = "cursor" as const; +const PROVIDER = ProviderDriverKind.make("cursor"); const CURSOR_RESUME_VERSION = 1 as const; const ACP_PLAN_MODE_ALIASES = ["plan", "architect"]; const ACP_IMPLEMENT_MODE_ALIASES = ["code", "agent", "default", "chat", "implement"]; const ACP_APPROVAL_MODE_ALIASES = ["ask"]; export interface CursorAdapterLiveOptions { + readonly environment?: NodeJS.ProcessEnv; readonly nativeEventLogPath?: string; readonly nativeEventLogger?: EventNdjsonLogger; + /** + * Selections are honored when `modelSelection.instanceId` matches this value. + * Defaults to the legacy built-in instance id (`cursor`). + */ + readonly instanceId?: typeof ProviderInstanceId.Type; + /** + * Optional per-session settings resolver. When provided the adapter yields + * this effect at the start of every session and uses the result instead of + * the `cursorSettings` captured at construction. + * + * Production instances bind settings to the instance scope (the hydration + * layer rebuilds the adapter on config change) and leave this undefined. + * Test suites that mutate `ServerSettingsService` mid-flight — e.g. 
to + * swap `binaryPath` to a mock ACP wrapper — pass a resolver that reads + * the latest snapshot so the closure isn't stale. + */ + readonly resolveSettings?: Effect.Effect; } interface PendingApproval { @@ -223,7 +241,7 @@ function applyRequestedSessionConfiguration(input: { readonly modelSelection: | { readonly model: string; - readonly options?: CursorModelOptions | null | undefined; + readonly options?: ReadonlyArray | null | undefined; } | undefined; readonly mapError: (context: { @@ -236,7 +254,7 @@ function applyRequestedSessionConfiguration(input: { yield* applyCursorAcpModelSelection({ runtime: input.runtime, model: input.modelSelection.model, - modelOptions: input.modelSelection.options, + selections: input.modelSelection.options, mapError: ({ cause }) => input.mapError({ cause, @@ -281,12 +299,15 @@ function selectAutoApprovedPermissionOption( return undefined; } -function makeCursorAdapter(options?: CursorAdapterLiveOptions) { +export function makeCursorAdapter( + cursorSettings: CursorSettings, + options?: CursorAdapterLiveOptions, +) { return Effect.gen(function* () { + const boundInstanceId = options?.instanceId ?? ProviderInstanceId.make("cursor"); const fileSystem = yield* FileSystem.FileSystem; const childProcessSpawner = yield* ChildProcessSpawner.ChildProcessSpawner; const serverConfig = yield* Effect.service(ServerConfig); - const serverSettingsService = yield* ServerSettingsService; const nativeEventLogger = options?.nativeEventLogger ?? (options?.nativeEventLogPath !== undefined @@ -441,25 +462,12 @@ function makeCursorAdapter(options?: CursorAdapterLiveOptions) { const cwd = nodePath.resolve(input.cwd.trim()); const cursorModelSelection = - input.modelSelection?.provider === "cursor" ? input.modelSelection : undefined; + input.modelSelection?.instanceId === boundInstanceId ? 
input.modelSelection : undefined; const existing = sessions.get(input.threadId); if (existing && !existing.stopped) { yield* stopSessionInternal(existing); } - const cursorSettings = yield* serverSettingsService.getSettings.pipe( - Effect.map((settings) => settings.providers.cursor), - Effect.mapError( - (error) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: error.message, - cause: error, - }), - ), - ); - const pendingApprovals = new Map(); const pendingUserInputs = new Map(); const sessionScope = yield* Scope.make("sequential"); @@ -476,8 +484,21 @@ function makeCursorAdapter(options?: CursorAdapterLiveOptions) { threadId: input.threadId, }); + // Resolve the CursorSettings used to spawn the ACP child. Production + // leaves `options.resolveSettings` undefined so we use the value + // captured at adapter construction — per-instance isolation is + // enforced by the hydration layer rebuilding this adapter whenever + // its config changes. Tests set `resolveSettings` to pull the latest + // snapshot from `ServerSettingsService` so that mid-suite + // `updateSettings({ providers: { cursor: { binaryPath } } })` calls + // actually take effect when the next session spawns. + const effectiveCursorSettings = options?.resolveSettings + ? yield* options.resolveSettings + : cursorSettings; + const acp = yield* makeCursorAcpRuntime({ - cursorSettings, + cursorSettings: effectiveCursorSettings, + ...(options?.environment ? { environment: options.environment } : {}), childProcessSpawner, cwd, ...(resumeSessionId ? 
{ resumeSessionId } : {}), @@ -667,6 +688,7 @@ function makeCursorAdapter(options?: CursorAdapterLiveOptions) { const now = yield* nowIso; const session: ProviderSession = { provider: PROVIDER, + providerInstanceId: boundInstanceId, status: "ready", runtimeMode: input.runtimeMode, cwd, @@ -816,7 +838,7 @@ function makeCursorAdapter(options?: CursorAdapterLiveOptions) { const ctx = yield* requireSession(input.threadId); const turnId = TurnId.make(crypto.randomUUID()); const turnModelSelection = - input.modelSelection?.provider === "cursor" ? input.modelSelection : undefined; + input.modelSelection?.instanceId === boundInstanceId ? input.modelSelection : undefined; const model = turnModelSelection?.model ?? ctx.session.model; const resolvedModel = resolveCursorAcpBaseModelId(model); yield* applyRequestedSessionConfiguration({ @@ -1034,7 +1056,7 @@ function makeCursorAdapter(options?: CursorAdapterLiveOptions) { return { provider: PROVIDER, - capabilities: getProviderCapabilities(PROVIDER), + capabilities: { sessionModelSwitch: "in-session" }, startSession, sendTurn, interruptTurn, @@ -1050,9 +1072,3 @@ function makeCursorAdapter(options?: CursorAdapterLiveOptions) { } satisfies CursorAdapterShape; }); } - -export const CursorAdapterLive = Layer.effect(CursorAdapter, makeCursorAdapter()); - -export function makeCursorAdapterLive(opts?: CursorAdapterLiveOptions) { - return Layer.effect(CursorAdapter, makeCursorAdapter(opts)); -} diff --git a/apps/server/src/provider/Layers/CursorProvider.test.ts b/apps/server/src/provider/Layers/CursorProvider.test.ts index 6cbfb1078b9..33ef20acf21 100644 --- a/apps/server/src/provider/Layers/CursorProvider.test.ts +++ b/apps/server/src/provider/Layers/CursorProvider.test.ts @@ -1,18 +1,17 @@ -import * as path from "node:path"; -import * as os from "node:os"; -import { chmod, mkdtemp, readFile, writeFile } from "node:fs/promises"; -import { fileURLToPath } from "node:url"; +import * as NodeOS from "node:os"; import * as NodeServices 
from "@effect/platform-node/NodeServices"; -import { Effect } from "effect"; +import { Effect, FileSystem, Path } from "effect"; import { describe, expect, it } from "vitest"; import type * as EffectAcpSchema from "effect-acp/schema"; import type { CursorSettings, ServerProviderModel } from "@t3tools/contracts"; +import { createModelCapabilities } from "@t3tools/shared/model"; import { buildCursorProviderSnapshot, buildCursorCapabilitiesFromConfigOptions, buildCursorDiscoveredModelsFromConfigOptions, + checkCursorProviderStatus, discoverCursorModelCapabilitiesViaAcp, discoverCursorModelsViaAcp, getCursorFallbackModels, @@ -24,11 +23,50 @@ import { resolveCursorAcpConfigUpdates, } from "./CursorProvider.ts"; -const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const mockAgentPath = path.join(__dirname, "../../../scripts/acp-mock-agent.ts"); +const runNode = ( + effect: Effect.Effect, +): Promise
=> Effect.runPromise(effect.pipe(Effect.provide(NodeServices.layer))); -async function makeMockAgentWrapper(extraEnv?: Record) { - const dir = await mkdtemp(path.join(os.tmpdir(), "cursor-provider-mock-")); +const resolveMockAgentPath = Effect.fn("resolveMockAgentPath")(function* () { + const path = yield* Path.Path; + return yield* path.fromFileUrl(new URL("../../../scripts/acp-mock-agent.ts", import.meta.url)); +}); + +function selectDescriptor( + id: string, + label: string, + options: ReadonlyArray<{ id: string; label: string; isDefault?: boolean }>, +) { + return { + id, + label, + type: "select" as const, + options: [...options], + ...(options.find((option) => option.isDefault)?.id + ? { currentValue: options.find((option) => option.isDefault)?.id } + : {}), + }; +} + +function booleanDescriptor(id: string, label: string, currentValue?: boolean) { + return { + id, + label, + type: "boolean" as const, + ...(typeof currentValue === "boolean" ? { currentValue } : {}), + }; +} + +const makeMockAgentWrapper = Effect.fn("makeMockAgentWrapper")(function* ( + extraEnv?: Record, +) { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const mockAgentPath = yield* resolveMockAgentPath(); + const dir = yield* fileSystem.makeTempDirectory({ + directory: NodeOS.tmpdir(), + prefix: "cursor-provider-mock-", + }); const wrapperPath = path.join(dir, "fake-agent.sh"); const envExports = Object.entries(extraEnv ?? 
{}) .map(([key, value]) => `export ${key}=${JSON.stringify(value)}`) @@ -37,23 +75,80 @@ async function makeMockAgentWrapper(extraEnv?: Record) { ${envExports} exec ${JSON.stringify("bun")} ${JSON.stringify(mockAgentPath)} "$@" `; - await writeFile(wrapperPath, script, "utf8"); - await chmod(wrapperPath, 0o755); + yield* fileSystem.writeFileString(wrapperPath, script); + yield* fileSystem.chmod(wrapperPath, 0o755); return wrapperPath; -} +}); -async function waitForFileContent(filePath: string, attempts = 40): Promise { +const makeMockAgentWithAboutWrapper = Effect.fn("makeMockAgentWithAboutWrapper")(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const mockAgentPath = yield* resolveMockAgentPath(); + const dir = yield* fileSystem.makeTempDirectory({ + directory: NodeOS.tmpdir(), + prefix: "cursor-provider-about-mock-", + }); + const wrapperPath = path.join(dir, "fake-agent.sh"); + const script = `#!/bin/sh +if [ "$1" = "about" ]; then + printf 'CLI Version 2026.04.09-f2b0fcd\\n' + printf 'User Email cursor@example.com\\n' + exit 0 +fi +exec ${JSON.stringify("bun")} ${JSON.stringify(mockAgentPath)} "$@" +`; + yield* fileSystem.writeFileString(wrapperPath, script); + yield* fileSystem.chmod(wrapperPath, 0o755); + return wrapperPath; +}); + +const waitForFileContent = Effect.fn("waitForFileContent")(function* ( + filePath: string, + attempts = 40, +) { + const fileSystem = yield* FileSystem.FileSystem; for (let attempt = 0; attempt < attempts; attempt += 1) { - try { - const content = await readFile(filePath, "utf8"); + const content = yield* fileSystem + .readFileString(filePath) + .pipe(Effect.catch(() => Effect.void)); + if (content !== undefined) { if (content.trim().length > 0) { return content; } - } catch {} - await new Promise((resolve) => setTimeout(resolve, 50)); + } + yield* Effect.sleep("50 millis"); } - throw new Error(`Timed out waiting for file content at ${filePath}`); -} + return yield* 
Effect.fail(new Error(`Timed out waiting for file content at ${filePath}`)); +}); + +const makeProviderStatusEnvFixture = Effect.fn("makeProviderStatusEnvFixture")(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const tempDir = yield* fileSystem.makeTempDirectory({ + directory: NodeOS.tmpdir(), + prefix: "cursor-provider-status-env-", + }); + return { + requestLogPath: path.join(tempDir, "requests.ndjson"), + wrapperPath: yield* makeMockAgentWithAboutWrapper(), + }; +}); + +const makeExitLogFixture = Effect.fn("makeExitLogFixture")(function* (prefix: string) { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const tempDir = yield* fileSystem.makeTempDirectory({ + directory: NodeOS.tmpdir(), + prefix, + }); + const exitLogPath = path.join(tempDir, "exit.log"); + return { + exitLogPath, + wrapperPath: yield* makeMockAgentWrapper({ + T3_ACP_EXIT_LOG_PATH: exitLogPath, + }), + }; +}); const parameterizedGpt54ConfigOptions = [ { @@ -239,13 +334,7 @@ const baseCursorSettings: CursorSettings = { customModels: [], }; -const emptyCapabilities = { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], -} as const; +const emptyCapabilities = createModelCapabilities({ optionDescriptors: [] }); describe("getCursorFallbackModels", () => { it("does not publish any built-in cursor models before ACP discovery", () => { @@ -309,52 +398,57 @@ describe("buildCursorProviderSnapshot", () => { describe("buildCursorCapabilitiesFromConfigOptions", () => { it("derives model capabilities from parameterized Cursor ACP config options", () => { - expect(buildCursorCapabilitiesFromConfigOptions(parameterizedGpt54ConfigOptions)).toEqual({ - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium", isDefault: true }, - { value: "high", label: "High" }, - { value: "xhigh", 
label: "Extra High" }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [ - { value: "272k", label: "272K", isDefault: true }, - { value: "1m", label: "1M" }, - ], - promptInjectedEffortLevels: [], - }); + expect(buildCursorCapabilitiesFromConfigOptions(parameterizedGpt54ConfigOptions)).toEqual( + createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoning", "Reasoning", [ + { id: "low", label: "Low" }, + { id: "medium", label: "Medium", isDefault: true }, + { id: "high", label: "High" }, + { id: "xhigh", label: "Extra High" }, + ]), + selectDescriptor("contextWindow", "Context", [ + { id: "272k", label: "272K", isDefault: true }, + { id: "1m", label: "1M" }, + ]), + booleanDescriptor("fastMode", "Fast", false), + ], + }), + ); }); it("detects boolean thinking toggles from model_config options", () => { - expect(buildCursorCapabilitiesFromConfigOptions(parameterizedClaudeConfigOptions)).toEqual({ - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High", isDefault: true }, - ], - supportsFastMode: false, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }); + expect(buildCursorCapabilitiesFromConfigOptions(parameterizedClaudeConfigOptions)).toEqual( + createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoning", "Reasoning", [ + { id: "low", label: "Low" }, + { id: "medium", label: "Medium" }, + { id: "high", label: "High", isDefault: true }, + ]), + booleanDescriptor("thinking", "Thinking", true), + ], + }), + ); }); it("prefers the newer model_option effort control over legacy thought_level", () => { expect( buildCursorCapabilitiesFromConfigOptions(parameterizedClaudeModelOptionConfigOptions), - ).toEqual({ - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High" }, - { value: "max", label: 
"Max", isDefault: true }, - ], - supportsFastMode: true, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }); + ).toEqual( + createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoning", "Effort", [ + { id: "low", label: "Low" }, + { id: "medium", label: "Medium" }, + { id: "high", label: "High" }, + { id: "max", label: "Max", isDefault: true }, + ]), + booleanDescriptor("fastMode", "Fast", true), + booleanDescriptor("thinking", "Thinking", true), + ], + }), + ); }); }); @@ -365,81 +459,76 @@ describe("buildCursorDiscoveredModelsFromConfigOptions", () => { slug: "default", name: "Auto", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: emptyCapabilities, }, { slug: "composer-2", name: "Composer 2", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [booleanDescriptor("fastMode", "Fast", true)], + }), }, { slug: "gpt-5.4", name: "GPT-5.4", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: emptyCapabilities, }, { slug: "claude-sonnet-4-6", name: "Sonnet 4.6", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: emptyCapabilities, }, { slug: "claude-opus-4-6", name: "Opus 4.6", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - 
promptInjectedEffortLevels: [], - }, + capabilities: emptyCapabilities, }, { slug: "gpt-5.3-codex-spark", name: "Codex 5.3 Spark", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: emptyCapabilities, }, ]); }); }); +describe("checkCursorProviderStatus", () => { + it("passes the injected environment to ACP model discovery", async () => { + const { requestLogPath, wrapperPath } = await runNode(makeProviderStatusEnvFixture()); + + const provider = await Effect.runPromise( + checkCursorProviderStatus( + { + enabled: true, + binaryPath: wrapperPath, + apiEndpoint: "", + customModels: [], + }, + { + ...process.env, + T3_ACP_REQUEST_LOG_PATH: requestLogPath, + }, + ).pipe(Effect.provide(NodeServices.layer)), + ); + + expect(provider.models.map((model) => model.slug)).toEqual([ + "default", + "composer-2", + "gpt-5.4", + "claude-opus-4-6", + ]); + await expect(runNode(waitForFileContent(requestLogPath))).resolves.toContain("initialize"); + }); +}); + describe("discoverCursorModelsViaAcp", () => { it("keeps the ACP probe runtime alive long enough to discover models", async () => { - const wrapperPath = await makeMockAgentWrapper(); + const wrapperPath = await runNode(makeMockAgentWrapper()); const models = await Effect.runPromise( discoverCursorModelsViaAcp({ @@ -459,11 +548,9 @@ describe("discoverCursorModelsViaAcp", () => { }); it("closes the ACP probe runtime after discovery completes", async () => { - const tempDir = await mkdtemp(path.join(os.tmpdir(), "cursor-provider-exit-log-")); - const exitLogPath = path.join(tempDir, "exit.log"); - const wrapperPath = await makeMockAgentWrapper({ - T3_ACP_EXIT_LOG_PATH: exitLogPath, - }); + const { exitLogPath, wrapperPath } = await runNode( + makeExitLogFixture("cursor-provider-exit-log-"), + ); await Effect.runPromise( discoverCursorModelsViaAcp({ @@ -474,18 +561,16 @@ 
describe("discoverCursorModelsViaAcp", () => { }).pipe(Effect.provide(NodeServices.layer)), ); - const exitLog = await waitForFileContent(exitLogPath); + const exitLog = await runNode(waitForFileContent(exitLogPath)); expect(exitLog).toContain("SIGTERM"); }); }); describe("discoverCursorModelCapabilitiesViaAcp", () => { it("closes all ACP probe runtimes after capability enrichment completes", async () => { - const tempDir = await mkdtemp(path.join(os.tmpdir(), "cursor-capabilities-exit-log-")); - const exitLogPath = path.join(tempDir, "exit.log"); - const wrapperPath = await makeMockAgentWrapper({ - T3_ACP_EXIT_LOG_PATH: exitLogPath, - }); + const { exitLogPath, wrapperPath } = await runNode( + makeExitLogFixture("cursor-capabilities-exit-log-"), + ); const existingModels: ReadonlyArray = [ { slug: "default", name: "Auto", isCustom: false, capabilities: emptyCapabilities }, { slug: "composer-2", name: "Composer 2", isCustom: false, capabilities: emptyCapabilities }, @@ -517,7 +602,7 @@ describe("discoverCursorModelCapabilitiesViaAcp", () => { "claude-opus-4-6", ]); - const exitLog = await waitForFileContent(exitLogPath); + const exitLog = await runNode(waitForFileContent(exitLogPath)); expect(exitLog.match(/SIGTERM/g)?.length ?? 
0).toBe(4); }); }); @@ -539,6 +624,7 @@ describe("parseCursorAboutOutput", () => { status: "ready", auth: { status: "authenticated", + email: "jmarminge@gmail.com", type: "Team", label: "Cursor Team Subscription", }, @@ -645,11 +731,11 @@ describe("resolveCursorAcpBaseModelId", () => { describe("resolveCursorAcpConfigUpdates", () => { it("maps Cursor model options onto separate ACP config option updates", () => { expect( - resolveCursorAcpConfigUpdates(parameterizedGpt54ConfigOptions, { - reasoning: "xhigh", - fastMode: true, - contextWindow: "1m", - }), + resolveCursorAcpConfigUpdates(parameterizedGpt54ConfigOptions, [ + { id: "reasoning", value: "xhigh" }, + { id: "fastMode", value: true }, + { id: "contextWindow", value: "1m" }, + ]), ).toEqual([ { configId: "reasoning", value: "extra-high" }, { configId: "context", value: "1m" }, @@ -659,28 +745,28 @@ describe("resolveCursorAcpConfigUpdates", () => { it("maps boolean thinking toggles when the model exposes them separately", () => { expect( - resolveCursorAcpConfigUpdates(parameterizedClaudeConfigOptions, { - thinking: false, - }), + resolveCursorAcpConfigUpdates(parameterizedClaudeConfigOptions, [ + { id: "thinking", value: false }, + ]), ).toEqual([{ configId: "thinking", value: false }]); }); it("maps explicit fastMode: false so the adapter can clear a prior fast selection", () => { expect( - resolveCursorAcpConfigUpdates(parameterizedGpt54ConfigOptions, { - fastMode: false, - }), + resolveCursorAcpConfigUpdates(parameterizedGpt54ConfigOptions, [ + { id: "fastMode", value: false }, + ]), ).toEqual([{ configId: "fast", value: "false" }]); }); it("writes Cursor effort changes through the newer model_option config when available", () => { expect( - resolveCursorAcpConfigUpdates(parameterizedClaudeModelOptionConfigOptions, { - reasoning: "high", - thinking: false, - }), + resolveCursorAcpConfigUpdates(parameterizedClaudeModelOptionConfigOptions, [ + { id: "reasoning", value: "max" }, + { id: "thinking", value: 
false }, + ]), ).toEqual([ - { configId: "effort", value: "high" }, + { configId: "effort", value: "max" }, { configId: "thinking", value: "false" }, ]); }); diff --git a/apps/server/src/provider/Layers/CursorProvider.ts b/apps/server/src/provider/Layers/CursorProvider.ts index 70d5656b3ec..ad52f63fbb2 100644 --- a/apps/server/src/provider/Layers/CursorProvider.ts +++ b/apps/server/src/provider/Layers/CursorProvider.ts @@ -1,46 +1,49 @@ -import * as nodeFs from "node:fs"; import * as nodeOs from "node:os"; -import * as nodePath from "node:path"; import type { - CursorModelOptions, CursorSettings, ModelCapabilities, + ProviderOptionSelection, ServerProvider, ServerProviderAuth, ServerProviderModel, ServerProviderState, - ServerSettingsError, } from "@t3tools/contracts"; +import { ProviderDriverKind } from "@t3tools/contracts"; import type * as EffectAcpSchema from "effect-acp/schema"; -import { Cause, Effect, Equal, Exit, Layer, Option, Result, Stream } from "effect"; +import { Cause, Effect, Exit, FileSystem, Layer, Option, Path, Result } from "effect"; import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; +import { + createModelCapabilities, + getProviderOptionBooleanSelectionValue, + getProviderOptionStringSelectionValue, +} from "@t3tools/shared/model"; import { + buildBooleanOptionDescriptor, + buildSelectOptionDescriptor, buildServerProvider, collectStreamAsString, isCommandMissingCause, providerModelsFromSettings, type CommandResult, + type ServerProviderDraft, } from "../providerSnapshot.ts"; -import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; -import { CursorProvider } from "../Services/CursorProvider.ts"; import { AcpSessionRuntime } from "../acp/AcpSessionRuntime.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; - -const PROVIDER = "cursor" as const; -const EMPTY_CAPABILITIES: ModelCapabilities = { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, 
- contextWindowOptions: [], - promptInjectedEffortLevels: [], -}; + +const PROVIDER = ProviderDriverKind.make("cursor"); +const CURSOR_PRESENTATION = { + displayName: "Cursor", + badgeLabel: "Early Access", + showInteractionModeToggle: true, +} as const; +const EMPTY_CAPABILITIES: ModelCapabilities = createModelCapabilities({ + optionDescriptors: [], +}); const CURSOR_ACP_MODEL_DISCOVERY_TIMEOUT_MS = 15_000; const CURSOR_ACP_MODEL_CAPABILITY_TIMEOUT = "4 seconds"; const CURSOR_ACP_MODEL_DISCOVERY_CONCURRENCY = 4; -const CURSOR_REFRESH_INTERVAL = "1 hour"; const CURSOR_PARAMETERIZED_MODEL_PICKER_MIN_VERSION_DATE = 2026_04_08; export const CURSOR_PARAMETERIZED_MODEL_PICKER_CAPABILITIES = { _meta: { @@ -48,13 +51,15 @@ export const CURSOR_PARAMETERIZED_MODEL_PICKER_CAPABILITIES = { }, } satisfies NonNullable; -function buildInitialCursorProviderSnapshot(cursorSettings: CursorSettings): ServerProvider { +export function buildInitialCursorProviderSnapshot( + cursorSettings: CursorSettings, +): ServerProviderDraft { const checkedAt = new Date().toISOString(); const models = getCursorFallbackModels(cursorSettings); if (!cursorSettings.enabled) { return buildServerProvider({ - provider: PROVIDER, + presentation: CURSOR_PRESENTATION, enabled: false, checkedAt, models, @@ -69,7 +74,7 @@ function buildInitialCursorProviderSnapshot(cursorSettings: CursorSettings): Ser } return buildServerProvider({ - provider: PROVIDER, + presentation: CURSOR_PRESENTATION, enabled: true, checkedAt, models, @@ -102,7 +107,12 @@ function flattenSessionConfigSelectOptions( } return configOption.options.flatMap((entry) => "value" in entry - ? [{ value: entry.value.trim(), name: entry.name.trim() } satisfies CursorSessionSelectOption] + ? 
[ + { + value: entry.value.trim(), + name: entry.name.trim(), + } satisfies CursorSessionSelectOption, + ] : entry.options.map( (option) => ({ @@ -198,6 +208,28 @@ function isBooleanLikeConfigOption(option: EffectAcpSchema.SessionConfigOption): return values.has("true") && values.has("false"); } +function getBooleanCurrentValue( + option: EffectAcpSchema.SessionConfigOption | undefined, +): boolean | undefined { + if (!option) { + return undefined; + } + if (option.type === "boolean") { + return option.currentValue; + } + if (option.type !== "select") { + return undefined; + } + const normalized = option.currentValue?.trim().toLowerCase(); + if (normalized === "true") { + return true; + } + if (normalized === "false") { + return false; + } + return undefined; +} + export function buildCursorCapabilitiesFromConfigOptions( configOptions: ReadonlyArray | null | undefined, ): ModelCapabilities { @@ -251,14 +283,60 @@ export function buildCursorCapabilitiesFromConfigOptions( const thinkingOption = configOptions.find( (option) => option.category === "model_config" && isCursorThinkingConfigOption(option), ); + const fastCurrentValue = getBooleanCurrentValue(fastOption); + const thinkingCurrentValue = getBooleanCurrentValue(thinkingOption); + const optionDescriptors = [ + ...(reasoningEffortLevels.length > 0 + ? [ + buildSelectOptionDescriptor({ + id: "reasoning", + label: reasoningConfig?.name?.trim() || "Reasoning", + options: reasoningEffortLevels, + }), + ] + : []), + ...(contextWindowOptions.length > 0 + ? [ + buildSelectOptionDescriptor({ + id: "contextWindow", + label: contextOption?.name?.trim() || "Context Window", + options: contextWindowOptions, + }), + ] + : []), + ...(fastOption && isBooleanLikeConfigOption(fastOption) + ? [ + typeof fastCurrentValue === "boolean" + ? 
buildBooleanOptionDescriptor({ + id: "fastMode", + label: fastOption.name?.trim() || "Fast Mode", + currentValue: fastCurrentValue, + }) + : buildBooleanOptionDescriptor({ + id: "fastMode", + label: fastOption.name?.trim() || "Fast Mode", + }), + ] + : []), + ...(thinkingOption && isBooleanLikeConfigOption(thinkingOption) + ? [ + typeof thinkingCurrentValue === "boolean" + ? buildBooleanOptionDescriptor({ + id: "thinking", + label: thinkingOption.name?.trim() || "Thinking", + currentValue: thinkingCurrentValue, + }) + : buildBooleanOptionDescriptor({ + id: "thinking", + label: thinkingOption.name?.trim() || "Thinking", + }), + ] + : []), + ]; - return { - reasoningEffortLevels, - supportsFastMode: fastOption ? isBooleanLikeConfigOption(fastOption) : false, - supportsThinkingToggle: thinkingOption ? isBooleanLikeConfigOption(thinkingOption) : false, - contextWindowOptions, - promptInjectedEffortLevels: [], - }; + return createModelCapabilities({ + optionDescriptors, + }); } function buildCursorDiscoveredModels( @@ -282,13 +360,7 @@ function buildCursorDiscoveredModels( } function hasCursorModelCapabilities(model: Pick): boolean { - return ( - (model.capabilities?.reasoningEffortLevels.length ?? 0) > 0 || - model.capabilities?.supportsFastMode === true || - model.capabilities?.supportsThinkingToggle === true || - (model.capabilities?.contextWindowOptions.length ?? 0) > 0 || - (model.capabilities?.promptInjectedEffortLevels.length ?? 0) > 0 - ); + return (model.capabilities?.optionDescriptors?.length ?? 
0) > 0; } export function buildCursorDiscoveredModelsFromConfigOptions( @@ -320,7 +392,10 @@ export function buildCursorDiscoveredModelsFromConfigOptions( ); } -const makeCursorAcpProbeRuntime = (cursorSettings: CursorSettings) => +const makeCursorAcpProbeRuntime = ( + cursorSettings: CursorSettings, + environment: NodeJS.ProcessEnv = process.env, +) => Effect.gen(function* () { const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; const acpContext = yield* Layer.build( @@ -332,6 +407,7 @@ const makeCursorAcpProbeRuntime = (cursorSettings: CursorSettings) => "acp", ], cwd: process.cwd(), + env: environment, }, cwd: process.cwd(), clientInfo: { name: "t3-code-provider-probe", version: "0.0.0" }, @@ -345,7 +421,12 @@ const makeCursorAcpProbeRuntime = (cursorSettings: CursorSettings) => const withCursorAcpProbeRuntime = ( cursorSettings: CursorSettings, useRuntime: (acp: AcpSessionRuntime["Service"]) => Effect.Effect, -) => makeCursorAcpProbeRuntime(cursorSettings).pipe(Effect.flatMap(useRuntime), Effect.scoped); + environment: NodeJS.ProcessEnv = process.env, +) => + makeCursorAcpProbeRuntime(cursorSettings, environment).pipe( + Effect.flatMap(useRuntime), + Effect.scoped, + ); function normalizeCursorConfigOptionToken(value: string | null | undefined): string { return ( @@ -387,16 +468,24 @@ export function resolveCursorAcpBaseModelId(model: string | null | undefined): s export function resolveCursorAcpConfigUpdates( configOptions: ReadonlyArray | null | undefined, - modelOptions: CursorModelOptions | null | undefined, -): ReadonlyArray<{ readonly configId: string; readonly value: string | boolean }> { + selections: ReadonlyArray | null | undefined, +): ReadonlyArray<{ + readonly configId: string; + readonly value: string | boolean; +}> { if (!configOptions || configOptions.length === 0) { return []; } - const updates: Array<{ readonly configId: string; readonly value: string | boolean }> = []; + const updates: Array<{ + readonly configId: string; + 
readonly value: string | boolean; + }> = []; const reasoningOption = findCursorEffortConfigOption(configOptions); - const requestedReasoning = normalizeCursorReasoningValue(modelOptions?.reasoning); + const requestedReasoning = normalizeCursorReasoningValue( + getProviderOptionStringSelectionValue(selections, "reasoning"), + ); if (reasoningOption && requestedReasoning) { const value = findCursorSelectOptionValue(reasoningOption, (option) => { const normalizedValue = normalizeCursorReasoningValue(option.value); @@ -411,14 +500,15 @@ export function resolveCursorAcpConfigUpdates( const contextOption = configOptions.find( (option) => option.category === "model_config" && isCursorContextConfigOption(option), ); - if (contextOption && modelOptions?.contextWindow) { + const requestedContextWindow = getProviderOptionStringSelectionValue(selections, "contextWindow"); + if (contextOption && requestedContextWindow) { const value = findCursorSelectOptionValue( contextOption, (option) => normalizeCursorConfigOptionToken(option.value) === - normalizeCursorConfigOptionToken(modelOptions.contextWindow) || + normalizeCursorConfigOptionToken(requestedContextWindow) || normalizeCursorConfigOptionToken(option.name) === - normalizeCursorConfigOptionToken(modelOptions.contextWindow), + normalizeCursorConfigOptionToken(requestedContextWindow), ); if (value) { updates.push({ configId: contextOption.id, value }); @@ -428,8 +518,9 @@ export function resolveCursorAcpConfigUpdates( const fastOption = configOptions.find( (option) => option.category === "model_config" && isCursorFastConfigOption(option), ); - if (fastOption && typeof modelOptions?.fastMode === "boolean") { - const value = findCursorBooleanConfigValue(fastOption, modelOptions.fastMode); + const requestedFastMode = getProviderOptionBooleanSelectionValue(selections, "fastMode"); + if (fastOption && typeof requestedFastMode === "boolean") { + const value = findCursorBooleanConfigValue(fastOption, requestedFastMode); if (value !== 
undefined) { updates.push({ configId: fastOption.id, value }); } @@ -438,8 +529,9 @@ export function resolveCursorAcpConfigUpdates( const thinkingOption = configOptions.find( (option) => option.category === "model_config" && isCursorThinkingConfigOption(option), ); - if (thinkingOption && typeof modelOptions?.thinking === "boolean") { - const value = findCursorBooleanConfigValue(thinkingOption, modelOptions.thinking); + const requestedThinking = getProviderOptionBooleanSelectionValue(selections, "thinking"); + if (thinkingOption && typeof requestedThinking === "boolean") { + const value = findCursorBooleanConfigValue(thinkingOption, requestedThinking); if (value !== undefined) { updates.push({ configId: thinkingOption.id, value }); } @@ -448,43 +540,129 @@ export function resolveCursorAcpConfigUpdates( return updates; } -export const discoverCursorModelsViaAcp = (cursorSettings: CursorSettings) => - withCursorAcpProbeRuntime(cursorSettings, (acp) => - Effect.map(acp.start(), (started) => - buildCursorDiscoveredModelsFromConfigOptions(started.sessionSetupResult.configOptions ?? []), - ), +export const discoverCursorModelsViaAcp = ( + cursorSettings: CursorSettings, + environment: NodeJS.ProcessEnv = process.env, +) => + withCursorAcpProbeRuntime( + cursorSettings, + (acp) => + Effect.map(acp.start(), (started) => + buildCursorDiscoveredModelsFromConfigOptions( + started.sessionSetupResult.configOptions ?? [], + ), + ), + environment, ); export const discoverCursorModelCapabilitiesViaAcp = ( cursorSettings: CursorSettings, existingModels: ReadonlyArray, + environment: NodeJS.ProcessEnv = process.env, ) => - withCursorAcpProbeRuntime(cursorSettings, (acp) => - Effect.gen(function* () { - const started = yield* acp.start(); - const initialConfigOptions = started.sessionSetupResult.configOptions ?? 
[]; - const modelOption = findCursorModelConfigOption(initialConfigOptions); - const modelChoices = flattenSessionConfigSelectOptions(modelOption); - if (!modelOption || modelChoices.length === 0) { - return []; - } + withCursorAcpProbeRuntime( + cursorSettings, + (acp) => + Effect.gen(function* () { + const started = yield* acp.start(); + const initialConfigOptions = started.sessionSetupResult.configOptions ?? []; + const modelOption = findCursorModelConfigOption(initialConfigOptions); + const modelChoices = flattenSessionConfigSelectOptions(modelOption); + if (!modelOption || modelChoices.length === 0) { + return []; + } - const currentModelValue = - modelOption.type === "select" ? modelOption.currentValue?.trim() || undefined : undefined; - const capabilitiesBySlug = new Map(); - if (currentModelValue) { - capabilitiesBySlug.set( - currentModelValue, - buildCursorCapabilitiesFromConfigOptions(initialConfigOptions), + const currentModelValue = + modelOption.type === "select" ? modelOption.currentValue?.trim() || undefined : undefined; + const capabilitiesBySlug = new Map(); + if (currentModelValue) { + capabilitiesBySlug.set( + currentModelValue, + buildCursorCapabilitiesFromConfigOptions(initialConfigOptions), + ); + } + + const targetModelSlugs = new Set( + existingModels + .filter((model) => !model.isCustom && !hasCursorModelCapabilities(model)) + .map((model) => model.slug), ); - } + if (targetModelSlugs.size === 0) { + return buildCursorDiscoveredModels( + modelChoices.map((modelChoice) => ({ + slug: modelChoice.value.trim(), + name: modelChoice.name.trim(), + capabilities: capabilitiesBySlug.get(modelChoice.value.trim()) ?? 
EMPTY_CAPABILITIES, + })), + ); + } + + const probedCapabilities = yield* Effect.forEach( + modelChoices, + (modelChoice) => { + const modelSlug = modelChoice.value.trim(); + if ( + !modelSlug || + !targetModelSlugs.has(modelSlug) || + capabilitiesBySlug.has(modelSlug) + ) { + return Effect.void.pipe( + Effect.as(undefined), + ); + } + + return withCursorAcpProbeRuntime( + cursorSettings, + (probeAcp) => + Effect.gen(function* () { + const probeStarted = yield* probeAcp.start(); + const probeConfigOptions = probeStarted.sessionSetupResult.configOptions ?? []; + const probeModelOption = findCursorModelConfigOption(probeConfigOptions); + const probeCurrentModelValue = + probeModelOption?.type === "select" + ? probeModelOption.currentValue?.trim() || undefined + : undefined; + yield* Effect.annotateCurrentSpan({ + "cursor.acp.model.value": modelSlug, + "cursor.acp.model.currentValue": probeCurrentModelValue, + "cursor.acp.config_option_id": probeModelOption?.id ?? modelOption.id, + }); + const nextConfigOptions = + probeCurrentModelValue === modelSlug + ? probeConfigOptions + : yield* probeAcp + .setConfigOption(probeModelOption?.id ?? modelOption.id, modelSlug) + .pipe( + Effect.map((response) => response.configOptions ?? 
probeConfigOptions), + ); + return [ + modelSlug, + buildCursorCapabilitiesFromConfigOptions(nextConfigOptions), + ] as const; + }), + environment, + ).pipe( + Effect.timeout(CURSOR_ACP_MODEL_CAPABILITY_TIMEOUT), + Effect.retry({ times: 3 }), + Effect.withSpan("cursor-acp-model-capability-probe"), + Effect.catchCause((cause) => + Effect.logWarning("Cursor ACP capability probe failed", { + modelSlug, + cause: Cause.pretty(cause), + }), + ), + ); + }, + { concurrency: CURSOR_ACP_MODEL_DISCOVERY_CONCURRENCY }, + ); + + for (const entry of probedCapabilities) { + if (!entry) { + continue; + } + capabilitiesBySlug.set(entry[0], entry[1]); + } - const targetModelSlugs = new Set( - existingModels - .filter((model) => !model.isCustom && !hasCursorModelCapabilities(model)) - .map((model) => model.slug), - ); - if (targetModelSlugs.size === 0) { return buildCursorDiscoveredModels( modelChoices.map((modelChoice) => ({ slug: modelChoice.value.trim(), @@ -492,73 +670,8 @@ export const discoverCursorModelCapabilitiesViaAcp = ( capabilities: capabilitiesBySlug.get(modelChoice.value.trim()) ?? EMPTY_CAPABILITIES, })), ); - } - - const probedCapabilities = yield* Effect.forEach( - modelChoices, - (modelChoice) => { - const modelSlug = modelChoice.value.trim(); - if (!modelSlug || !targetModelSlugs.has(modelSlug) || capabilitiesBySlug.has(modelSlug)) { - return Effect.void.pipe( - Effect.as(undefined), - ); - } - - return withCursorAcpProbeRuntime(cursorSettings, (probeAcp) => - Effect.gen(function* () { - const probeStarted = yield* probeAcp.start(); - const probeConfigOptions = probeStarted.sessionSetupResult.configOptions ?? []; - const probeModelOption = findCursorModelConfigOption(probeConfigOptions); - const probeCurrentModelValue = - probeModelOption?.type === "select" - ? 
probeModelOption.currentValue?.trim() || undefined - : undefined; - yield* Effect.annotateCurrentSpan({ - "cursor.acp.model.value": modelSlug, - "cursor.acp.model.currentValue": probeCurrentModelValue, - "cursor.acp.config_option_id": probeModelOption?.id ?? modelOption.id, - }); - const nextConfigOptions = - probeCurrentModelValue === modelSlug - ? probeConfigOptions - : yield* probeAcp - .setConfigOption(probeModelOption?.id ?? modelOption.id, modelSlug) - .pipe(Effect.map((response) => response.configOptions ?? probeConfigOptions)); - return [ - modelSlug, - buildCursorCapabilitiesFromConfigOptions(nextConfigOptions), - ] as const; - }), - ).pipe( - Effect.timeout(CURSOR_ACP_MODEL_CAPABILITY_TIMEOUT), - Effect.retry({ times: 3 }), - Effect.withSpan("cursor-acp-model-capability-probe"), - Effect.catchCause((cause) => - Effect.logWarning("Cursor ACP capability probe failed", { - modelSlug, - cause: Cause.pretty(cause), - }), - ), - ); - }, - { concurrency: CURSOR_ACP_MODEL_DISCOVERY_CONCURRENCY }, - ); - - for (const entry of probedCapabilities) { - if (!entry) { - continue; - } - capabilitiesBySlug.set(entry[0], entry[1]); - } - - return buildCursorDiscoveredModels( - modelChoices.map((modelChoice) => ({ - slug: modelChoice.value.trim(), - name: modelChoice.name.trim(), - capabilities: capabilitiesBySlug.get(modelChoice.value.trim()) ?? 
EMPTY_CAPABILITIES, - })), - ); - }).pipe(Effect.withSpan("cursor-acp-model-capability-discovery", {})), + }).pipe(Effect.withSpan("cursor-acp-model-capability-discovery", {})), + environment, ); export function getCursorFallbackModels( @@ -606,10 +719,10 @@ export function buildCursorProviderSnapshot(input: { readonly parsed: CursorAboutResult; readonly discoveredModels?: ReadonlyArray; readonly discoveryWarning?: string; -}): ServerProvider { +}): ServerProviderDraft { const message = joinProviderMessages(input.parsed.message, input.discoveryWarning); return buildServerProvider({ - provider: PROVIDER, + presentation: CURSOR_PRESENTATION, enabled: input.cursorSettings.enabled, checkedAt: input.checkedAt, models: providerModelsFromSettings( @@ -733,14 +846,13 @@ function isCursorAboutJsonFormatUnsupported(result: CommandResult): boolean { ); } -function readCursorCliConfigChannel(): string | undefined { - try { - const configPath = nodePath.join(nodeOs.homedir(), ".cursor", "cli-config.json"); - return parseCursorCliConfigChannel(nodeFs.readFileSync(configPath, "utf8")); - } catch { - return undefined; - } -} +const readCursorCliConfigChannel = Effect.fn("readCursorCliConfigChannel")(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const configPath = path.join(nodeOs.homedir(), ".cursor", "cli-config.json"); + const raw = yield* fileSystem.readFileString(configPath).pipe(Effect.orElseSucceed(() => "")); + return parseCursorCliConfigChannel(raw); +}); export function getCursorParameterizedModelPickerUnsupportedMessage(input: { readonly version: string | null | undefined; @@ -856,6 +968,7 @@ export function parseCursorAboutOutput(result: CommandResult): CursorAboutResult status: "ready", auth: { status: "authenticated", + email: userEmail, ...authMetadata, }, }; @@ -911,17 +1024,22 @@ export function parseCursorAboutOutput(result: CommandResult): CursorAboutResult } // Any non-empty email value means authenticated. 
- return { version, status: "ready", auth: { status: "authenticated" } }; + return { + version, + status: "ready", + auth: { status: "authenticated", email: userEmail }, + }; } -const runCursorCommand = (args: ReadonlyArray) => +const runCursorCommand = ( + cursorSettings: CursorSettings, + args: ReadonlyArray, + environment: NodeJS.ProcessEnv = process.env, +) => Effect.gen(function* () { const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; - const cursorSettings = yield* Effect.service(ServerSettingsService).pipe( - Effect.flatMap((service) => service.getSettings), - Effect.map((settings) => settings.providers.cursor), - ); const command = ChildProcess.make(cursorSettings.binaryPath, [...args], { + env: environment, shell: process.platform === "win32", }); @@ -938,199 +1056,202 @@ const runCursorCommand = (args: ReadonlyArray) => return { stdout, stderr, code: exitCode } satisfies CommandResult; }).pipe(Effect.scoped); -const runCursorAboutCommand = Effect.gen(function* () { - const jsonResult = yield* runCursorCommand(["about", "--format", "json"]); - if (!isCursorAboutJsonFormatUnsupported(jsonResult)) { - return jsonResult; - } - return yield* runCursorCommand(["about"]); -}); - -export const checkCursorProviderStatus = Effect.fn("checkCursorProviderStatus")( - function* (): Effect.fn.Return< - ServerProvider, - ServerSettingsError, - ChildProcessSpawner.ChildProcessSpawner | ServerSettingsService - > { - const cursorSettings = yield* Effect.service(ServerSettingsService).pipe( - Effect.flatMap((service) => service.getSettings), - Effect.map((settings) => settings.providers.cursor), +const runCursorAboutCommand = ( + cursorSettings: CursorSettings, + environment: NodeJS.ProcessEnv = process.env, +) => + Effect.gen(function* () { + const jsonResult = yield* runCursorCommand( + cursorSettings, + ["about", "--format", "json"], + environment, ); - const checkedAt = new Date().toISOString(); - const fallbackModels = 
getCursorFallbackModels(cursorSettings); - - if (!cursorSettings.enabled) { - return buildServerProvider({ - provider: PROVIDER, - enabled: false, - checkedAt, - models: fallbackModels, - probe: { - installed: false, - version: null, - status: "warning", - auth: { status: "unknown" }, - message: "Cursor is disabled in T3 Code settings.", - }, - }); + if (!isCursorAboutJsonFormatUnsupported(jsonResult)) { + return jsonResult; } + return yield* runCursorCommand(cursorSettings, ["about"], environment); + }); - // Single `agent about` probe: returns version + auth status in one call. - const aboutProbe = yield* runCursorAboutCommand.pipe( - Effect.timeoutOption(ABOUT_TIMEOUT_MS), - Effect.result, - ); +export const checkCursorProviderStatus = Effect.fn("checkCursorProviderStatus")(function* ( + cursorSettings: CursorSettings, + environment: NodeJS.ProcessEnv = process.env, +): Effect.fn.Return< + ServerProviderDraft, + never, + ChildProcessSpawner.ChildProcessSpawner | FileSystem.FileSystem | Path.Path +> { + const checkedAt = new Date().toISOString(); + const fallbackModels = getCursorFallbackModels(cursorSettings); - if (Result.isFailure(aboutProbe)) { - const error = aboutProbe.failure; - return buildServerProvider({ - provider: PROVIDER, - enabled: cursorSettings.enabled, - checkedAt, - models: fallbackModels, - probe: { - installed: !isCommandMissingCause(error), - version: null, - status: "error", - auth: { status: "unknown" }, - message: isCommandMissingCause(error) - ? "Cursor Agent CLI (`agent`) is not installed or not on PATH." - : `Failed to execute Cursor Agent CLI health check: ${error instanceof Error ? 
error.message : String(error)}.`, - }, - }); - } + if (!cursorSettings.enabled) { + return buildServerProvider({ + presentation: CURSOR_PRESENTATION, + enabled: false, + checkedAt, + models: fallbackModels, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Cursor is disabled in T3 Code settings.", + }, + }); + } - if (Option.isNone(aboutProbe.success)) { - return buildServerProvider({ - provider: PROVIDER, - enabled: cursorSettings.enabled, - checkedAt, - models: fallbackModels, - probe: { - installed: true, - version: null, - status: "error", - auth: { status: "unknown" }, - message: "Cursor Agent CLI is installed but timed out while running `agent about`.", - }, - }); - } + // Single `agent about` probe: returns version + auth status in one call. + const aboutProbe = yield* runCursorAboutCommand(cursorSettings, environment).pipe( + Effect.timeoutOption(ABOUT_TIMEOUT_MS), + Effect.result, + ); - const parsed = parseCursorAboutOutput(aboutProbe.success.value); - const parameterizedModelPickerUnsupportedMessage = - getCursorParameterizedModelPickerUnsupportedMessage({ - version: parsed.version, - channel: readCursorCliConfigChannel(), - }); - if (parameterizedModelPickerUnsupportedMessage) { - return buildServerProvider({ - provider: PROVIDER, - enabled: cursorSettings.enabled, - checkedAt, - models: fallbackModels, - probe: { - installed: true, - version: parsed.version, - status: "error", - auth: parsed.auth, - message: - parsed.auth.status === "unauthenticated" && parsed.message - ? 
`${parameterizedModelPickerUnsupportedMessage} ${parsed.message}` - : parameterizedModelPickerUnsupportedMessage, - }, - }); - } - let discoveredModels = Option.none>(); - let discoveryWarning: string | undefined; - if (parsed.auth.status !== "unauthenticated") { - const discoveryExit = yield* Effect.exit( - discoverCursorModelsViaAcp(cursorSettings).pipe( - Effect.timeoutOption(CURSOR_ACP_MODEL_DISCOVERY_TIMEOUT_MS), - ), - ); - if (Exit.isFailure(discoveryExit)) { - yield* Effect.logWarning("Cursor ACP model discovery failed", { - cause: Cause.pretty(discoveryExit.cause), - }); - discoveryWarning = "Cursor ACP model discovery failed. Check server logs for details."; - } else if (Option.isNone(discoveryExit.value)) { - discoveryWarning = `Cursor ACP model discovery timed out after ${CURSOR_ACP_MODEL_DISCOVERY_TIMEOUT_MS}ms.`; - } else if (discoveryExit.value.value.length === 0) { - discoveryWarning = "Cursor ACP model discovery returned no built-in models."; - } else { - discoveredModels = discoveryExit.value; - } - } - return buildCursorProviderSnapshot({ + if (Result.isFailure(aboutProbe)) { + const error = aboutProbe.failure; + return buildServerProvider({ + presentation: CURSOR_PRESENTATION, + enabled: cursorSettings.enabled, checkedAt, - cursorSettings, - parsed, - discoveredModels: Option.getOrElse( - Option.filter(discoveredModels, (models) => models.length > 0), - () => [] as const, - ), - ...(discoveryWarning ? { discoveryWarning } : {}), + models: fallbackModels, + probe: { + installed: !isCommandMissingCause(error), + version: null, + status: "error", + auth: { status: "unknown" }, + message: isCommandMissingCause(error) + ? "Cursor Agent CLI (`agent`) is not installed or not on PATH." + : `Failed to execute Cursor Agent CLI health check: ${error instanceof Error ? 
error.message : String(error)}.`, + }, }); - }, -); + } -export const CursorProviderLive = Layer.effect( - CursorProvider, - Effect.gen(function* () { - const serverSettings = yield* ServerSettingsService; - const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + if (Option.isNone(aboutProbe.success)) { + return buildServerProvider({ + presentation: CURSOR_PRESENTATION, + enabled: cursorSettings.enabled, + checkedAt, + models: fallbackModels, + probe: { + installed: true, + version: null, + status: "error", + auth: { status: "unknown" }, + message: "Cursor Agent CLI is installed but timed out while running `agent about`.", + }, + }); + } - const checkProvider = checkCursorProviderStatus().pipe( - Effect.provideService(ServerSettingsService, serverSettings), - Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner), + const parsed = parseCursorAboutOutput(aboutProbe.success.value); + const cursorCliConfigChannel = yield* readCursorCliConfigChannel(); + const parameterizedModelPickerUnsupportedMessage = + getCursorParameterizedModelPickerUnsupportedMessage({ + version: parsed.version, + channel: cursorCliConfigChannel, + }); + if (parameterizedModelPickerUnsupportedMessage) { + return buildServerProvider({ + presentation: CURSOR_PRESENTATION, + enabled: cursorSettings.enabled, + checkedAt, + models: fallbackModels, + probe: { + installed: true, + version: parsed.version, + status: "error", + auth: parsed.auth, + message: + parsed.auth.status === "unauthenticated" && parsed.message + ? 
`${parameterizedModelPickerUnsupportedMessage} ${parsed.message}` + : parameterizedModelPickerUnsupportedMessage, + }, + }); + } + let discoveredModels = Option.none>(); + let discoveryWarning: string | undefined; + if (parsed.auth.status !== "unauthenticated") { + const discoveryExit = yield* Effect.exit( + discoverCursorModelsViaAcp(cursorSettings, environment).pipe( + Effect.timeoutOption(CURSOR_ACP_MODEL_DISCOVERY_TIMEOUT_MS), + ), ); + if (Exit.isFailure(discoveryExit)) { + yield* Effect.logWarning("Cursor ACP model discovery failed", { + cause: Cause.pretty(discoveryExit.cause), + }); + discoveryWarning = "Cursor ACP model discovery failed. Check server logs for details."; + } else if (Option.isNone(discoveryExit.value)) { + discoveryWarning = `Cursor ACP model discovery timed out after ${CURSOR_ACP_MODEL_DISCOVERY_TIMEOUT_MS}ms.`; + } else if (discoveryExit.value.value.length === 0) { + discoveryWarning = "Cursor ACP model discovery returned no built-in models."; + } else { + discoveredModels = discoveryExit.value; + } + } + return buildCursorProviderSnapshot({ + checkedAt, + cursorSettings, + parsed, + discoveredModels: Option.getOrElse( + Option.filter(discoveredModels, (models) => models.length > 0), + () => [] as const, + ), + ...(discoveryWarning ? 
{ discoveryWarning } : {}),
+  });
+});
-    return yield* makeManagedServerProvider({
-      getSettings: serverSettings.getSettings.pipe(
-        Effect.map((settings) => settings.providers.cursor),
-        Effect.orDie,
-      ),
-      streamSettings: serverSettings.streamChanges.pipe(
-        Stream.map((settings) => settings.providers.cursor),
-      ),
-      haveSettingsChanged: (previous, next) => !Equal.equals(previous, next),
-      initialSnapshot: buildInitialCursorProviderSnapshot,
-      checkProvider,
-      enrichSnapshot: ({ settings, snapshot, publishSnapshot }) => {
-        if (
-          !settings.enabled ||
-          snapshot.auth.status === "unauthenticated" ||
-          !snapshot.models.some((model) => !model.isCustom && !hasCursorModelCapabilities(model))
-        ) {
-          return Effect.void;
-        }
+export function hasUncapturedCursorModels(snapshot: Pick<ServerProvider, "models">): boolean {
+  return snapshot.models.some((model) => !model.isCustom && !hasCursorModelCapabilities(model));
+}
-        return discoverCursorModelCapabilitiesViaAcp(settings, snapshot.models).pipe(
-          Effect.provideService(ChildProcessSpawner.ChildProcessSpawner, spawner),
-          Effect.flatMap((discoveredModels) => {
-            if (discoveredModels.length === 0) {
-              return Effect.void;
-            }
+/**
+ * Background capability enrichment for a Cursor snapshot.
+ *
+ * Used by `CursorDriver` as the `makeManagedServerProvider.enrichSnapshot`
+ * hook: runs the slow ACP per-model capability probe, and republishes the
+ * snapshot through `publishSnapshot` when new capabilities arrive. Skips
+ * the probe when the provider is disabled, unauthenticated, or has no
+ * uncaptured models. Keeps `EMPTY_CAPABILITIES` and the `PROVIDER` literal
+ * private to this module.
+ */ +export const enrichCursorSnapshot = (input: { + readonly settings: CursorSettings; + readonly environment?: NodeJS.ProcessEnv; + readonly snapshot: ServerProvider; + readonly publishSnapshot: (snapshot: ServerProvider) => Effect.Effect; + readonly stampIdentity?: (snapshot: ServerProvider) => ServerProvider; +}): Effect.Effect => { + const { settings, snapshot, publishSnapshot } = input; + const stampIdentity = input.stampIdentity ?? ((value) => value); - return publishSnapshot({ - ...snapshot, - models: providerModelsFromSettings( - discoveredModels, - PROVIDER, - settings.customModels, - EMPTY_CAPABILITIES, - ), - }); - }), - Effect.catchCause((cause) => - Effect.logWarning("Cursor ACP background capability enrichment failed", { - models: snapshot.models.map((model) => model.slug), - cause: Cause.pretty(cause), - }).pipe(Effect.asVoid), + if ( + !settings.enabled || + snapshot.auth.status === "unauthenticated" || + !hasUncapturedCursorModels(snapshot) + ) { + return Effect.void; + } + + return discoverCursorModelCapabilitiesViaAcp(settings, snapshot.models, input.environment).pipe( + Effect.flatMap((discoveredModels) => { + if (discoveredModels.length === 0) { + return Effect.void; + } + return publishSnapshot( + stampIdentity({ + ...snapshot, + models: providerModelsFromSettings( + discoveredModels, + PROVIDER, + settings.customModels, + EMPTY_CAPABILITIES, ), - ); - }, - refreshInterval: CURSOR_REFRESH_INTERVAL, - }); - }), -); + }), + ); + }), + Effect.catchCause((cause) => + Effect.logWarning("Cursor ACP background capability enrichment failed", { + models: snapshot.models.map((model) => model.slug), + cause: Cause.pretty(cause), + }).pipe(Effect.asVoid), + ), + ); +}; diff --git a/apps/server/src/provider/Layers/GeminiCliAdapter.test.ts b/apps/server/src/provider/Layers/GeminiCliAdapter.test.ts index ce0bdfa3014..e4d05de30cc 100644 --- a/apps/server/src/provider/Layers/GeminiCliAdapter.test.ts +++ 
b/apps/server/src/provider/Layers/GeminiCliAdapter.test.ts @@ -3,6 +3,7 @@ import assert from "node:assert/strict"; import { ApprovalRequestId, EventId, + GenericProviderSettings, RuntimeItemId, ThreadId, TurnId, @@ -12,19 +13,28 @@ import { type ProviderTurnStartResult, type ProviderUserInputAnswers, } from "@t3tools/contracts"; -import { it, vi } from "@effect/vitest"; -import { Effect, Layer, Stream } from "effect"; +import { it } from "@effect/vitest"; +import { Context, Effect, Layer, Schema, Stream } from "effect"; +import { vi } from "vitest"; import { GeminiCliServerManager } from "../../geminiCliServerManager.ts"; -import { GeminiCliAdapter } from "../Services/GeminiCliAdapter.ts"; -import { makeGeminiCliAdapterLive } from "./GeminiCliAdapter.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +import { + makeGeminiCliAdapter, + type GeminiCliAdapterShape, +} from "./GeminiCliAdapter.ts"; const asThreadId = (value: string): ThreadId => ThreadId.make(value); const asTurnId = (value: string): TurnId => TurnId.make(value); const asEventId = (value: string): EventId => EventId.make(value); const asItemId = (value: string): RuntimeItemId => RuntimeItemId.make(value); +// Test-local service tag mirroring the OpenCode/Claude adapter test pattern: +// the new factory returns a shape directly, so tests inject it through a +// throwaway Context.Service tag. 
+class GeminiCliAdapter extends Context.Service<GeminiCliAdapterShape>()(
+  "test/GeminiCliAdapter",
+) {}
+
 class FakeGeminiCliManager extends GeminiCliServerManager {
   public startSessionImpl = vi.fn(async (threadId: ThreadId): Promise<ProviderSession> => {
     const now = new Date().toISOString();
@@ -105,81 +115,103 @@ class FakeGeminiCliManager extends GeminiCliServerManager {
   }
 }
 
-const manager = new FakeGeminiCliManager();
-const layer = it.layer(
-  makeGeminiCliAdapterLive({ manager }).pipe(Layer.provideMerge(ServerSettingsService.layerTest())),
+const enabledConfig = Schema.decodeSync(GenericProviderSettings)({ enabled: true });
+const disabledConfig = Schema.decodeSync(GenericProviderSettings)({ enabled: false });
+
+const makeAdapterLayer = (manager: FakeGeminiCliManager, config = enabledConfig) =>
+  Layer.effect(GeminiCliAdapter, makeGeminiCliAdapter(config, { manager }));
+
+it.effect("delegates session startup to the manager", () => {
+  // NOTE(review): the layer must be built from the SAME manager the
+  // assertions inspect — providing a second `new FakeGeminiCliManager()`
+  // here left `manager.startSessionImpl` uncalled, so the mock-call
+  // assertion below could never pass.
+  const manager = new FakeGeminiCliManager();
+  return Effect.gen(function* () {
+    const adapter = yield* GeminiCliAdapter;
+
+    const session = yield* adapter.startSession({
+      threadId: asThreadId("thread-1"),
+      runtimeMode: "full-access",
+    });
+
+    assert.equal(session.provider, "geminiCli");
+    assert.equal(manager.startSessionImpl.mock.calls[0]?.[0], asThreadId("thread-1"));
+  }).pipe(Effect.provide(makeAdapterLayer(manager)), Effect.scoped);
+});
 
-layer("GeminiCliAdapterLive", (it) => {
-  it.effect("delegates session startup to the manager", () =>
-    Effect.gen(function* () {
-      manager.startSessionImpl.mockClear();
-      const adapter = yield* GeminiCliAdapter;
+it.effect("returns validation error when the provider is disabled", () =>
+  Effect.gen(function* () {
+    const adapter = yield* GeminiCliAdapter;
 
-      const session = yield* adapter.startSession({
-        threadId: asThreadId("thread-1"),
+    const result = yield* adapter
+      .startSession({
+        threadId: asThreadId("thread-disabled"),
         runtimeMode: "full-access",
-      });
+      })
+      .pipe(Effect.result);
+
+    
assert.equal(result._tag, "Failure");
+    if (result._tag !== "Failure") return;
+    assert.equal(result.failure._tag, "ProviderAdapterValidationError");
+  }).pipe(
+    Effect.provide(makeAdapterLayer(new FakeGeminiCliManager(), disabledConfig)),
+    Effect.scoped,
+  ),
+);
-
-      assert.equal(session.provider, "geminiCli");
-      assert.equal(manager.startSessionImpl.mock.calls[0]?.[0], asThreadId("thread-1"));
-    }),
-  );
+it.effect("rejects attachments until Gemini CLI attachment wiring exists", () =>
+  Effect.gen(function* () {
+    const adapter = yield* GeminiCliAdapter;
+    const result = yield* adapter
+      .sendTurn({
+        threadId: asThreadId("thread-attachments"),
+        input: "hello",
+        attachments: [{ id: "attachment-1" }] as never,
+      })
+      .pipe(Effect.result);
+
+    assert.equal(result._tag, "Failure");
+    if (result._tag !== "Failure") {
+      return;
+    }
+    assert.equal(result.failure._tag, "ProviderAdapterValidationError");
+  }).pipe(Effect.provide(makeAdapterLayer(new FakeGeminiCliManager())), Effect.scoped),
+);
-  it.effect("rejects attachments until Gemini CLI attachment wiring exists", () =>
-    Effect.gen(function* () {
-      const adapter = yield* GeminiCliAdapter;
-      const result = yield* adapter
-        .sendTurn({
-          threadId: asThreadId("thread-attachments"),
-          input: "hello",
-          attachments: [{ id: "attachment-1" }] as never,
-        })
-        .pipe(Effect.result);
-
-      assert.equal(result._tag, "Failure");
-      if (result._tag !== "Failure") {
-        return;
-      }
-      assert.equal(result.failure._tag, "ProviderAdapterValidationError");
-    }),
-  );
+it.effect("forwards manager runtime events through the adapter stream", () => {
+  // NOTE(review): the emitting manager must back the provided layer.
+  // Previously the pipe provided a *different* `new FakeGeminiCliManager()`,
+  // so the emitted event never reached the adapter queue and `runHead`
+  // suspended until the test timed out; the dead `layer` local was then
+  // silenced with `void layer`.
+  const manager = new FakeGeminiCliManager();
+  return Effect.gen(function* () {
+    const adapter = yield* GeminiCliAdapter;
-  it.effect("forwards manager runtime events through the adapter stream", () =>
-    Effect.gen(function* () {
-      const adapter = yield* GeminiCliAdapter;
-
-      const event = {
-        type: "content.delta",
-        eventId: asEventId("evt-gemini-delta"),
-        provider: "geminiCli",
-        createdAt: new Date().toISOString(),
-        threadId: asThreadId("thread-1"),
-        turnId: asTurnId("turn-1"),
-        itemId: asItemId("item-1"),
-        payload: {
-          streamKind: "assistant_text",
-          delta: "hello",
-        },
-      } as unknown as ProviderRuntimeEvent;
-
-      // Emit first — the event is buffered in the unbounded queue via the
-      // listener that was registered during layer construction.
-      manager.emit("event", event);
-
-      // Now consume the head. Since the queue already has an item, this
-      // resolves immediately without a race condition.
-      const received = yield* Stream.runHead(adapter.streamEvents);
-
-      assert.equal(received._tag, "Some");
-      if (received._tag !== "Some") {
-        return;
-      }
-      assert.equal(received.value.type, "content.delta");
-      if (received.value.type !== "content.delta") {
-        return;
-      }
-      assert.equal(received.value.payload.delta, "hello");
-    }),
-  );
-});
+    const event = {
+      type: "content.delta",
+      eventId: asEventId("evt-gemini-delta"),
+      provider: "geminiCli",
+      createdAt: new Date().toISOString(),
+      threadId: asThreadId("thread-1"),
+      turnId: asTurnId("turn-1"),
+      itemId: asItemId("item-1"),
+      payload: {
+        streamKind: "assistant_text",
+        delta: "hello",
+      },
+    } as unknown as ProviderRuntimeEvent;
+
+    // Event must be emitted AFTER the listener is attached (i.e. after the
+    // layer is built and the adapter is yielded); the buffered queue then
+    // holds the event so the subsequent `runHead` resolves immediately.
+    manager.emit("event", event);
+
+    const received = yield* Stream.runHead(adapter.streamEvents);
+
+    assert.equal(received._tag, "Some");
+    if (received._tag !== "Some") {
+      return;
+    }
+    assert.equal(received.value.type, "content.delta");
+    if (received.value.type !== "content.delta") {
+      return;
+    }
+    assert.equal(received.value.payload.delta, "hello");
+  }).pipe(Effect.provide(makeAdapterLayer(manager)), Effect.scoped);
+});
diff --git a/apps/server/src/provider/Layers/GeminiCliAdapter.ts b/apps/server/src/provider/Layers/GeminiCliAdapter.ts
index 1c14bf9743b..a1804c80b56 100644
--- a/apps/server/src/provider/Layers/GeminiCliAdapter.ts
+++ b/apps/server/src/provider/Layers/GeminiCliAdapter.ts
@@ -1,145 +1,183 @@
-import { type ProviderRuntimeEvent } from "@t3tools/contracts";
-import { Effect, Layer, Queue, Stream } from "effect";
+/**
+ * GeminiCliAdapter — per-instance Gemini CLI provider adapter.
+ *
+ * Mirrors the Amp/Kilo pattern but exposes a `makeGeminiCliAdapter(config, options)`
+ * factory (instead of the old `Layer.effect` + service-tag setup) so the
+ * `GeminiCliDriver` can capture one adapter per instance.
+ *
+ * The underlying `GeminiCliServerManager` is a pure EventEmitter that owns
+ * all per-instance state — sessions, child processes, usage accumulator —
+ * so two `makeGeminiCliAdapter` calls with different configs produce two
+ * fully isolated adapters with no shared mutable state.
+ * + * @module provider/Layers/GeminiCliAdapter + */ +import { + type GenericProviderSettings, + ProviderDriverKind, + ProviderInstanceId, + type ProviderRuntimeEvent, +} from "@t3tools/contracts"; +import { Effect, Queue, Stream } from "effect"; import { GeminiCliServerManager } from "../../geminiCliServerManager.ts"; -import { ProviderAdapterProcessError, ProviderAdapterValidationError } from "../Errors.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; -import { GeminiCliAdapter, type GeminiCliAdapterShape } from "../Services/GeminiCliAdapter.ts"; +import { + ProviderAdapterValidationError, + type ProviderAdapterError, +} from "../Errors.ts"; +import type { ProviderAdapterShape } from "../Services/ProviderAdapter.ts"; +import type { EventNdjsonLogger } from "./EventNdjsonLogger.ts"; import { makeErrorHelpers } from "./ProviderAdapterUtils.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -const PROVIDER = "geminiCli" as const; -const { toRequestError } = makeErrorHelpers(PROVIDER, { +const PROVIDER = ProviderDriverKind.make("geminiCli"); +const { toRequestError } = makeErrorHelpers("geminiCli", { sessionNotFoundHints: ["unknown gemini cli session", "unknown session"], }); -export interface GeminiCliAdapterLiveOptions { +export interface GeminiCliAdapterShape extends ProviderAdapterShape {} + +export interface GeminiCliAdapterOptions { + readonly instanceId?: ProviderInstanceId; + readonly environment?: NodeJS.ProcessEnv; + readonly nativeEventLogger?: EventNdjsonLogger; + /** + * Inject a pre-built manager. Test-only — the driver always constructs its + * own. Mutually exclusive with `makeManager`. + */ readonly manager?: GeminiCliServerManager; + /** + * Lazy manager factory used by tests that want a fresh manager but still + * want the adapter to call the constructor. 
+ */ readonly makeManager?: () => GeminiCliServerManager; } -export function makeGeminiCliAdapterLive(options: GeminiCliAdapterLiveOptions = {}) { - return Layer.effect( - GeminiCliAdapter, - Effect.gen(function* () { - const manager = options.manager ?? options.makeManager?.() ?? new GeminiCliServerManager(); - const runtimeEventQueue = yield* Queue.unbounded(); - const serverSettingsService = yield* ServerSettingsService; +/** + * Build one Gemini CLI adapter bound to the given config and options. + * + * Closes over a private `GeminiCliServerManager` so that two instances of + * the same driver cannot reach into each other's session tables. Adds an + * Effect finalizer that detaches the manager event listener and stops every + * remaining session when the surrounding scope closes. + */ +export const makeGeminiCliAdapter = Effect.fn("makeGeminiCliAdapter")(function* ( + config: GenericProviderSettings, + options: GeminiCliAdapterOptions = {}, +) { + const _boundInstanceId = options.instanceId ?? ProviderInstanceId.make("geminiCli"); + const trimmedBinary = config.binaryPath.trim(); + const manager = + options.manager ?? + options.makeManager?.() ?? + new GeminiCliServerManager({ + ...(trimmedBinary.length > 0 ? { binaryPath: trimmedBinary } : {}), + }); + // Keep the manager's binary path in sync with the latest config — drivers + // recreate the adapter when settings change, but tests may pass a manager + // with an empty default that should pick up the config value. 
+ if (trimmedBinary.length > 0 && !manager.binaryPath) { + manager.binaryPath = trimmedBinary; + } - yield* Effect.acquireRelease( - Effect.sync(() => { - const listener = (event: ProviderRuntimeEvent) => { - Effect.runFork(Queue.offer(runtimeEventQueue, event).pipe(Effect.asVoid)); - }; - manager.on("event", listener); - return listener; - }), - (listener) => - Effect.gen(function* () { - manager.off("event", listener); - manager.stopAll(); - yield* Queue.shutdown(runtimeEventQueue); - }), - ); + const runtimeEventQueue = yield* Queue.unbounded(); - const service = { - provider: PROVIDER, - capabilities: getProviderCapabilities(PROVIDER), - startSession: (input) => - Effect.gen(function* () { - const providerSettings = yield* serverSettingsService.getSettings.pipe( - Effect.map((s) => s.providers.geminiCli), - Effect.mapError( - (error) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: error.message, - cause: error, - }), - ), - ); - if (!providerSettings.enabled) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "startSession", - issue: "Gemini CLI provider is disabled in server settings.", - }); - } - manager.binaryPath = providerSettings.binaryPath.trim() || undefined; - return yield* Effect.tryPromise({ - try: () => manager.startSession(input), - catch: (cause) => toRequestError(input.threadId, "session/start", cause), - }); - }), - sendTurn: (input) => { - if ((input.attachments?.length ?? 
0) > 0) { - return Effect.fail( - new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "sendTurn", - issue: "Gemini CLI attachments are not supported yet.", - }), - ); - } + yield* Effect.acquireRelease( + Effect.sync(() => { + const listener = (event: ProviderRuntimeEvent) => { + Effect.runFork(Queue.offer(runtimeEventQueue, event).pipe(Effect.asVoid)); + }; + manager.on("event", listener); + return listener; + }), + (listener) => + Effect.gen(function* () { + manager.off("event", listener); + manager.stopAll(); + yield* Queue.shutdown(runtimeEventQueue); + }), + ); - return Effect.tryPromise({ - try: () => manager.sendTurn(input), - catch: (cause) => toRequestError(input.threadId, "session/prompt", cause), + const adapter: GeminiCliAdapterShape = { + provider: PROVIDER, + capabilities: { sessionModelSwitch: "in-session" }, + startSession: (input) => + Effect.gen(function* () { + if (!config.enabled) { + return yield* new ProviderAdapterValidationError({ + provider: "geminiCli", + operation: "startSession", + issue: "Gemini CLI provider is disabled in server settings.", }); - }, - interruptTurn: (threadId) => - Effect.tryPromise({ - try: () => manager.interruptTurn(threadId), - catch: (cause) => toRequestError(threadId, "session/interrupt", cause), - }), - respondToRequest: (threadId, requestId, decision) => - Effect.tryPromise({ - try: () => manager.respondToRequest(threadId, requestId, decision), - catch: (cause) => toRequestError(threadId, "permission/reply", cause), - }), - respondToUserInput: (threadId, requestId, answers) => - Effect.tryPromise({ - try: () => manager.respondToUserInput(threadId, requestId, answers), - catch: (cause) => toRequestError(threadId, "question/reply", cause), + } + return yield* Effect.tryPromise({ + try: () => manager.startSession(input), + catch: (cause) => toRequestError(input.threadId, "session/start", cause), + }); + }), + sendTurn: (input) => { + if ((input.attachments?.length ?? 
0) > 0) { + return Effect.fail( + new ProviderAdapterValidationError({ + provider: "geminiCli", + operation: "sendTurn", + issue: "Gemini CLI attachments are not supported yet.", }), - stopSession: (threadId) => - Effect.sync(() => { - manager.stopSession(threadId); - }), - listSessions: () => Effect.sync(() => manager.listSessions()), - hasSession: (threadId) => Effect.sync(() => manager.hasSession(threadId)), - readThread: (threadId) => - Effect.tryPromise({ - try: () => manager.readThread(threadId), - catch: (cause) => toRequestError(threadId, "session/messages", cause), - }), - rollbackThread: (threadId, numTurns) => { - if (!Number.isInteger(numTurns) || numTurns < 1) { - return Effect.fail( - new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "rollbackThread", - issue: "numTurns must be an integer >= 1.", - }), - ); - } + ); + } - return Effect.tryPromise({ - try: () => manager.rollbackThread(threadId), - catch: (cause) => toRequestError(threadId, "session/revert", cause), - }); - }, - stopAll: () => - Effect.sync(() => { - manager.stopAll(); + return Effect.tryPromise({ + try: () => manager.sendTurn(input), + catch: (cause) => toRequestError(input.threadId, "session/prompt", cause), + }); + }, + interruptTurn: (threadId) => + Effect.tryPromise({ + try: () => manager.interruptTurn(threadId), + catch: (cause) => toRequestError(threadId, "session/interrupt", cause), + }), + respondToRequest: (threadId, requestId, decision) => + Effect.tryPromise({ + try: () => manager.respondToRequest(threadId, requestId, decision), + catch: (cause) => toRequestError(threadId, "permission/reply", cause), + }), + respondToUserInput: (threadId, requestId, answers) => + Effect.tryPromise({ + try: () => manager.respondToUserInput(threadId, requestId, answers), + catch: (cause) => toRequestError(threadId, "question/reply", cause), + }), + stopSession: (threadId) => + Effect.sync(() => { + manager.stopSession(threadId); + }), + listSessions: () => Effect.sync(() 
=> manager.listSessions()), + hasSession: (threadId) => Effect.sync(() => manager.hasSession(threadId)), + readThread: (threadId) => + Effect.tryPromise({ + try: () => manager.readThread(threadId), + catch: (cause) => toRequestError(threadId, "session/messages", cause), + }), + rollbackThread: (threadId, numTurns) => { + if (!Number.isInteger(numTurns) || numTurns < 1) { + return Effect.fail( + new ProviderAdapterValidationError({ + provider: "geminiCli", + operation: "rollbackThread", + issue: "numTurns must be an integer >= 1.", }), - streamEvents: Stream.fromQueue(runtimeEventQueue), - } satisfies GeminiCliAdapterShape; + ); + } - return service; - }), - ); -} + return Effect.tryPromise({ + try: () => manager.rollbackThread(threadId), + catch: (cause) => toRequestError(threadId, "session/revert", cause), + }); + }, + stopAll: () => + Effect.sync(() => { + manager.stopAll(); + }), + streamEvents: Stream.fromQueue(runtimeEventQueue), + }; -export const GeminiCliAdapterLive = makeGeminiCliAdapterLive(); + return adapter; +}); diff --git a/apps/server/src/provider/Layers/GeminiCliProvider.ts b/apps/server/src/provider/Layers/GeminiCliProvider.ts new file mode 100644 index 00000000000..30cc50405f9 --- /dev/null +++ b/apps/server/src/provider/Layers/GeminiCliProvider.ts @@ -0,0 +1,256 @@ +/** + * GeminiCliProvider — snapshot probe for the Gemini CLI provider. + * + * Mirrors `ClaudeProvider` / `OpenCodeProvider`: exposes + * `checkGeminiCliStatus(config, env)` which spawns `gemini --version` and + * returns a `ServerProviderDraft`, plus `makePendingGeminiCliProvider(config)` + * which returns the placeholder shape used before the first probe completes. + * + * Drivers stamp `instanceId` / `driver` / `displayName` onto the draft via + * `withInstanceIdentity` in `GeminiCliDriver`. 
+ * + * @module provider/Layers/GeminiCliProvider + */ +import { + type GenericProviderSettings, + type ModelCapabilities, + ProviderDriverKind, + type ServerProviderModel, +} from "@t3tools/contracts"; +import { Effect, Option, Path, Result } from "effect"; +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; + +import { createModelCapabilities } from "@t3tools/shared/model"; + +import { + buildSelectOptionDescriptor, + buildServerProvider, + DEFAULT_TIMEOUT_MS, + detailFromResult, + isCommandMissingCause, + parseGenericCliVersion, + providerModelsFromSettings, + spawnAndCollect, + type ServerProviderDraft, +} from "../providerSnapshot.ts"; + +const PROVIDER = ProviderDriverKind.make("geminiCli"); +const GEMINI_PRESENTATION = { + displayName: "Gemini CLI", + showInteractionModeToggle: true, +} as const; + +/** + * Capabilities for known Gemini models. Includes a `thinkingBudget` selector + * (fork-only feature) — kept in this layer because the probe / adapter both + * need it. + */ +const THINKING_BUDGET_DESCRIPTOR = buildSelectOptionDescriptor({ + id: "thinkingBudget", + label: "Thinking Budget", + options: [ + { value: "auto", label: "Auto", isDefault: true }, + { value: "low", label: "Low" }, + { value: "medium", label: "Medium" }, + { value: "high", label: "High" }, + ], +}); + +const DEFAULT_GEMINI_MODEL_CAPABILITIES: ModelCapabilities = createModelCapabilities({ + optionDescriptors: [THINKING_BUDGET_DESCRIPTOR], +}); + +const BUILT_IN_MODELS: ReadonlyArray = [ + { + slug: "gemini-2.5-pro", + name: "Gemini 2.5 Pro", + isCustom: false, + capabilities: DEFAULT_GEMINI_MODEL_CAPABILITIES, + }, + { + slug: "gemini-2.5-flash", + name: "Gemini 2.5 Flash", + isCustom: false, + capabilities: DEFAULT_GEMINI_MODEL_CAPABILITIES, + }, + { + slug: "gemini-3-flash", + name: "Gemini 3 Flash", + isCustom: false, + capabilities: DEFAULT_GEMINI_MODEL_CAPABILITIES, + }, +]; + +/** Resolve the configured binary path, or fall back to `"gemini"`. 
*/ +function resolveBinary(config: GenericProviderSettings): string { + const trimmed = config.binaryPath.trim(); + return trimmed.length > 0 ? trimmed : "gemini"; +} + +const runGeminiCommand = Effect.fn("runGeminiCommand")(function* ( + config: GenericProviderSettings, + args: ReadonlyArray, + environment: NodeJS.ProcessEnv = process.env, +) { + const binaryPath = resolveBinary(config); + const command = ChildProcess.make(binaryPath, [...args], { + env: environment, + shell: process.platform === "win32", + }); + return yield* spawnAndCollect(binaryPath, command); +}); + +export const checkGeminiCliStatus = Effect.fn("checkGeminiCliStatus")(function* ( + config: GenericProviderSettings, + environment: NodeJS.ProcessEnv = process.env, +): Effect.fn.Return< + ServerProviderDraft, + never, + ChildProcessSpawner.ChildProcessSpawner | Path.Path +> { + const checkedAt = new Date().toISOString(); + const allModels = providerModelsFromSettings( + BUILT_IN_MODELS, + PROVIDER, + config.customModels, + DEFAULT_GEMINI_MODEL_CAPABILITIES, + ); + + if (!config.enabled) { + return buildServerProvider({ + presentation: GEMINI_PRESENTATION, + enabled: false, + checkedAt, + models: allModels, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Gemini CLI is disabled in T3 Code settings.", + }, + }); + } + + const versionProbe = yield* runGeminiCommand(config, ["--version"], environment).pipe( + Effect.timeoutOption(DEFAULT_TIMEOUT_MS), + Effect.result, + ); + + if (Result.isFailure(versionProbe)) { + const error = versionProbe.failure; + return buildServerProvider({ + presentation: GEMINI_PRESENTATION, + enabled: config.enabled, + checkedAt, + models: allModels, + probe: { + installed: !isCommandMissingCause(error), + version: null, + status: "error", + auth: { status: "unknown" }, + message: isCommandMissingCause(error) + ? "Gemini CLI (`gemini`) is not installed or not on PATH." 
+ : `Failed to execute Gemini CLI health check: ${error instanceof Error ? error.message : String(error)}.`, + }, + }); + } + + if (Option.isNone(versionProbe.success)) { + return buildServerProvider({ + presentation: GEMINI_PRESENTATION, + enabled: config.enabled, + checkedAt, + models: allModels, + probe: { + installed: true, + version: null, + status: "error", + auth: { status: "unknown" }, + message: "Gemini CLI is installed but failed to run. Timed out while running command.", + }, + }); + } + + const version = versionProbe.success.value; + const parsedVersion = parseGenericCliVersion(`${version.stdout}\n${version.stderr}`); + if (version.code !== 0) { + const detail = detailFromResult(version); + return buildServerProvider({ + presentation: GEMINI_PRESENTATION, + enabled: config.enabled, + checkedAt, + models: allModels, + probe: { + installed: true, + version: parsedVersion, + status: "error", + auth: { status: "unknown" }, + message: detail + ? `Gemini CLI is installed but failed to run. 
${detail}` + : "Gemini CLI is installed but failed to run.", + }, + }); + } + + return buildServerProvider({ + presentation: GEMINI_PRESENTATION, + enabled: config.enabled, + checkedAt, + models: allModels, + probe: { + installed: true, + version: parsedVersion, + status: "ready", + auth: { + status: "authenticated", + type: "geminiCli", + label: "Gemini CLI", + }, + }, + }); +}); + +export const makePendingGeminiCliProvider = ( + config: GenericProviderSettings, +): ServerProviderDraft => { + const checkedAt = new Date().toISOString(); + const models = providerModelsFromSettings( + BUILT_IN_MODELS, + PROVIDER, + config.customModels, + DEFAULT_GEMINI_MODEL_CAPABILITIES, + ); + + if (!config.enabled) { + return buildServerProvider({ + presentation: GEMINI_PRESENTATION, + enabled: false, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Gemini CLI is disabled in T3 Code settings.", + }, + }); + } + + return buildServerProvider({ + presentation: GEMINI_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Gemini CLI provider status has not been checked in this session yet.", + }, + }); +}; + +export { BUILT_IN_MODELS as GEMINI_BUILT_IN_MODELS, DEFAULT_GEMINI_MODEL_CAPABILITIES }; diff --git a/apps/server/src/provider/Layers/KiloAdapter.test.ts b/apps/server/src/provider/Layers/KiloAdapter.test.ts index 227fd36a04b..b7a811613fe 100644 --- a/apps/server/src/provider/Layers/KiloAdapter.test.ts +++ b/apps/server/src/provider/Layers/KiloAdapter.test.ts @@ -1,24 +1,20 @@ import assert from "node:assert/strict"; import { - ApprovalRequestId, EventId, RuntimeItemId, ThreadId, TurnId, - type ProviderApprovalDecision, type ProviderRuntimeEvent, type ProviderSession, type ProviderTurnStartResult, - type ProviderUserInputAnswers, } from "@t3tools/contracts"; import { it, vi } from 
"@effect/vitest"; -import { Effect, Layer, Stream } from "effect"; +import { Effect, Schema, Stream } from "effect"; +import { GenericProviderSettings } from "@t3tools/contracts"; import { KiloServerManager } from "../../kiloServerManager.ts"; -import { KiloAdapter } from "../Services/KiloAdapter.ts"; -import { makeKiloAdapterLive } from "./KiloAdapter.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +import { makeKiloAdapter } from "./KiloAdapter.ts"; const asThreadId = (value: string): ThreadId => ThreadId.make(value); const asTurnId = (value: string): TurnId => TurnId.make(value); @@ -47,13 +43,6 @@ class FakeKiloManager extends KiloServerManager { }), ); - public interruptTurnImpl = vi.fn(async (): Promise => undefined); - public respondToRequestImpl = vi.fn(async (): Promise => undefined); - public respondToUserInputImpl = vi.fn(async (): Promise => undefined); - public readThreadImpl = vi.fn(async (threadId: ThreadId) => ({ threadId, turns: [] })); - public rollbackThreadImpl = vi.fn(async (threadId: ThreadId) => ({ threadId, turns: [] })); - public stopAllImpl = vi.fn(() => undefined); - override startSession(input: { threadId: ThreadId }): Promise { return this.startSessionImpl(input.threadId); } @@ -63,31 +52,15 @@ class FakeKiloManager extends KiloServerManager { } override interruptTurn(_threadId: ThreadId): Promise { - return this.interruptTurnImpl(); - } - - override respondToRequest( - _threadId: ThreadId, - _requestId: ApprovalRequestId, - _decision: ProviderApprovalDecision, - ): Promise { - return this.respondToRequestImpl(); - } - - override respondToUserInput( - _threadId: ThreadId, - _requestId: ApprovalRequestId, - _answers: ProviderUserInputAnswers, - ): Promise { - return this.respondToUserInputImpl(); + return Promise.resolve(); } override readThread(threadId: ThreadId) { - return this.readThreadImpl(threadId); + return Promise.resolve({ threadId, turns: [] }); } override rollbackThread(threadId: ThreadId) { - return 
this.rollbackThreadImpl(threadId); + return Promise.resolve({ threadId, turns: [] }); } override stopSession(_threadId: ThreadId): void {} @@ -100,21 +73,16 @@ class FakeKiloManager extends KiloServerManager { return false; } - override stopAll(): void { - this.stopAllImpl(); - } + override stopAll(): void {} } -const manager = new FakeKiloManager(); -const layer = it.layer( - makeKiloAdapterLive({ manager }).pipe(Layer.provideMerge(ServerSettingsService.layerTest())), -); +const enabledSettings = Schema.decodeSync(GenericProviderSettings)({ enabled: true }); -layer("KiloAdapterLive", (it) => { - it.effect("delegates session startup to the manager", () => +it.effect("makeKiloAdapter delegates session startup to the manager", () => + Effect.scoped( Effect.gen(function* () { - manager.startSessionImpl.mockClear(); - const adapter = yield* KiloAdapter; + const manager = new FakeKiloManager(); + const adapter = yield* makeKiloAdapter(enabledSettings, { manager }); const session = yield* adapter.startSession({ threadId: asThreadId("thread-1"), @@ -124,11 +92,14 @@ layer("KiloAdapterLive", (it) => { assert.equal(session.provider, "kilo"); assert.equal(manager.startSessionImpl.mock.calls[0]?.[0], asThreadId("thread-1")); }), - ); + ), +); - it.effect("rejects attachments until Kilo attachment wiring exists", () => +it.effect("makeKiloAdapter rejects attachments until Kilo wiring exists", () => + Effect.scoped( Effect.gen(function* () { - const adapter = yield* KiloAdapter; + const adapter = yield* makeKiloAdapter(enabledSettings, { manager: new FakeKiloManager() }); + const result = yield* adapter .sendTurn({ threadId: asThreadId("thread-attachments"), @@ -143,11 +114,14 @@ layer("KiloAdapterLive", (it) => { } assert.equal(result.failure._tag, "ProviderAdapterValidationError"); }), - ); + ), +); - it.effect("forwards manager runtime events through the adapter stream", () => +it.effect("makeKiloAdapter forwards manager runtime events through the stream", () => + 
Effect.scoped( Effect.gen(function* () { - const adapter = yield* KiloAdapter; + const manager = new FakeKiloManager(); + const adapter = yield* makeKiloAdapter(enabledSettings, { manager }); const event = { type: "content.delta", @@ -163,12 +137,8 @@ layer("KiloAdapterLive", (it) => { }, } as unknown as ProviderRuntimeEvent; - // Emit first — the event is buffered in the unbounded queue via the - // listener that was registered during layer construction. manager.emit("event", event); - // Now consume the head. Since the queue already has an item, this - // resolves immediately without a race condition. const received = yield* Stream.runHead(adapter.streamEvents); assert.equal(received._tag, "Some"); @@ -181,5 +151,24 @@ layer("KiloAdapterLive", (it) => { } assert.equal(received.value.payload.delta, "hello"); }), - ); -}); + ), +); + +it.effect("makeKiloAdapter rejects startSession when disabled", () => + Effect.scoped( + Effect.gen(function* () { + const disabled = Schema.decodeSync(GenericProviderSettings)({ enabled: false }); + const adapter = yield* makeKiloAdapter(disabled, { manager: new FakeKiloManager() }); + + const result = yield* adapter + .startSession({ threadId: asThreadId("thread-disabled"), runtimeMode: "full-access" }) + .pipe(Effect.result); + + assert.equal(result._tag, "Failure"); + if (result._tag !== "Failure") { + return; + } + assert.equal(result.failure._tag, "ProviderAdapterValidationError"); + }), + ), +); diff --git a/apps/server/src/provider/Layers/KiloAdapter.ts b/apps/server/src/provider/Layers/KiloAdapter.ts index 88564676879..4a7e417a639 100644 --- a/apps/server/src/provider/Layers/KiloAdapter.ts +++ b/apps/server/src/provider/Layers/KiloAdapter.ts @@ -1,145 +1,170 @@ -import { type ProviderRuntimeEvent } from "@t3tools/contracts"; -import { Effect, Layer, Queue, Stream } from "effect"; +/** + * KiloAdapter — per-instance Kilo provider adapter. 
+ * + * The adapter owns a fresh `KiloServerManager` per `ProviderInstance`, so two + * Kilo instances never share session state, server processes, or runtime + * event queues. The factory `makeKiloAdapter` is invoked from + * {@link ../Drivers/KiloDriver} inside the registry's per-instance scope; the + * scope finalizer registered here calls `manager.stopAll()` and shuts down + * the runtime event queue. + * + * @module provider/Layers/KiloAdapter + */ +import { + ProviderDriverKind, + ProviderInstanceId, + type ProviderRuntimeEvent, +} from "@t3tools/contracts"; +import { Effect, Queue, Stream } from "effect"; import { KiloServerManager } from "../../kiloServerManager.ts"; import type { KiloSessionStartInput } from "../../kilo/types.ts"; -import { ProviderAdapterProcessError, ProviderAdapterValidationError } from "../Errors.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; -import { KiloAdapter, type KiloAdapterShape } from "../Services/KiloAdapter.ts"; +import type { OpenCodeAdapterShape } from "../Services/OpenCodeAdapter.ts"; +import { + ProviderAdapterRequestError, + ProviderAdapterValidationError, +} from "../Errors.ts"; import { makeErrorHelpers } from "./ProviderAdapterUtils.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +import type { KiloSettings } from "./KiloProvider.ts"; -const PROVIDER = "kilo" as const; -const { toRequestError } = makeErrorHelpers(PROVIDER); +const PROVIDER = ProviderDriverKind.make("kilo"); +const { toRequestError } = makeErrorHelpers("kilo"); -export interface KiloAdapterLiveOptions { +export interface KiloAdapterOptions { + readonly instanceId?: ProviderInstanceId; + readonly environment?: NodeJS.ProcessEnv; + /** Optional injection point used by tests to swap in a fake manager. 
*/ readonly manager?: KiloServerManager; readonly makeManager?: () => KiloServerManager; } -export function makeKiloAdapterLive(options: KiloAdapterLiveOptions = {}) { - return Layer.effect( - KiloAdapter, - Effect.gen(function* () { - const manager = options.manager ?? options.makeManager?.() ?? new KiloServerManager(); - const runtimeEventQueue = yield* Queue.unbounded(); - const serverSettingsService = yield* ServerSettingsService; +/** + * KiloAdapterShape — per-instance Kilo adapter contract. Reuses the + * OpenCode adapter shape (Kilo is API-compatible) and is keyed by the + * `kilo` driver kind. + */ +export interface KiloAdapterShape extends OpenCodeAdapterShape {} - yield* Effect.acquireRelease( - Effect.sync(() => { - const listener = (event: ProviderRuntimeEvent) => { - Effect.runFork(Queue.offer(runtimeEventQueue, event).pipe(Effect.asVoid)); - }; - manager.on("event", listener); - return listener; - }), - (listener) => - Effect.gen(function* () { - manager.off("event", listener); - manager.stopAll(); - yield* Queue.shutdown(runtimeEventQueue); - }), - ); +export const makeKiloAdapter = Effect.fn("makeKiloAdapter")(function* ( + kiloSettings: KiloSettings, + options?: KiloAdapterOptions, +) { + const _instanceId = options?.instanceId ?? ProviderInstanceId.make("kilo"); + void _instanceId; // reserved for future per-instance tagging + const manager = + options?.manager ?? options?.makeManager?.() ?? 
new KiloServerManager(); + const runtimeEventQueue = yield* Queue.unbounded(); - const service = { - provider: PROVIDER, - capabilities: getProviderCapabilities(PROVIDER), - startSession: (input) => - Effect.gen(function* () { - const providerSettings = yield* serverSettingsService.getSettings.pipe( - Effect.map((s) => s.providers.kilo), - Effect.mapError( - (error) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: error.message, - cause: error, - }), - ), - ); - if (!providerSettings.enabled) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "startSession", - issue: "Kilo provider is disabled in server settings.", - }); - } - const binaryPath = providerSettings.binaryPath.trim() || "kilo"; - return yield* Effect.tryPromise({ - try: () => - manager.startSession({ ...input, kilo: { binaryPath } } as KiloSessionStartInput), - catch: (cause) => toRequestError(input.threadId, "session/start", cause), - }); - }), - sendTurn: (input) => { - if ((input.attachments?.length ?? 0) > 0) { - return Effect.fail( - new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "sendTurn", - issue: "Kilo attachments are not wired yet.", - }), - ); - } + // Acquire the manager event listener at scope start, release at scope close. + // Closing the registry-owned scope tears down sessions, the spawned Kilo + // server child process, and the runtime event queue exactly once. 
+ yield* Effect.acquireRelease( + Effect.sync(() => { + const listener = (event: ProviderRuntimeEvent) => { + Effect.runFork(Queue.offer(runtimeEventQueue, event).pipe(Effect.asVoid)); + }; + manager.on("event", listener); + return listener; + }), + (listener) => + Effect.gen(function* () { + manager.off("event", listener); + manager.stopAll(); + yield* Queue.shutdown(runtimeEventQueue); + }), + ); - return Effect.tryPromise({ - try: () => manager.sendTurn(input), - catch: (cause) => toRequestError(input.threadId, "session/prompt_async", cause), + const resolveBinaryPath = (): string => kiloSettings.binaryPath.trim() || "kilo"; + + const service: KiloAdapterShape = { + provider: PROVIDER, + capabilities: { sessionModelSwitch: "in-session" }, + startSession: (input) => + Effect.gen(function* () { + if (!kiloSettings.enabled) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "startSession", + issue: "Kilo provider is disabled in server settings.", }); - }, - interruptTurn: (threadId) => - Effect.tryPromise({ - try: () => manager.interruptTurn(threadId), - catch: (cause) => toRequestError(threadId, "session/abort", cause), - }), - respondToRequest: (threadId, requestId, decision) => - Effect.tryPromise({ - try: () => manager.respondToRequest(threadId, requestId, decision), - catch: (cause) => toRequestError(threadId, "permission/reply", cause), - }), - respondToUserInput: (threadId, requestId, answers) => - Effect.tryPromise({ - try: () => manager.respondToUserInput(threadId, requestId, answers), - catch: (cause) => toRequestError(threadId, "question/reply", cause), + } + const binaryPath = resolveBinaryPath(); + return yield* Effect.tryPromise({ + try: () => + manager.startSession({ + ...input, + kilo: { binaryPath }, + } as KiloSessionStartInput), + catch: (cause) => toRequestError(input.threadId, "session/start", cause), + }); + }), + sendTurn: (input) => { + if ((input.attachments?.length ?? 
0) > 0) { + return Effect.fail( + new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "sendTurn", + issue: "Kilo attachments are not wired yet.", }), - stopSession: (threadId) => - Effect.sync(() => { - manager.stopSession(threadId); - }), - listSessions: () => Effect.sync(() => manager.listSessions()), - hasSession: (threadId) => Effect.sync(() => manager.hasSession(threadId)), - readThread: (threadId) => - Effect.tryPromise({ - try: () => manager.readThread(threadId), - catch: (cause) => toRequestError(threadId, "session/messages", cause), - }), - rollbackThread: (threadId, numTurns) => { - if (!Number.isInteger(numTurns) || numTurns < 1) { - return Effect.fail( - new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "rollbackThread", - issue: "numTurns must be an integer >= 1.", - }), - ); - } + ); + } - return Effect.tryPromise({ - try: () => manager.rollbackThread(threadId), - catch: (cause) => toRequestError(threadId, "session/revert", cause), - }); - }, - stopAll: () => - Effect.sync(() => { - manager.stopAll(); + return Effect.tryPromise({ + try: () => manager.sendTurn(input), + catch: (cause) => toRequestError(input.threadId, "session/prompt_async", cause), + }); + }, + interruptTurn: (threadId) => + Effect.tryPromise({ + try: () => manager.interruptTurn(threadId), + catch: (cause) => toRequestError(threadId, "session/abort", cause), + }), + respondToRequest: (threadId, requestId, decision) => + Effect.tryPromise({ + try: () => manager.respondToRequest(threadId, requestId, decision), + catch: (cause) => toRequestError(threadId, "permission/reply", cause), + }), + respondToUserInput: (threadId, requestId, answers) => + Effect.tryPromise({ + try: () => manager.respondToUserInput(threadId, requestId, answers), + catch: (cause) => toRequestError(threadId, "question/reply", cause), + }), + stopSession: (threadId) => + Effect.sync(() => { + manager.stopSession(threadId); + }), + listSessions: () => Effect.sync(() => 
manager.listSessions()), + hasSession: (threadId) => Effect.sync(() => manager.hasSession(threadId)), + readThread: (threadId) => + Effect.tryPromise({ + try: () => manager.readThread(threadId), + catch: (cause) => toRequestError(threadId, "session/messages", cause), + }), + rollbackThread: (threadId, numTurns) => { + if (!Number.isInteger(numTurns) || numTurns < 1) { + return Effect.fail( + new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "rollbackThread", + issue: "numTurns must be an integer >= 1.", }), - streamEvents: Stream.fromQueue(runtimeEventQueue), - } satisfies KiloAdapterShape; + ); + } - return service; - }), - ); -} + return Effect.tryPromise({ + try: () => manager.rollbackThread(threadId, numTurns), + catch: (cause) => toRequestError(threadId, "session/revert", cause), + }); + }, + stopAll: () => + Effect.sync(() => { + manager.stopAll(); + }), + streamEvents: Stream.fromQueue(runtimeEventQueue), + }; + + return service; +}); -export const KiloAdapterLive = makeKiloAdapterLive(); +// Re-export so callers using the public symbol name still resolve. +export { ProviderAdapterRequestError }; diff --git a/apps/server/src/provider/Layers/KiloProvider.ts b/apps/server/src/provider/Layers/KiloProvider.ts new file mode 100644 index 00000000000..768297b54c3 --- /dev/null +++ b/apps/server/src/provider/Layers/KiloProvider.ts @@ -0,0 +1,215 @@ +/** + * KiloProvider — snapshot probe for the Kilo Code provider. + * + * Kilo is a fork of OpenCode and exposes the same HTTP+SSE API. The probe + * is per-instance: it uses the per-driver `KiloSettings` (currently + * `GenericProviderSettings`) to resolve `binaryPath`, then runs + * `kilo --version` to confirm the binary is installed. Authentication is + * not validated here — the Kilo server handles that lazily on first + * session. Custom models are surfaced via `customModels`. 
+ * + * Two helpers are exported: + * - `checkKiloProviderStatus` — full probe used by the driver's + * `makeManagedServerProvider` refresh. + * - `makePendingKiloProvider` — synchronous "checking…" snapshot that + * `makeManagedServerProvider` publishes as the initial value. + * + * @module provider/Layers/KiloProvider + */ +import { + ProviderDriverKind, + type GenericProviderSettings, + type ModelCapabilities, +} from "@t3tools/contracts"; +import { Effect, Option, Path, Result } from "effect"; +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; +import { createModelCapabilities } from "@t3tools/shared/model"; + +import { + buildServerProvider, + DEFAULT_TIMEOUT_MS, + detailFromResult, + isCommandMissingCause, + parseGenericCliVersion, + providerModelsFromSettings, + spawnAndCollect, + type ServerProviderDraft, +} from "../providerSnapshot.ts"; + +const PROVIDER = ProviderDriverKind.make("kilo"); +const KILO_PRESENTATION = { + displayName: "Kilo Code", + showInteractionModeToggle: true, +} as const; + +const DEFAULT_KILO_MODEL_CAPABILITIES: ModelCapabilities = createModelCapabilities({ + optionDescriptors: [], +}); + +export type KiloSettings = GenericProviderSettings; + +const runKiloCommand = Effect.fn("runKiloCommand")(function* ( + kiloSettings: KiloSettings, + args: ReadonlyArray, + environment: NodeJS.ProcessEnv = process.env, +) { + const binaryPath = kiloSettings.binaryPath.trim() || "kilo"; + const command = ChildProcess.make(binaryPath, [...args], { + env: environment, + shell: process.platform === "win32", + }); + return yield* spawnAndCollect(binaryPath, command); +}); + +export const checkKiloProviderStatus = Effect.fn("checkKiloProviderStatus")(function* ( + kiloSettings: KiloSettings, + environment: NodeJS.ProcessEnv = process.env, +): Effect.fn.Return< + ServerProviderDraft, + never, + ChildProcessSpawner.ChildProcessSpawner | Path.Path +> { + const checkedAt = new Date().toISOString(); + const allModels = 
providerModelsFromSettings( + [], + PROVIDER, + kiloSettings.customModels, + DEFAULT_KILO_MODEL_CAPABILITIES, + ); + + if (!kiloSettings.enabled) { + return buildServerProvider({ + presentation: KILO_PRESENTATION, + enabled: false, + checkedAt, + models: allModels, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Kilo is disabled in T3 Code settings.", + }, + }); + } + + const versionProbe = yield* runKiloCommand(kiloSettings, ["--version"], environment).pipe( + Effect.timeoutOption(DEFAULT_TIMEOUT_MS), + Effect.result, + ); + + if (Result.isFailure(versionProbe)) { + const error = versionProbe.failure; + return buildServerProvider({ + presentation: KILO_PRESENTATION, + enabled: kiloSettings.enabled, + checkedAt, + models: allModels, + probe: { + installed: !isCommandMissingCause(error), + version: null, + status: "error", + auth: { status: "unknown" }, + message: isCommandMissingCause(error) + ? "Kilo CLI (`kilo`) is not installed or not on PATH." + : `Failed to execute Kilo CLI health check: ${error instanceof Error ? error.message : String(error)}.`, + }, + }); + } + + if (Option.isNone(versionProbe.success)) { + return buildServerProvider({ + presentation: KILO_PRESENTATION, + enabled: kiloSettings.enabled, + checkedAt, + models: allModels, + probe: { + installed: true, + version: null, + status: "error", + auth: { status: "unknown" }, + message: "Kilo CLI is installed but failed to run. Timed out while running command.", + }, + }); + } + + const version = versionProbe.success.value; + const parsedVersion = parseGenericCliVersion(`${version.stdout}\n${version.stderr}`); + if (version.code !== 0) { + const detail = detailFromResult(version); + return buildServerProvider({ + presentation: KILO_PRESENTATION, + enabled: kiloSettings.enabled, + checkedAt, + models: allModels, + probe: { + installed: true, + version: parsedVersion, + status: "error", + auth: { status: "unknown" }, + message: detail + ? 
`Kilo CLI is installed but failed to run. ${detail}` + : "Kilo CLI is installed but failed to run.", + }, + }); + } + + return buildServerProvider({ + presentation: KILO_PRESENTATION, + enabled: true, + checkedAt, + models: allModels, + probe: { + installed: true, + version: parsedVersion, + status: "ready", + auth: { + status: "authenticated", + type: "kilo", + }, + message: parsedVersion + ? `Kilo v${parsedVersion} detected.` + : "Kilo CLI detected.", + }, + }); +}); + +export const makePendingKiloProvider = (kiloSettings: KiloSettings): ServerProviderDraft => { + const checkedAt = new Date().toISOString(); + const models = providerModelsFromSettings( + [], + PROVIDER, + kiloSettings.customModels, + DEFAULT_KILO_MODEL_CAPABILITIES, + ); + + if (!kiloSettings.enabled) { + return buildServerProvider({ + presentation: KILO_PRESENTATION, + enabled: false, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Kilo is disabled in T3 Code settings.", + }, + }); + } + + return buildServerProvider({ + presentation: KILO_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Kilo provider status has not been checked in this session yet.", + }, + }); +}; diff --git a/apps/server/src/provider/Layers/OpenCodeAdapter.test.ts b/apps/server/src/provider/Layers/OpenCodeAdapter.test.ts index 9a391b55394..370e5c028ff 100644 --- a/apps/server/src/provider/Layers/OpenCodeAdapter.test.ts +++ b/apps/server/src/provider/Layers/OpenCodeAdapter.test.ts @@ -2,14 +2,20 @@ import assert from "node:assert/strict"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { it } from "@effect/vitest"; -import { Effect, Layer, Option } from "effect"; +import { Context, Effect, Exit, Fiber, Layer, Option, Schema, Scope, Stream } from "effect"; import { beforeEach } from "vitest"; -import { 
ThreadId } from "@t3tools/contracts"; +import { + OpenCodeSettings, + ProviderDriverKind, + ProviderInstanceId, + ThreadId, +} from "@t3tools/contracts"; +import { createModelSelection } from "@t3tools/shared/model"; import { ServerConfig } from "../../config.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; import { ProviderSessionDirectory } from "../Services/ProviderSessionDirectory.ts"; -import { OpenCodeAdapter } from "../Services/OpenCodeAdapter.ts"; +import type { OpenCodeAdapterShape } from "../Services/OpenCodeAdapter.ts"; import { OpenCodeRuntime, OpenCodeRuntimeError, @@ -17,10 +23,15 @@ import { } from "../opencodeRuntime.ts"; import { appendOpenCodeAssistantTextDelta, - makeOpenCodeAdapterLive, + makeOpenCodeAdapter, mergeOpenCodeAssistantText, } from "./OpenCodeAdapter.ts"; +// Test-local service tag so the rest of the file can keep using `yield* OpenCodeAdapter`. +class OpenCodeAdapter extends Context.Service()( + "test/OpenCodeAdapter", +) {} + const asThreadId = (value: string): ThreadId => ThreadId.make(value); type MessageEntry = { @@ -39,6 +50,7 @@ const runtimeMock = { abortCalls: [] as string[], closeCalls: [] as string[], revertCalls: [] as Array<{ sessionID: string; messageID?: string }>, + promptCalls: [] as Array, promptAsyncError: null as Error | null, closeError: null as Error | null, messages: [] as MessageEntry[], @@ -51,6 +63,7 @@ const runtimeMock = { this.state.abortCalls.length = 0; this.state.closeCalls.length = 0; this.state.revertCalls.length = 0; + this.state.promptCalls.length = 0; this.state.promptAsyncError = null; this.state.closeError = null; this.state.messages = []; @@ -111,7 +124,8 @@ const OpenCodeRuntimeTestDouble: OpenCodeRuntimeShape = { abort: async ({ sessionID }: { sessionID: string }) => { runtimeMock.state.abortCalls.push(sessionID); }, - promptAsync: async () => { + promptAsync: async (input: unknown) => { + runtimeMock.state.promptCalls.push(input); if (runtimeMock.state.promptAsyncError) 
{ throw runtimeMock.state.promptAsyncError; } @@ -165,7 +179,24 @@ const providerSessionDirectoryTestLayer = Layer.succeed(ProviderSessionDirectory listBindings: () => Effect.succeed([]), }); -const OpenCodeAdapterTestLayer = makeOpenCodeAdapterLive().pipe( +// The adapter now receives its settings as a plain argument (the old design +// read from `ServerSettingsService` internally). The test-only +// `ServerSettingsService` below is still kept because other dependencies in +// the layer graph reach for it — but the routing values the assertions +// probe (serverUrl, serverPassword) must be threaded directly through the +// decoded `OpenCodeSettings`. +const openCodeAdapterTestSettings = Schema.decodeSync(OpenCodeSettings)({ + binaryPath: "fake-opencode", + serverUrl: "http://127.0.0.1:9999", + serverPassword: "secret-password", +}); + +const OpenCodeAdapterTestLayer = Layer.effect( + OpenCodeAdapter, + Effect.gen(function* () { + return yield* makeOpenCodeAdapter(openCodeAdapterTestSettings); + }), +).pipe( Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), Layer.provideMerge( @@ -196,7 +227,7 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { const adapter = yield* OpenCodeAdapter; const session = yield* adapter.startSession({ - provider: "opencode", + provider: ProviderDriverKind.make("opencode"), threadId: asThreadId("thread-opencode"), runtimeMode: "full-access", }); @@ -215,7 +246,7 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { Effect.gen(function* () { const adapter = yield* OpenCodeAdapter; yield* adapter.startSession({ - provider: "opencode", + provider: ProviderDriverKind.make("opencode"), threadId: asThreadId("thread-opencode"), runtimeMode: "full-access", }); @@ -230,16 +261,42 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { }), ); + it.effect("emits one session.exited event when 
stopping a session", () => + Effect.gen(function* () { + const adapter = yield* OpenCodeAdapter; + const threadId = asThreadId("thread-opencode-stop-event"); + const eventsFiber = yield* adapter.streamEvents.pipe( + Stream.filter((event) => event.threadId === threadId), + Stream.take(3), + Stream.runCollect, + Effect.forkChild, + ); + + yield* adapter.startSession({ + provider: ProviderDriverKind.make("opencode"), + threadId, + runtimeMode: "full-access", + }); + yield* adapter.stopSession(threadId); + + const events = Array.from(yield* Fiber.join(eventsFiber).pipe(Effect.timeout("1 second"))); + assert.deepEqual( + events.map((event) => event.type), + ["session.started", "thread.started", "session.exited"], + ); + }), + ); + it.effect("clears session state even when cleanup finalizers throw", () => Effect.gen(function* () { const adapter = yield* OpenCodeAdapter; yield* adapter.startSession({ - provider: "opencode", + provider: ProviderDriverKind.make("opencode"), threadId: asThreadId("thread-stop-all-a"), runtimeMode: "full-access", }); yield* adapter.startSession({ - provider: "opencode", + provider: ProviderDriverKind.make("opencode"), threadId: asThreadId("thread-stop-all-b"), runtimeMode: "full-access", }); @@ -261,11 +318,44 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { }), ); + it.effect("completes streamEvents when the adapter scope closes", () => + Effect.gen(function* () { + const scope = yield* Scope.make("sequential"); + let scopeClosed = false; + + try { + const adapterLayer = Layer.effect( + OpenCodeAdapter, + makeOpenCodeAdapter(openCodeAdapterTestSettings), + ).pipe( + Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), + Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), + Layer.provideMerge(ServerSettingsService.layerTest()), + Layer.provideMerge(providerSessionDirectoryTestLayer), + Layer.provideMerge(NodeServices.layer), + ); + const context = yield* 
Layer.buildWithScope(adapterLayer, scope); + const adapter = yield* Effect.service(OpenCodeAdapter).pipe(Effect.provide(context)); + const eventsFiber = yield* adapter.streamEvents.pipe(Stream.runCollect, Effect.forkChild); + + yield* Scope.close(scope, Exit.void); + scopeClosed = true; + + const exit = yield* Fiber.await(eventsFiber).pipe(Effect.timeout("1 second")); + assert.equal(Exit.hasInterrupts(exit), true); + } finally { + if (!scopeClosed) { + yield* Scope.close(scope, Exit.void).pipe(Effect.ignore); + } + } + }), + ); + it.effect("rolls back session state when sendTurn fails before OpenCode accepts the prompt", () => Effect.gen(function* () { const adapter = yield* OpenCodeAdapter; yield* adapter.startSession({ - provider: "opencode", + provider: ProviderDriverKind.make("opencode"), threadId: asThreadId("thread-send-turn-failure"), runtimeMode: "full-access", }); @@ -276,7 +366,7 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { threadId: asThreadId("thread-send-turn-failure"), input: "Fix it", modelSelection: { - provider: "opencode", + instanceId: ProviderInstanceId.make("opencode"), model: "openai/gpt-5", }, }) @@ -299,12 +389,158 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { }), ); + it.effect("passes agent and variant options for the adapter's bound custom instance id", () => { + const customInstanceId = ProviderInstanceId.make("opencode_zen"); + const adapterLayer = Layer.effect( + OpenCodeAdapter, + Effect.gen(function* () { + return yield* makeOpenCodeAdapter(openCodeAdapterTestSettings, { + instanceId: customInstanceId, + }); + }), + ).pipe( + Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), + Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), + Layer.provideMerge(ServerSettingsService.layerTest()), + Layer.provideMerge(providerSessionDirectoryTestLayer), + Layer.provideMerge(NodeServices.layer), + ); + + return Effect.gen(function* () { + const 
adapter = yield* OpenCodeAdapter; + yield* adapter.startSession({ + provider: ProviderDriverKind.make("opencode"), + threadId: asThreadId("thread-custom-instance"), + runtimeMode: "full-access", + }); + + yield* adapter.sendTurn({ + threadId: asThreadId("thread-custom-instance"), + input: "Fix it", + modelSelection: createModelSelection( + ProviderInstanceId.make("opencode_zen"), + "anthropic/claude-sonnet-4-5", + [ + { id: "agent", value: "github-copilot" }, + { id: "variant", value: "high" }, + ], + ), + }); + + assert.deepEqual(runtimeMock.state.promptCalls.at(-1), { + sessionID: "http://127.0.0.1:9999/session", + model: { + providerID: "anthropic", + modelID: "claude-sonnet-4-5", + }, + agent: "github-copilot", + variant: "high", + parts: [{ type: "text", text: "Fix it" }], + }); + }).pipe(Effect.provide(adapterLayer)); + }); + + it.effect("uses the bound custom instance id for fallback sendTurn model selection", () => { + const customInstanceId = ProviderInstanceId.make("opencode_zen"); + const adapterLayer = Layer.effect( + OpenCodeAdapter, + Effect.gen(function* () { + return yield* makeOpenCodeAdapter(openCodeAdapterTestSettings, { + instanceId: customInstanceId, + }); + }), + ).pipe( + Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), + Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), + Layer.provideMerge(ServerSettingsService.layerTest()), + Layer.provideMerge(providerSessionDirectoryTestLayer), + Layer.provideMerge(NodeServices.layer), + ); + + return Effect.gen(function* () { + const adapter = yield* OpenCodeAdapter; + const threadId = asThreadId("thread-custom-instance-fallback-model"); + yield* adapter.startSession({ + provider: ProviderDriverKind.make("opencode"), + threadId, + runtimeMode: "full-access", + modelSelection: createModelSelection( + ProviderInstanceId.make("opencode_zen"), + "anthropic/claude-sonnet-4-5", + ), + }); + + yield* adapter.sendTurn({ + threadId, + input: "Fix it", + }); + 
+ assert.deepEqual(runtimeMock.state.promptCalls.at(-1), { + sessionID: "http://127.0.0.1:9999/session", + model: { + providerID: "anthropic", + modelID: "claude-sonnet-4-5", + }, + parts: [{ type: "text", text: "Fix it" }], + }); + }).pipe(Effect.provide(adapterLayer)); + }); + + it.effect("rejects sendTurn model selections for another instance id", () => { + const customInstanceId = ProviderInstanceId.make("opencode_zen"); + const adapterLayer = Layer.effect( + OpenCodeAdapter, + Effect.gen(function* () { + return yield* makeOpenCodeAdapter(openCodeAdapterTestSettings, { + instanceId: customInstanceId, + }); + }), + ).pipe( + Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), + Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), + Layer.provideMerge(ServerSettingsService.layerTest()), + Layer.provideMerge(providerSessionDirectoryTestLayer), + Layer.provideMerge(NodeServices.layer), + ); + + return Effect.gen(function* () { + const adapter = yield* OpenCodeAdapter; + const threadId = asThreadId("thread-custom-instance-wrong-selection"); + yield* adapter.startSession({ + provider: ProviderDriverKind.make("opencode"), + threadId, + runtimeMode: "full-access", + }); + + const error = yield* adapter + .sendTurn({ + threadId, + input: "Fix it", + modelSelection: createModelSelection( + ProviderInstanceId.make("opencode"), + "anthropic/claude-sonnet-4-5", + ), + }) + .pipe(Effect.flip); + + assert.equal(error._tag, "ProviderAdapterValidationError"); + if (error._tag !== "ProviderAdapterValidationError") { + throw new Error("Unexpected error type"); + } + assert.equal( + error.issue, + "OpenCode model selection is bound to instance 'opencode', expected 'opencode_zen'.", + ); + assert.deepEqual(runtimeMock.state.promptCalls, []); + }).pipe(Effect.provide(adapterLayer)); + }); + it.effect("reverts the full thread when rollback removes every assistant turn", () => Effect.gen(function* () { const adapter = yield* 
OpenCodeAdapter; const threadId = asThreadId("thread-rollback-all"); yield* adapter.startSession({ - provider: "opencode", + provider: ProviderDriverKind.make("opencode"), threadId, runtimeMode: "full-access", }); @@ -396,9 +632,14 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { close: () => Effect.void, }; - const adapterLayer = makeOpenCodeAdapterLive({ - nativeEventLogger, - }).pipe( + const adapterLayer = Layer.effect( + OpenCodeAdapter, + Effect.gen(function* () { + return yield* makeOpenCodeAdapter(openCodeAdapterTestSettings, { + nativeEventLogger, + }); + }), + ).pipe( Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), Layer.provideMerge( @@ -419,7 +660,7 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { const session = yield* Effect.gen(function* () { const adapter = yield* OpenCodeAdapter; const started = yield* adapter.startSession({ - provider: "opencode", + provider: ProviderDriverKind.make("opencode"), threadId: asThreadId("thread-native-log"), runtimeMode: "full-access", }); @@ -475,9 +716,12 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { close: () => Effect.void, }; - const adapterLayer = makeOpenCodeAdapterLive({ - nativeEventLogger, - }).pipe( + const adapterLayer = Layer.effect( + OpenCodeAdapter, + makeOpenCodeAdapter(openCodeAdapterTestSettings, { + nativeEventLogger, + }), + ).pipe( Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), Layer.provideMerge( @@ -504,7 +748,7 @@ it.layer(OpenCodeAdapterTestLayer)("OpenCodeAdapterLive", (it) => { const { sessions, closeCallsDuringRun } = yield* Effect.gen(function* () { const adapter = yield* OpenCodeAdapter; yield* adapter.startSession({ - provider: "opencode", + provider: ProviderDriverKind.make("opencode"), threadId: 
asThreadId("thread-native-log-failure"), runtimeMode: "full-access", }); diff --git a/apps/server/src/provider/Layers/OpenCodeAdapter.ts b/apps/server/src/provider/Layers/OpenCodeAdapter.ts index 9412146036b..a7b179ab1c3 100644 --- a/apps/server/src/provider/Layers/OpenCodeAdapter.ts +++ b/apps/server/src/provider/Layers/OpenCodeAdapter.ts @@ -1,7 +1,8 @@ -import { randomUUID } from "node:crypto"; - import { EventId, + type OpenCodeSettings, + ProviderDriverKind, + ProviderInstanceId, type ProviderRuntimeEvent, type ProviderSession, RuntimeItemId, @@ -11,12 +12,12 @@ import { TurnId, type UserInputQuestion, } from "@t3tools/contracts"; -import { Cause, Effect, Exit, Layer, Queue, Ref, Scope, Stream } from "effect"; +import { Cause, Effect, Exit, Queue, Random, Ref, Scope, Stream } from "effect"; import type { OpencodeClient, Part, PermissionRequest, QuestionRequest } from "@opencode-ai/sdk/v2"; +import { getModelSelectionStringOptionValue } from "@t3tools/shared/model"; import { resolveAttachmentPath } from "../../attachmentStore.ts"; import { ServerConfig } from "../../config.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; import { type EventNdjsonLogger, makeEventNdjsonLogger } from "./EventNdjsonLogger.ts"; import { ProviderAdapterProcessError, @@ -25,8 +26,7 @@ import { ProviderAdapterSessionNotFoundError, ProviderAdapterValidationError, } from "../Errors.ts"; -import { OpenCodeAdapter, type OpenCodeAdapterShape } from "../Services/OpenCodeAdapter.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; +import { type OpenCodeAdapterShape } from "../Services/OpenCodeAdapter.ts"; import { buildOpenCodePermissionRules, OpenCodeRuntime, @@ -41,7 +41,7 @@ import { type OpenCodeServerConnection, } from "../opencodeRuntime.ts"; -const PROVIDER = "opencode" as const; +const PROVIDER = ProviderDriverKind.make("opencode"); interface OpenCodeTurnSnapshot { readonly id: TurnId; @@ -89,6 +89,8 @@ interface 
OpenCodeSessionContext { } export interface OpenCodeAdapterLiveOptions { + readonly instanceId?: ProviderInstanceId; + readonly environment?: NodeJS.ProcessEnv; readonly nativeEventLogPath?: string; readonly nativeEventLogger?: EventNdjsonLogger; } @@ -125,35 +127,38 @@ const toProcessError = (threadId: ThreadId, cause: unknown): ProviderAdapterProc cause, }); -function buildEventBase(input: { +const buildEventBase = (input: { readonly threadId: ThreadId; readonly turnId?: TurnId | undefined; readonly itemId?: string | undefined; readonly requestId?: string | undefined; readonly createdAt?: string | undefined; readonly raw?: unknown; -}): Pick< - ProviderRuntimeEvent, - "eventId" | "provider" | "threadId" | "createdAt" | "turnId" | "itemId" | "requestId" | "raw" -> { - return { - eventId: EventId.make(randomUUID()), - provider: PROVIDER, - threadId: input.threadId, - createdAt: input.createdAt ?? nowIso(), - ...(input.turnId ? { turnId: input.turnId } : {}), - ...(input.itemId ? { itemId: RuntimeItemId.make(input.itemId) } : {}), - ...(input.requestId ? { requestId: RuntimeRequestId.make(input.requestId) } : {}), - ...(input.raw !== undefined - ? { - raw: { - source: "opencode.sdk.event", - payload: input.raw, - }, - } - : {}), - }; -} +}): Effect.Effect< + Pick< + ProviderRuntimeEvent, + "eventId" | "provider" | "threadId" | "createdAt" | "turnId" | "itemId" | "requestId" | "raw" + > +> => + Random.nextUUIDv4.pipe( + Effect.map((uuid) => ({ + eventId: EventId.make(uuid), + provider: PROVIDER, + threadId: input.threadId, + createdAt: input.createdAt ?? nowIso(), + ...(input.turnId ? { turnId: input.turnId } : {}), + ...(input.itemId ? { itemId: RuntimeItemId.make(input.itemId) } : {}), + ...(input.requestId ? { requestId: RuntimeRequestId.make(input.requestId) } : {}), + ...(input.raw !== undefined + ? 
{ + raw: { + source: "opencode.sdk.event", + payload: input.raw, + }, + } + : {}), + })), + ); function toToolLifecycleItemType(toolName: string): ToolLifecycleItemType { const normalized = toolName.toLowerCase(); @@ -245,13 +250,19 @@ function ensureSessionContext( ): OpenCodeSessionContext { const session = sessions.get(threadId); if (!session) { - throw new ProviderAdapterSessionNotFoundError({ provider: PROVIDER, threadId }); + throw new ProviderAdapterSessionNotFoundError({ + provider: PROVIDER, + threadId, + }); } // `ensureSessionContext` is a sync gate used from both sync helpers and // Effect bodies. `Ref.getUnsafe` is an atomic read of the backing cell — // no fiber suspension required, which keeps this callable everywhere. if (Ref.getUnsafe(session.stopped)) { - throw new ProviderAdapterSessionClosedError({ provider: PROVIDER, threadId }); + throw new ProviderAdapterSessionClosedError({ + provider: PROVIDER, + threadId, + }); } return session; } @@ -419,7 +430,7 @@ const stopOpenCodeContext = Effect.fn("stopOpenCodeContext")(function* ( ) { // Race-safe one-shot: first caller flips the flag, everyone else no-ops. if (yield* Ref.getAndSet(context.stopped, true)) { - return; + return false; } // Best-effort remote abort. The scope close below tears down the local @@ -433,955 +444,990 @@ const stopOpenCodeContext = Effect.fn("stopOpenCodeContext")(function* ( // runs each finalizer we registered — the `AbortController.abort()` call, // the child-process termination, etc. yield* Scope.close(context.sessionScope, Exit.void); + return true; }); -export function makeOpenCodeAdapterLive(options?: OpenCodeAdapterLiveOptions) { - return Layer.effect( - OpenCodeAdapter, - Effect.gen(function* () { - const serverConfig = yield* ServerConfig; - const serverSettings = yield* ServerSettingsService; - const openCodeRuntime = yield* OpenCodeRuntime; - const nativeEventLogger = - options?.nativeEventLogger ?? - (options?.nativeEventLogPath !== undefined - ? 
yield* makeEventNdjsonLogger(options.nativeEventLogPath, { - stream: "native", - }) - : undefined); - // Only close loggers we created. If the caller passed one in via - // `options.nativeEventLogger`, they own its lifecycle. - const managedNativeEventLogger = - options?.nativeEventLogger === undefined ? nativeEventLogger : undefined; - const runtimeEvents = yield* Queue.unbounded(); - const sessions = new Map(); - - // Layer-level finalizer: when the adapter layer shuts down, stop every - // session. Each session's `Scope.close` tears down its spawned OpenCode - // server (via the `ChildProcessSpawner` finalizer installed in - // `startOpenCodeServerProcess`) and interrupts the forked event/exit - // fibers. Consumers that can't reason about Effect scopes therefore - // cannot leak OpenCode child processes by forgetting to call `stopAll`. - yield* Effect.addFinalizer(() => - Effect.gen(function* () { - const contexts = [...sessions.values()]; - sessions.clear(); - // `ignoreCause` swallows both typed failures (none here) and defects - // from throwing scope finalizers so a sibling's death can't interrupt - // the remaining cleanups. - yield* Effect.forEach( - contexts, - (context) => Effect.ignoreCause(stopOpenCodeContext(context)), - { concurrency: "unbounded", discard: true }, - ); - // Close the logger AFTER session teardown so any final lifecycle - // events emitted during shutdown still get written. `close` flushes - // the `Logger.batched` window and closes each per-thread - // `RotatingFileSink` handle owned by the logger's internal scope. 
- if (managedNativeEventLogger !== undefined) { - yield* managedNativeEventLogger.close(); - } - }), - ); - - const emit = (event: ProviderRuntimeEvent) => - Queue.offer(runtimeEvents, event).pipe(Effect.asVoid); - const writeNativeEvent = ( - threadId: ThreadId, - event: { - readonly observedAt: string; - readonly event: Record; +export function makeOpenCodeAdapter( + openCodeSettings: OpenCodeSettings, + options?: OpenCodeAdapterLiveOptions, +) { + return Effect.gen(function* () { + const boundInstanceId = options?.instanceId ?? ProviderInstanceId.make("opencode"); + const serverConfig = yield* ServerConfig; + const openCodeRuntime = yield* OpenCodeRuntime; + const nativeEventLogger = + options?.nativeEventLogger ?? + (options?.nativeEventLogPath !== undefined + ? yield* makeEventNdjsonLogger(options.nativeEventLogPath, { + stream: "native", + }) + : undefined); + // Only close loggers we created. If the caller passed one in via + // `options.nativeEventLogger`, they own its lifecycle. + const managedNativeEventLogger = + options?.nativeEventLogger === undefined ? nativeEventLogger : undefined; + const runtimeEvents = yield* Queue.unbounded(); + const sessions = new Map(); + + // Layer-level finalizer: when the adapter layer shuts down, stop every + // session. Each session's `Scope.close` tears down its spawned OpenCode + // server (via the `ChildProcessSpawner` finalizer installed in + // `startOpenCodeServerProcess`) and interrupts the forked event/exit + // fibers. Consumers that can't reason about Effect scopes therefore + // cannot leak OpenCode child processes by forgetting to call `stopAll`. + yield* Effect.addFinalizer(() => + Effect.gen(function* () { + const contexts = [...sessions.values()]; + sessions.clear(); + // `ignoreCause` swallows both typed failures (none here) and defects + // from throwing scope finalizers so a sibling's death can't interrupt + // the remaining cleanups. 
+ yield* Effect.forEach( + contexts, + (context) => Effect.ignoreCause(stopOpenCodeContext(context)), + { concurrency: "unbounded", discard: true }, + ); + // Close the logger AFTER session teardown so any final lifecycle + // events emitted during shutdown still get written. `close` flushes + // the `Logger.batched` window and closes each per-thread + // `RotatingFileSink` handle owned by the logger's internal scope. + if (managedNativeEventLogger !== undefined) { + yield* managedNativeEventLogger.close(); + } + }).pipe(Effect.ensuring(Queue.shutdown(runtimeEvents))), + ); + + const emit = (event: ProviderRuntimeEvent) => + Queue.offer(runtimeEvents, event).pipe(Effect.asVoid); + const writeNativeEvent = ( + threadId: ThreadId, + event: { + readonly observedAt: string; + readonly event: Record; + }, + ) => (nativeEventLogger ? nativeEventLogger.write(event, threadId) : Effect.void); + const writeNativeEventBestEffort = ( + threadId: ThreadId, + event: { + readonly observedAt: string; + readonly event: Record; + }, + ) => writeNativeEvent(threadId, event).pipe(Effect.catchCause(() => Effect.void)); + + const emitUnexpectedExit = Effect.fn("emitUnexpectedExit")(function* ( + context: OpenCodeSessionContext, + message: string, + ) { + // Atomic one-shot: two fibers can race here (the event-pump on stream + // failure and the server-exit watcher). `getAndSet` flips the flag in + // a single step so the loser observes `true` and returns; a plain + // `Ref.get` would let both racers slip past and emit duplicates. + if (yield* Ref.getAndSet(context.stopped, true)) { + return; + } + const turnId = context.activeTurnId; + sessions.delete(context.session.threadId); + // Emit lifecycle events BEFORE tearing down the scope. Both call sites + // run this inside a fiber forked via `Effect.forkIn(context.sessionScope)`; + // closing that scope triggers the fiber-interrupt finalizer, so any + // subsequent yield point would unwind and silently drop these emits. 
+ yield* emit({ + ...(yield* buildEventBase({ + threadId: context.session.threadId, + turnId, + })), + type: "runtime.error", + payload: { + message, + class: "transport_error", }, - ) => (nativeEventLogger ? nativeEventLogger.write(event, threadId) : Effect.void); - const writeNativeEventBestEffort = ( - threadId: ThreadId, - event: { - readonly observedAt: string; - readonly event: Record; + }).pipe(Effect.ignore); + yield* emit({ + ...(yield* buildEventBase({ + threadId: context.session.threadId, + turnId, + })), + type: "session.exited", + payload: { + reason: message, + recoverable: false, + exitKind: "error", }, - ) => writeNativeEvent(threadId, event).pipe(Effect.catchCause(() => Effect.void)); - - const emitUnexpectedExit = Effect.fn("emitUnexpectedExit")(function* ( - context: OpenCodeSessionContext, - message: string, - ) { - // Atomic one-shot: two fibers can race here (the event-pump on stream - // failure and the server-exit watcher). `getAndSet` flips the flag in - // a single step so the loser observes `true` and returns; a plain - // `Ref.get` would let both racers slip past and emit duplicates. - if (yield* Ref.getAndSet(context.stopped, true)) { - return; - } - const turnId = context.activeTurnId; - sessions.delete(context.session.threadId); - // Emit lifecycle events BEFORE tearing down the scope. Both call sites - // run this inside a fiber forked via `Effect.forkIn(context.sessionScope)`; - // closing that scope triggers the fiber-interrupt finalizer, so any - // subsequent yield point would unwind and silently drop these emits. + }).pipe(Effect.ignore); + // Inline the teardown that `stopOpenCodeContext` would do; we can't + // delegate to it because our `getAndSet` above already flipped the + // one-shot guard, so the call would no-op. 
+ yield* runOpenCodeSdk("session.abort", () => + context.client.session.abort({ sessionID: context.openCodeSessionId }), + ).pipe(Effect.ignore({ log: true })); + yield* Scope.close(context.sessionScope, Exit.void); + }); + + /** Emit content.delta and item.completed events for an assistant text part. */ + const emitAssistantTextDelta = Effect.fn("emitAssistantTextDelta")(function* ( + context: OpenCodeSessionContext, + part: Part, + turnId: TurnId | undefined, + raw: unknown, + ) { + const text = textFromPart(part); + if (text === undefined) { + return; + } + const previousText = context.emittedTextByPartId.get(part.id); + const { latestText, deltaToEmit } = mergeOpenCodeAssistantText(previousText, text); + context.emittedTextByPartId.set(part.id, latestText); + if (latestText !== text) { + context.partById.set( + part.id, + (part.type === "text" || part.type === "reasoning" + ? { ...part, text: latestText } + : part) satisfies Part, + ); + } + if (deltaToEmit.length > 0) { yield* emit({ - ...buildEventBase({ threadId: context.session.threadId, turnId }), - type: "runtime.error", + ...(yield* buildEventBase({ + threadId: context.session.threadId, + turnId, + itemId: part.id, + createdAt: + part.type === "text" || part.type === "reasoning" + ? 
isoFromEpochMs(part.time?.start) + : undefined, + raw, + })), + type: "content.delta", payload: { - message, - class: "transport_error", + streamKind: resolveTextStreamKind(part), + delta: deltaToEmit, }, - }).pipe(Effect.ignore); + }); + } + + if ( + part.type === "text" && + part.time?.end !== undefined && + !context.completedAssistantPartIds.has(part.id) + ) { + context.completedAssistantPartIds.add(part.id); yield* emit({ - ...buildEventBase({ threadId: context.session.threadId, turnId }), - type: "session.exited", + ...(yield* buildEventBase({ + threadId: context.session.threadId, + turnId, + itemId: part.id, + createdAt: isoFromEpochMs(part.time.end), + raw, + })), + type: "item.completed", payload: { - reason: message, - recoverable: false, - exitKind: "error", + itemType: "assistant_message", + status: "completed", + title: "Assistant message", + ...(latestText.length > 0 ? { detail: latestText } : {}), }, - }).pipe(Effect.ignore); - // Inline the teardown that `stopOpenCodeContext` would do; we can't - // delegate to it because our `getAndSet` above already flipped the - // one-shot guard, so the call would no-op. - yield* runOpenCodeSdk("session.abort", () => - context.client.session.abort({ sessionID: context.openCodeSessionId }), - ).pipe(Effect.ignore({ log: true })); - yield* Scope.close(context.sessionScope, Exit.void); + }); + } + }); + + const handleSubscribedEvent = Effect.fn("handleSubscribedEvent")(function* ( + context: OpenCodeSessionContext, + event: OpenCodeSubscribedEvent, + ) { + const payloadSessionId = + "properties" in event ? 
(event.properties as { sessionID?: unknown }).sessionID : undefined; + if (payloadSessionId !== context.openCodeSessionId) { + return; + } + + const turnId = context.activeTurnId; + yield* writeNativeEventBestEffort(context.session.threadId, { + observedAt: nowIso(), + event: { + provider: PROVIDER, + threadId: context.session.threadId, + providerThreadId: context.openCodeSessionId, + type: event.type, + ...(turnId ? { turnId } : {}), + payload: event, + }, }); - /** Emit content.delta and item.completed events for an assistant text part. */ - const emitAssistantTextDelta = Effect.fn("emitAssistantTextDelta")(function* ( - context: OpenCodeSessionContext, - part: Part, - turnId: TurnId | undefined, - raw: unknown, - ) { - const text = textFromPart(part); - if (text === undefined) { - return; + switch (event.type) { + case "message.updated": { + context.messageRoleById.set(event.properties.info.id, event.properties.info.role); + if (event.properties.info.role === "assistant") { + for (const part of context.partById.values()) { + if (part.messageID !== event.properties.info.id) { + continue; + } + yield* emitAssistantTextDelta(context, part, turnId, event); + } + } + break; } - const previousText = context.emittedTextByPartId.get(part.id); - const { latestText, deltaToEmit } = mergeOpenCodeAssistantText(previousText, text); - context.emittedTextByPartId.set(part.id, latestText); - if (latestText !== text) { - context.partById.set( - part.id, - (part.type === "text" || part.type === "reasoning" - ? 
{ ...part, text: latestText } - : part) satisfies Part, - ); + + case "message.removed": { + context.messageRoleById.delete(event.properties.messageID); + break; } - if (deltaToEmit.length > 0) { + + case "message.part.delta": { + const existingPart = context.partById.get(event.properties.partID); + if (!existingPart) { + break; + } + const role = messageRoleForPart(context, existingPart); + if (role !== "assistant") { + break; + } + const streamKind = resolveTextStreamKind(existingPart); + const delta = event.properties.delta; + if (delta.length === 0) { + break; + } + const previousText = + context.emittedTextByPartId.get(event.properties.partID) ?? + textFromPart(existingPart) ?? + ""; + const { nextText, deltaToEmit } = appendOpenCodeAssistantTextDelta(previousText, delta); + if (deltaToEmit.length === 0) { + break; + } + context.emittedTextByPartId.set(event.properties.partID, nextText); + if (existingPart.type === "text" || existingPart.type === "reasoning") { + context.partById.set(event.properties.partID, { + ...existingPart, + text: nextText, + }); + } yield* emit({ - ...buildEventBase({ + ...(yield* buildEventBase({ threadId: context.session.threadId, turnId, - itemId: part.id, - createdAt: - part.type === "text" || part.type === "reasoning" - ? 
isoFromEpochMs(part.time?.start) - : undefined, - raw, - }), + itemId: event.properties.partID, + raw: event, + })), type: "content.delta", payload: { - streamKind: resolveTextStreamKind(part), + streamKind, delta: deltaToEmit, }, }); + break; } - if ( - part.type === "text" && - part.time?.end !== undefined && - !context.completedAssistantPartIds.has(part.id) - ) { - context.completedAssistantPartIds.add(part.id); + case "message.part.updated": { + const part = event.properties.part; + context.partById.set(part.id, part); + const messageRole = messageRoleForPart(context, part); + + if (messageRole === "assistant") { + yield* emitAssistantTextDelta(context, part, turnId, event); + } + + if (part.type === "tool") { + const itemType = toToolLifecycleItemType(part.tool); + const title = + part.state.status === "running" ? (part.state.title ?? part.tool) : part.tool; + const detail = detailFromToolPart(part); + const payload = { + itemType, + ...(part.state.status === "error" + ? { status: "failed" as const } + : part.state.status === "completed" + ? { status: "completed" as const } + : { status: "inProgress" as const }), + ...(title ? { title } : {}), + ...(detail ? { detail } : {}), + data: { + tool: part.tool, + state: part.state, + }, + }; + const runtimeEvent: ProviderRuntimeEvent = { + ...(yield* buildEventBase({ + threadId: context.session.threadId, + turnId, + itemId: part.callID, + createdAt: toolStateCreatedAt(part), + raw: event, + })), + type: + part.state.status === "pending" + ? "item.started" + : part.state.status === "completed" || part.state.status === "error" + ? 
"item.completed" + : "item.updated", + payload, + }; + appendTurnItem(context, turnId, part); + yield* emit(runtimeEvent); + } + break; + } + + case "permission.asked": { + context.pendingPermissions.set(event.properties.id, event.properties); yield* emit({ - ...buildEventBase({ + ...(yield* buildEventBase({ threadId: context.session.threadId, turnId, - itemId: part.id, - createdAt: isoFromEpochMs(part.time.end), - raw, - }), - type: "item.completed", + requestId: event.properties.id, + raw: event, + })), + type: "request.opened", payload: { - itemType: "assistant_message", - status: "completed", - title: "Assistant message", - ...(latestText.length > 0 ? { detail: latestText } : {}), + requestType: mapPermissionToRequestType(event.properties.permission), + detail: + event.properties.patterns.length > 0 + ? event.properties.patterns.join("\n") + : event.properties.permission, + args: event.properties.metadata, }, }); + break; } - }); - const handleSubscribedEvent = Effect.fn("handleSubscribedEvent")(function* ( - context: OpenCodeSessionContext, - event: OpenCodeSubscribedEvent, - ) { - const payloadSessionId = - "properties" in event - ? (event.properties as { sessionID?: unknown }).sessionID - : undefined; - if (payloadSessionId !== context.openCodeSessionId) { - return; + case "permission.replied": { + context.pendingPermissions.delete(event.properties.requestID); + yield* emit({ + ...(yield* buildEventBase({ + threadId: context.session.threadId, + turnId, + requestId: event.properties.requestID, + raw: event, + })), + type: "request.resolved", + payload: { + requestType: "unknown", + decision: mapPermissionDecision(event.properties.reply), + }, + }); + break; } - const turnId = context.activeTurnId; - yield* writeNativeEventBestEffort(context.session.threadId, { - observedAt: nowIso(), - event: { - provider: PROVIDER, - threadId: context.session.threadId, - providerThreadId: context.openCodeSessionId, - type: event.type, - ...(turnId ? 
{ turnId } : {}), - payload: event, - }, - }); + case "question.asked": { + context.pendingQuestions.set(event.properties.id, event.properties); + yield* emit({ + ...(yield* buildEventBase({ + threadId: context.session.threadId, + turnId, + requestId: event.properties.id, + raw: event, + })), + type: "user-input.requested", + payload: { + questions: normalizeQuestionRequest(event.properties), + }, + }); + break; + } - switch (event.type) { - case "message.updated": { - context.messageRoleById.set(event.properties.info.id, event.properties.info.role); - if (event.properties.info.role === "assistant") { - for (const part of context.partById.values()) { - if (part.messageID !== event.properties.info.id) { - continue; - } - yield* emitAssistantTextDelta(context, part, turnId, event); - } - } - break; - } + case "question.replied": { + const request = context.pendingQuestions.get(event.properties.requestID); + context.pendingQuestions.delete(event.properties.requestID); + const answers = Object.fromEntries( + (request?.questions ?? []).map((question, index) => [ + openCodeQuestionId(index, question), + event.properties.answers[index]?.join(", ") ?? 
"", + ]), + ); + yield* emit({ + ...(yield* buildEventBase({ + threadId: context.session.threadId, + turnId, + requestId: event.properties.requestID, + raw: event, + })), + type: "user-input.resolved", + payload: { answers }, + }); + break; + } - case "message.removed": { - context.messageRoleById.delete(event.properties.messageID); - break; - } + case "question.rejected": { + context.pendingQuestions.delete(event.properties.requestID); + yield* emit({ + ...(yield* buildEventBase({ + threadId: context.session.threadId, + turnId, + requestId: event.properties.requestID, + raw: event, + })), + type: "user-input.resolved", + payload: { answers: {} }, + }); + break; + } - case "message.part.delta": { - const existingPart = context.partById.get(event.properties.partID); - if (!existingPart) { - break; - } - const role = messageRoleForPart(context, existingPart); - if (role !== "assistant") { - break; - } - const streamKind = resolveTextStreamKind(existingPart); - const delta = event.properties.delta; - if (delta.length === 0) { - break; - } - const previousText = - context.emittedTextByPartId.get(event.properties.partID) ?? - textFromPart(existingPart) ?? 
- ""; - const { nextText, deltaToEmit } = appendOpenCodeAssistantTextDelta(previousText, delta); - if (deltaToEmit.length === 0) { - break; - } - context.emittedTextByPartId.set(event.properties.partID, nextText); - if (existingPart.type === "text" || existingPart.type === "reasoning") { - context.partById.set(event.properties.partID, { - ...existingPart, - text: nextText, - }); - } - yield* emit({ - ...buildEventBase({ - threadId: context.session.threadId, - turnId, - itemId: event.properties.partID, - raw: event, - }), - type: "content.delta", - payload: { - streamKind, - delta: deltaToEmit, - }, + case "session.status": { + if (event.properties.status.type === "busy") { + updateProviderSession(context, { + status: "running", + activeTurnId: turnId, }); - break; - } - - case "message.part.updated": { - const part = event.properties.part; - context.partById.set(part.id, part); - const messageRole = messageRoleForPart(context, part); - - if (messageRole === "assistant") { - yield* emitAssistantTextDelta(context, part, turnId, event); - } - - if (part.type === "tool") { - const itemType = toToolLifecycleItemType(part.tool); - const title = - part.state.status === "running" ? (part.state.title ?? part.tool) : part.tool; - const detail = detailFromToolPart(part); - const payload = { - itemType, - ...(part.state.status === "error" - ? { status: "failed" as const } - : part.state.status === "completed" - ? { status: "completed" as const } - : { status: "inProgress" as const }), - ...(title ? { title } : {}), - ...(detail ? { detail } : {}), - data: { - tool: part.tool, - state: part.state, - }, - }; - const runtimeEvent: ProviderRuntimeEvent = { - ...buildEventBase({ - threadId: context.session.threadId, - turnId, - itemId: part.callID, - createdAt: toolStateCreatedAt(part), - raw: event, - }), - type: - part.state.status === "pending" - ? "item.started" - : part.state.status === "completed" || part.state.status === "error" - ? 
"item.completed" - : "item.updated", - payload, - }; - appendTurnItem(context, turnId, part); - yield* emit(runtimeEvent); - } - break; } - case "permission.asked": { - context.pendingPermissions.set(event.properties.id, event.properties); + if (event.properties.status.type === "retry") { yield* emit({ - ...buildEventBase({ + ...(yield* buildEventBase({ threadId: context.session.threadId, turnId, - requestId: event.properties.id, raw: event, - }), - type: "request.opened", + })), + type: "runtime.warning", payload: { - requestType: mapPermissionToRequestType(event.properties.permission), - detail: - event.properties.patterns.length > 0 - ? event.properties.patterns.join("\n") - : event.properties.permission, - args: event.properties.metadata, + message: event.properties.status.message, + detail: event.properties.status, }, }); break; } - case "permission.replied": { - context.pendingPermissions.delete(event.properties.requestID); + if (event.properties.status.type === "idle" && turnId) { + context.activeTurnId = undefined; + updateProviderSession(context, { status: "ready" }, { clearActiveTurnId: true }); yield* emit({ - ...buildEventBase({ + ...(yield* buildEventBase({ threadId: context.session.threadId, turnId, - requestId: event.properties.requestID, raw: event, - }), - type: "request.resolved", + })), + type: "turn.completed", payload: { - requestType: "unknown", - decision: mapPermissionDecision(event.properties.reply), + state: "completed", }, }); - break; } + break; + } - case "question.asked": { - context.pendingQuestions.set(event.properties.id, event.properties); + case "session.error": { + const message = sessionErrorMessage(event.properties.error); + const activeTurnId = context.activeTurnId; + context.activeTurnId = undefined; + updateProviderSession( + context, + { + status: "error", + lastError: message, + }, + { clearActiveTurnId: true }, + ); + if (activeTurnId) { yield* emit({ - ...buildEventBase({ + ...(yield* buildEventBase({ threadId: 
context.session.threadId, - turnId, - requestId: event.properties.id, + turnId: activeTurnId, raw: event, - }), - type: "user-input.requested", + })), + type: "turn.completed", payload: { - questions: normalizeQuestionRequest(event.properties), + state: "failed", + errorMessage: message, }, }); - break; } + yield* emit({ + ...(yield* buildEventBase({ + threadId: context.session.threadId, + raw: event, + })), + type: "runtime.error", + payload: { + message, + class: "provider_error", + detail: event.properties.error, + }, + }); + break; + } - case "question.replied": { - const request = context.pendingQuestions.get(event.properties.requestID); - context.pendingQuestions.delete(event.properties.requestID); - const answers = Object.fromEntries( - (request?.questions ?? []).map((question, index) => [ - openCodeQuestionId(index, question), - event.properties.answers[index]?.join(", ") ?? "", - ]), - ); - yield* emit({ - ...buildEventBase({ - threadId: context.session.threadId, - turnId, - requestId: event.properties.requestID, - raw: event, - }), - type: "user-input.resolved", - payload: { answers }, - }); - break; - } + default: + break; + } + }); + + const startEventPump = Effect.fn("startEventPump")(function* (context: OpenCodeSessionContext) { + // One AbortController per session scope. The finalizer fires when + // the scope closes (explicit stop, unexpected exit, or layer + // shutdown) and cancels the in-flight `event.subscribe` fetch so + // the async iterable unwinds cleanly. 
+ const eventsAbortController = new AbortController(); + yield* Scope.addFinalizer( + context.sessionScope, + Effect.sync(() => eventsAbortController.abort()), + ); - case "question.rejected": { - context.pendingQuestions.delete(event.properties.requestID); - yield* emit({ - ...buildEventBase({ - threadId: context.session.threadId, - turnId, - requestId: event.properties.requestID, - raw: event, + // Fibers forked into `context.sessionScope` are interrupted + // automatically when the scope closes — no bookkeeping required. + yield* Effect.flatMap( + runOpenCodeSdk("event.subscribe", () => + context.client.event.subscribe(undefined, { + signal: eventsAbortController.signal, + }), + ), + (subscription) => + Stream.fromAsyncIterable( + subscription.stream, + (cause) => + new OpenCodeRuntimeError({ + operation: "event.subscribe", + detail: openCodeRuntimeErrorDetail(cause), + cause, }), - type: "user-input.resolved", - payload: { answers: {} }, - }); - break; - } - - case "session.status": { - if (event.properties.status.type === "busy") { - updateProviderSession(context, { status: "running", activeTurnId: turnId }); + ).pipe(Stream.runForEach((event) => handleSubscribedEvent(context, event))), + ).pipe( + Effect.exit, + Effect.flatMap((exit) => + Effect.gen(function* () { + // Expected paths: caller aborted the fetch or the session + // has already been marked stopped. Treat as a clean exit. 
+ if (eventsAbortController.signal.aborted || (yield* Ref.get(context.stopped))) { + return; } - - if (event.properties.status.type === "retry") { - yield* emit({ - ...buildEventBase({ threadId: context.session.threadId, turnId, raw: event }), - type: "runtime.warning", - payload: { - message: event.properties.status.message, - detail: event.properties.status, - }, - }); - break; - } - - if (event.properties.status.type === "idle" && turnId) { - context.activeTurnId = undefined; - updateProviderSession(context, { status: "ready" }, { clearActiveTurnId: true }); - yield* emit({ - ...buildEventBase({ threadId: context.session.threadId, turnId, raw: event }), - type: "turn.completed", - payload: { - state: "completed", - }, - }); - } - break; - } - - case "session.error": { - const message = sessionErrorMessage(event.properties.error); - const activeTurnId = context.activeTurnId; - context.activeTurnId = undefined; - updateProviderSession( - context, - { - status: "error", - lastError: message, - }, - { clearActiveTurnId: true }, - ); - if (activeTurnId) { - yield* emit({ - ...buildEventBase({ - threadId: context.session.threadId, - turnId: activeTurnId, - raw: event, - }), - type: "turn.completed", - payload: { - state: "failed", - errorMessage: message, - }, - }); + if (Exit.isFailure(exit)) { + yield* emitUnexpectedExit( + context, + openCodeRuntimeErrorDetail(Cause.squash(exit.cause)), + ); } - yield* emit({ - ...buildEventBase({ threadId: context.session.threadId, raw: event }), - type: "runtime.error", - payload: { - message, - class: "provider_error", - detail: event.properties.error, - }, - }); - break; - } - - default: - break; - } - }); - - const startEventPump = Effect.fn("startEventPump")(function* ( - context: OpenCodeSessionContext, - ) { - // One AbortController per session scope. 
The finalizer fires when - // the scope closes (explicit stop, unexpected exit, or layer - // shutdown) and cancels the in-flight `event.subscribe` fetch so - // the async iterable unwinds cleanly. - const eventsAbortController = new AbortController(); - yield* Scope.addFinalizer( - context.sessionScope, - Effect.sync(() => eventsAbortController.abort()), - ); + }), + ), + Effect.forkIn(context.sessionScope), + ); - // Fibers forked into `context.sessionScope` are interrupted - // automatically when the scope closes — no bookkeeping required. - yield* Effect.flatMap( - runOpenCodeSdk("event.subscribe", () => - context.client.event.subscribe(undefined, { - signal: eventsAbortController.signal, - }), - ), - (subscription) => - Stream.fromAsyncIterable( - subscription.stream, - (cause) => - new OpenCodeRuntimeError({ - operation: "event.subscribe", - detail: openCodeRuntimeErrorDetail(cause), - cause, - }), - ).pipe(Stream.runForEach((event) => handleSubscribedEvent(context, event))), - ).pipe( - Effect.exit, - Effect.flatMap((exit) => + if (!context.server.external && context.server.exitCode !== null) { + yield* context.server.exitCode.pipe( + Effect.flatMap((code) => Effect.gen(function* () { - // Expected paths: caller aborted the fetch or the session - // has already been marked stopped. Treat as a clean exit. 
- if (eventsAbortController.signal.aborted || (yield* Ref.get(context.stopped))) { + if (yield* Ref.get(context.stopped)) { return; } - if (Exit.isFailure(exit)) { - yield* emitUnexpectedExit( - context, - openCodeRuntimeErrorDetail(Cause.squash(exit.cause)), - ); - } + yield* emitUnexpectedExit(context, `OpenCode server exited unexpectedly (${code}).`); }), ), Effect.forkIn(context.sessionScope), ); - - if (!context.server.external && context.server.exitCode !== null) { - yield* context.server.exitCode.pipe( - Effect.flatMap((code) => - Effect.gen(function* () { - if (yield* Ref.get(context.stopped)) { - return; - } - yield* emitUnexpectedExit( - context, - `OpenCode server exited unexpectedly (${code}).`, - ); - }), - ), - Effect.forkIn(context.sessionScope), - ); + } + }); + + const startSession: OpenCodeAdapterShape["startSession"] = Effect.fn("startSession")( + function* (input) { + const binaryPath = openCodeSettings.binaryPath; + const serverUrl = openCodeSettings.serverUrl; + const serverPassword = openCodeSettings.serverPassword; + const directory = input.cwd ?? serverConfig.cwd; + const existing = sessions.get(input.threadId); + if (existing) { + yield* stopOpenCodeContext(existing); + sessions.delete(input.threadId); } - }); - const startSession: OpenCodeAdapterShape["startSession"] = Effect.fn("startSession")( - function* (input) { - const settings = yield* serverSettings.getSettings.pipe( - Effect.mapError( - (cause) => - new ProviderAdapterProcessError({ - provider: PROVIDER, - threadId: input.threadId, - detail: "Failed to read OpenCode settings.", - cause, + const started = yield* Effect.gen(function* () { + const sessionScope = yield* Scope.make(); + const startedExit = yield* Effect.exit( + Effect.gen(function* () { + // The runtime binds the server's lifetime to the Scope.Scope + // we provide below — closing `sessionScope` kills the child + // process automatically. No manual `server.close()` needed. 
+ const server = yield* openCodeRuntime.connectToOpenCodeServer({ + binaryPath, + serverUrl, + ...(options?.environment ? { environment: options.environment } : {}), + }); + const client = openCodeRuntime.createOpenCodeSdkClient({ + baseUrl: server.url, + directory, + ...(server.external && serverPassword ? { serverPassword } : {}), + }); + const openCodeSession = yield* runOpenCodeSdk("session.create", () => + client.session.create({ + title: `T3 Code ${input.threadId}`, + permission: buildOpenCodePermissionRules(input.runtimeMode), }), - ), - ); - const binaryPath = settings.providers.opencode.binaryPath; - const serverUrl = settings.providers.opencode.serverUrl; - const serverPassword = settings.providers.opencode.serverPassword; - const directory = input.cwd ?? serverConfig.cwd; - const existing = sessions.get(input.threadId); - if (existing) { - yield* stopOpenCodeContext(existing); - sessions.delete(input.threadId); - } - - const started = yield* Effect.gen(function* () { - const sessionScope = yield* Scope.make(); - const startedExit = yield* Effect.exit( - Effect.gen(function* () { - // The runtime binds the server's lifetime to the Scope.Scope - // we provide below — closing `sessionScope` kills the child - // process automatically. No manual `server.close()` needed. - const server = yield* openCodeRuntime.connectToOpenCodeServer({ - binaryPath, - serverUrl, - }); - const client = openCodeRuntime.createOpenCodeSdkClient({ - baseUrl: server.url, - directory, - ...(server.external && serverPassword ? 
{ serverPassword } : {}), + ); + if (!openCodeSession.data) { + return yield* new OpenCodeRuntimeError({ + operation: "session.create", + detail: "OpenCode session.create returned no session payload.", }); - const openCodeSession = yield* runOpenCodeSdk("session.create", () => - client.session.create({ - title: `T3 Code ${input.threadId}`, - permission: buildOpenCodePermissionRules(input.runtimeMode), - }), - ); - if (!openCodeSession.data) { - return yield* new OpenCodeRuntimeError({ - operation: "session.create", - detail: "OpenCode session.create returned no session payload.", - }); - } - return { sessionScope, server, client, openCodeSession: openCodeSession.data }; - }).pipe(Effect.provideService(Scope.Scope, sessionScope)), - ); - if (Exit.isFailure(startedExit)) { - yield* Scope.close(sessionScope, Exit.void).pipe(Effect.ignore); - return yield* toProcessError(input.threadId, Cause.squash(startedExit.cause)); - } - return startedExit.value; - }); - - // Guard against a concurrent startSession call that may have raced - // and already inserted a session while we were awaiting async work. - const raceWinner = sessions.get(input.threadId); - if (raceWinner) { - // Another call won the race – clean up the session we just created - // (including the remote SDK session) and return the existing one. 
- yield* runOpenCodeSdk("session.abort", () => - started.client.session.abort({ sessionID: started.openCodeSession.id }), - ).pipe(Effect.ignore); - yield* Scope.close(started.sessionScope, Exit.void).pipe(Effect.ignore); - return raceWinner.session; + } + return { + sessionScope, + server, + client, + openCodeSession: openCodeSession.data, + }; + }).pipe(Effect.provideService(Scope.Scope, sessionScope)), + ); + if (Exit.isFailure(startedExit)) { + yield* Scope.close(sessionScope, Exit.void).pipe(Effect.ignore); + return yield* toProcessError(input.threadId, Cause.squash(startedExit.cause)); } + return startedExit.value; + }); - const createdAt = nowIso(); - const session: ProviderSession = { - provider: PROVIDER, - status: "ready", - runtimeMode: input.runtimeMode, - cwd: directory, - ...(input.modelSelection ? { model: input.modelSelection.model } : {}), - threadId: input.threadId, - createdAt, - updatedAt: createdAt, - }; - - const context: OpenCodeSessionContext = { - session, - client: started.client, - server: started.server, - directory, - openCodeSessionId: started.openCodeSession.id, - pendingPermissions: new Map(), - pendingQuestions: new Map(), - partById: new Map(), - emittedTextByPartId: new Map(), - messageRoleById: new Map(), - completedAssistantPartIds: new Set(), - turns: [], - activeTurnId: undefined, - activeAgent: undefined, - activeVariant: undefined, - stopped: yield* Ref.make(false), - sessionScope: started.sessionScope, - }; - sessions.set(input.threadId, context); - yield* startEventPump(context); - - yield* emit({ - ...buildEventBase({ threadId: input.threadId }), - type: "session.started", - payload: { - message: "OpenCode session started", - }, - }); - yield* emit({ - ...buildEventBase({ threadId: input.threadId }), - type: "thread.started", - payload: { - providerThreadId: started.openCodeSession.id, - }, - }); - - return session; - }, - ); - - const sendTurn: OpenCodeAdapterShape["sendTurn"] = Effect.fn("sendTurn")(function* (input) { 
- const context = ensureSessionContext(sessions, input.threadId); - const turnId = TurnId.make(`opencode-turn-${randomUUID()}`); - const modelSelection = - input.modelSelection ?? - (context.session.model - ? { provider: PROVIDER, model: context.session.model } - : undefined); - const parsedModel = parseOpenCodeModelSlug(modelSelection?.model); - if (!parsedModel) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "sendTurn", - issue: "OpenCode model selection must use the 'provider/model' format.", - }); + // Guard against a concurrent startSession call that may have raced + // and already inserted a session while we were awaiting async work. + const raceWinner = sessions.get(input.threadId); + if (raceWinner) { + // Another call won the race – clean up the session we just created + // (including the remote SDK session) and return the existing one. + yield* runOpenCodeSdk("session.abort", () => + started.client.session.abort({ + sessionID: started.openCodeSession.id, + }), + ).pipe(Effect.ignore); + yield* Scope.close(started.sessionScope, Exit.void).pipe(Effect.ignore); + return raceWinner.session; } - const text = input.input?.trim(); - const fileParts = toOpenCodeFileParts({ - attachments: input.attachments, - resolveAttachmentPath: (attachment) => - resolveAttachmentPath({ attachmentsDir: serverConfig.attachmentsDir, attachment }), - }); - if ((!text || text.length === 0) && fileParts.length === 0) { - return yield* new ProviderAdapterValidationError({ - provider: PROVIDER, - operation: "sendTurn", - issue: "OpenCode turns require text input or at least one attachment.", - }); - } + const createdAt = nowIso(); + const session: ProviderSession = { + provider: PROVIDER, + providerInstanceId: boundInstanceId, + status: "ready", + runtimeMode: input.runtimeMode, + cwd: directory, + ...(input.modelSelection ? 
{ model: input.modelSelection.model } : {}), + threadId: input.threadId, + createdAt, + updatedAt: createdAt, + }; - const agent = - input.modelSelection?.provider === PROVIDER - ? input.modelSelection.options?.agent - : undefined; - const variant = - input.modelSelection?.provider === PROVIDER - ? input.modelSelection.options?.variant - : undefined; - - context.activeTurnId = turnId; - context.activeAgent = agent ?? (input.interactionMode === "plan" ? "plan" : undefined); - context.activeVariant = variant; - updateProviderSession( - context, - { - status: "running", - activeTurnId: turnId, - model: modelSelection?.model ?? context.session.model, - }, - { clearLastError: true }, - ); + const context: OpenCodeSessionContext = { + session, + client: started.client, + server: started.server, + directory, + openCodeSessionId: started.openCodeSession.id, + pendingPermissions: new Map(), + pendingQuestions: new Map(), + partById: new Map(), + emittedTextByPartId: new Map(), + messageRoleById: new Map(), + completedAssistantPartIds: new Set(), + turns: [], + activeTurnId: undefined, + activeAgent: undefined, + activeVariant: undefined, + stopped: yield* Ref.make(false), + sessionScope: started.sessionScope, + }; + sessions.set(input.threadId, context); + yield* startEventPump(context); yield* emit({ - ...buildEventBase({ threadId: input.threadId, turnId }), - type: "turn.started", + ...(yield* buildEventBase({ threadId: input.threadId })), + type: "session.started", + payload: { + message: "OpenCode session started", + }, + }); + yield* emit({ + ...(yield* buildEventBase({ threadId: input.threadId })), + type: "thread.started", payload: { - model: modelSelection?.model ?? context.session.model, - ...(variant ? { effort: variant } : {}), + providerThreadId: started.openCodeSession.id, }, }); - yield* runOpenCodeSdk("session.promptAsync", () => - context.client.session.promptAsync({ - sessionID: context.openCodeSessionId, - model: parsedModel, - ...(context.activeAgent ? 
{ agent: context.activeAgent } : {}), - ...(context.activeVariant ? { variant: context.activeVariant } : {}), - parts: [...(text ? [{ type: "text" as const, text }] : []), ...fileParts], + return session; + }, + ); + + const sendTurn: OpenCodeAdapterShape["sendTurn"] = Effect.fn("sendTurn")(function* (input) { + const context = ensureSessionContext(sessions, input.threadId); + const turnId = TurnId.make(`opencode-turn-${yield* Random.nextUUIDv4}`); + const modelSelection = + input.modelSelection ?? + (context.session.model + ? { instanceId: boundInstanceId, model: context.session.model } + : undefined); + if (modelSelection !== undefined && modelSelection.instanceId !== boundInstanceId) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "sendTurn", + issue: `OpenCode model selection is bound to instance '${modelSelection?.instanceId}', expected '${boundInstanceId}'.`, + }); + } + const parsedModel = parseOpenCodeModelSlug(modelSelection?.model); + if (!parsedModel) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "sendTurn", + issue: "OpenCode model selection must use the 'provider/model' format.", + }); + } + + const text = input.input?.trim(); + const fileParts = toOpenCodeFileParts({ + attachments: input.attachments, + resolveAttachmentPath: (attachment) => + resolveAttachmentPath({ + attachmentsDir: serverConfig.attachmentsDir, + attachment, }), - ).pipe( - Effect.mapError(toRequestError), - // On failure: clear active-turn state, flip the session back to ready - // with lastError set, emit turn.aborted, then let the typed error - // propagate. We don't need to rebuild the error here — `toRequestError` - // already produced the right shape. 
- Effect.tapError((requestError) => - Effect.gen(function* () { - context.activeTurnId = undefined; - context.activeAgent = undefined; - context.activeVariant = undefined; - updateProviderSession( - context, - { - status: "ready", - model: modelSelection?.model ?? context.session.model, - lastError: requestError.detail, - }, - { clearActiveTurnId: true }, - ); - yield* emit({ - ...buildEventBase({ threadId: input.threadId, turnId }), - type: "turn.aborted", - payload: { - reason: requestError.detail, - }, - }); - }), - ), - ); + }); + if ((!text || text.length === 0) && fileParts.length === 0) { + return yield* new ProviderAdapterValidationError({ + provider: PROVIDER, + operation: "sendTurn", + issue: "OpenCode turns require text input or at least one attachment.", + }); + } + + const agent = getModelSelectionStringOptionValue(modelSelection, "agent"); + const variant = getModelSelectionStringOptionValue(modelSelection, "variant"); + + context.activeTurnId = turnId; + context.activeAgent = agent ?? (input.interactionMode === "plan" ? "plan" : undefined); + context.activeVariant = variant; + updateProviderSession( + context, + { + status: "running", + activeTurnId: turnId, + model: modelSelection?.model ?? context.session.model, + }, + { clearLastError: true }, + ); - return { - threadId: input.threadId, - turnId, - }; + yield* emit({ + ...(yield* buildEventBase({ threadId: input.threadId, turnId })), + type: "turn.started", + payload: { + model: modelSelection?.model ?? context.session.model, + ...(variant ? { effort: variant } : {}), + }, }); - const interruptTurn: OpenCodeAdapterShape["interruptTurn"] = Effect.fn("interruptTurn")( - function* (threadId, turnId) { - const context = ensureSessionContext(sessions, threadId); - yield* runOpenCodeSdk("session.abort", () => - context.client.session.abort({ sessionID: context.openCodeSessionId }), - ).pipe(Effect.mapError(toRequestError)); - if (turnId ?? 
context.activeTurnId) { + yield* runOpenCodeSdk("session.promptAsync", () => + context.client.session.promptAsync({ + sessionID: context.openCodeSessionId, + model: parsedModel, + ...(context.activeAgent ? { agent: context.activeAgent } : {}), + ...(context.activeVariant ? { variant: context.activeVariant } : {}), + parts: [...(text ? [{ type: "text" as const, text }] : []), ...fileParts], + }), + ).pipe( + Effect.mapError(toRequestError), + // On failure: clear active-turn state, flip the session back to ready + // with lastError set, emit turn.aborted, then let the typed error + // propagate. We don't need to rebuild the error here — `toRequestError` + // already produced the right shape. + Effect.tapError((requestError) => + Effect.gen(function* () { + context.activeTurnId = undefined; + context.activeAgent = undefined; + context.activeVariant = undefined; + updateProviderSession( + context, + { + status: "ready", + model: modelSelection?.model ?? context.session.model, + lastError: requestError.detail, + }, + { clearActiveTurnId: true }, + ); yield* emit({ - ...buildEventBase({ threadId, turnId: turnId ?? 
context.activeTurnId }), + ...(yield* buildEventBase({ + threadId: input.threadId, + turnId, + })), type: "turn.aborted", payload: { - reason: "Interrupted by user.", + reason: requestError.detail, }, }); - } - }, + }), + ), ); - const respondToRequest: OpenCodeAdapterShape["respondToRequest"] = Effect.fn( - "respondToRequest", - )(function* (threadId, requestId, decision) { - const context = ensureSessionContext(sessions, threadId); - if (!context.pendingPermissions.has(requestId)) { - return yield* new ProviderAdapterRequestError({ - provider: PROVIDER, - method: "permission.reply", - detail: `Unknown pending permission request: ${requestId}`, - }); - } - - yield* runOpenCodeSdk("permission.reply", () => - context.client.permission.reply({ - requestID: requestId, - reply: toOpenCodePermissionReply(decision), - }), - ).pipe(Effect.mapError(toRequestError)); - }); + return { + threadId: input.threadId, + turnId, + }; + }); - const respondToUserInput: OpenCodeAdapterShape["respondToUserInput"] = Effect.fn( - "respondToUserInput", - )(function* (threadId, requestId, answers) { + const interruptTurn: OpenCodeAdapterShape["interruptTurn"] = Effect.fn("interruptTurn")( + function* (threadId, turnId) { const context = ensureSessionContext(sessions, threadId); - const request = context.pendingQuestions.get(requestId); - if (!request) { - return yield* new ProviderAdapterRequestError({ - provider: PROVIDER, - method: "question.reply", - detail: `Unknown pending user-input request: ${requestId}`, - }); - } - - yield* runOpenCodeSdk("question.reply", () => - context.client.question.reply({ - requestID: requestId, - answers: toOpenCodeQuestionAnswers(request, answers), - }), + yield* runOpenCodeSdk("session.abort", () => + context.client.session.abort({ sessionID: context.openCodeSessionId }), ).pipe(Effect.mapError(toRequestError)); - }); - - const stopSession: OpenCodeAdapterShape["stopSession"] = Effect.fn("stopSession")( - function* (threadId) { - const context = 
ensureSessionContext(sessions, threadId); - yield* stopOpenCodeContext(context); - sessions.delete(threadId); + if (turnId ?? context.activeTurnId) { yield* emit({ - ...buildEventBase({ threadId }), - type: "session.exited", + ...(yield* buildEventBase({ + threadId, + turnId: turnId ?? context.activeTurnId, + })), + type: "turn.aborted", payload: { - reason: "Session stopped.", - recoverable: false, - exitKind: "graceful", + reason: "Interrupted by user.", }, }); - }, - ); + } + }, + ); + + const respondToRequest: OpenCodeAdapterShape["respondToRequest"] = Effect.fn( + "respondToRequest", + )(function* (threadId, requestId, decision) { + const context = ensureSessionContext(sessions, threadId); + if (!context.pendingPermissions.has(requestId)) { + return yield* new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "permission.reply", + detail: `Unknown pending permission request: ${requestId}`, + }); + } + + yield* runOpenCodeSdk("permission.reply", () => + context.client.permission.reply({ + requestID: requestId, + reply: toOpenCodePermissionReply(decision), + }), + ).pipe(Effect.mapError(toRequestError)); + }); + + const respondToUserInput: OpenCodeAdapterShape["respondToUserInput"] = Effect.fn( + "respondToUserInput", + )(function* (threadId, requestId, answers) { + const context = ensureSessionContext(sessions, threadId); + const request = context.pendingQuestions.get(requestId); + if (!request) { + return yield* new ProviderAdapterRequestError({ + provider: PROVIDER, + method: "question.reply", + detail: `Unknown pending user-input request: ${requestId}`, + }); + } - const listSessions: OpenCodeAdapterShape["listSessions"] = () => - Effect.sync(() => [...sessions.values()].map((context) => context.session)); + yield* runOpenCodeSdk("question.reply", () => + context.client.question.reply({ + requestID: requestId, + answers: toOpenCodeQuestionAnswers(request, answers), + }), + ).pipe(Effect.mapError(toRequestError)); + }); + + const stopSession: 
OpenCodeAdapterShape["stopSession"] = Effect.fn("stopSession")( + function* (threadId) { + const context = sessions.get(threadId); + if (!context) { + throw new ProviderAdapterSessionNotFoundError({ + provider: PROVIDER, + threadId, + }); + } + const stopped = yield* stopOpenCodeContext(context); + sessions.delete(threadId); + if (!stopped) { + return; + } + yield* emit({ + ...(yield* buildEventBase({ threadId })), + type: "session.exited", + payload: { + reason: "Session stopped.", + recoverable: false, + exitKind: "graceful", + }, + }); + }, + ); - const hasSession: OpenCodeAdapterShape["hasSession"] = (threadId) => - Effect.sync(() => sessions.has(threadId)); + const listSessions: OpenCodeAdapterShape["listSessions"] = () => + Effect.sync(() => [...sessions.values()].map((context) => context.session)); - const readThread: OpenCodeAdapterShape["readThread"] = Effect.fn("readThread")( - function* (threadId) { - const context = ensureSessionContext(sessions, threadId); - const messages = yield* runOpenCodeSdk("session.messages", () => - context.client.session.messages({ sessionID: context.openCodeSessionId }), - ).pipe(Effect.mapError(toRequestError)); + const hasSession: OpenCodeAdapterShape["hasSession"] = (threadId) => + Effect.sync(() => sessions.has(threadId)); - const turns = (messages.data ?? []) - .filter((entry) => entry.info.role === "assistant") - .map((entry) => ({ - id: TurnId.make(entry.info.id), - items: [entry.info, ...entry.parts], - })); + const readThread: OpenCodeAdapterShape["readThread"] = Effect.fn("readThread")( + function* (threadId) { + const context = ensureSessionContext(sessions, threadId); + const messages = yield* runOpenCodeSdk("session.messages", () => + context.client.session.messages({ + sessionID: context.openCodeSessionId, + }), + ).pipe(Effect.mapError(toRequestError)); - return { - threadId, - turns, - }; - }, - ); + const turns = (messages.data ?? 
[]) + .filter((entry) => entry.info.role === "assistant") + .map((entry) => ({ + id: TurnId.make(entry.info.id), + items: [entry.info, ...entry.parts], + })); - const rollbackThread: OpenCodeAdapterShape["rollbackThread"] = Effect.fn("rollbackThread")( - function* (threadId, numTurns) { - const context = ensureSessionContext(sessions, threadId); - const messages = yield* runOpenCodeSdk("session.messages", () => - context.client.session.messages({ sessionID: context.openCodeSessionId }), - ).pipe(Effect.mapError(toRequestError)); + return { + threadId, + turns, + }; + }, + ); - const assistantMessages = (messages.data ?? []).filter( - (entry) => entry.info.role === "assistant", - ); - const targetIndex = assistantMessages.length - numTurns - 1; - const target = targetIndex >= 0 ? assistantMessages[targetIndex] : null; - yield* runOpenCodeSdk("session.revert", () => - context.client.session.revert({ - sessionID: context.openCodeSessionId, - ...(target ? { messageID: target.info.id } : {}), - }), - ).pipe(Effect.mapError(toRequestError)); + const rollbackThread: OpenCodeAdapterShape["rollbackThread"] = Effect.fn("rollbackThread")( + function* (threadId, numTurns) { + const context = ensureSessionContext(sessions, threadId); + const messages = yield* runOpenCodeSdk("session.messages", () => + context.client.session.messages({ + sessionID: context.openCodeSessionId, + }), + ).pipe(Effect.mapError(toRequestError)); - return yield* readThread(threadId); - }, - ); + const assistantMessages = (messages.data ?? []).filter( + (entry) => entry.info.role === "assistant", + ); + const targetIndex = assistantMessages.length - numTurns - 1; + const target = targetIndex >= 0 ? assistantMessages[targetIndex] : null; + yield* runOpenCodeSdk("session.revert", () => + context.client.session.revert({ + sessionID: context.openCodeSessionId, + ...(target ? 
{ messageID: target.info.id } : {}), + }), + ).pipe(Effect.mapError(toRequestError)); - const stopAll: OpenCodeAdapterShape["stopAll"] = () => - Effect.gen(function* () { - const contexts = [...sessions.values()]; - sessions.clear(); - // `stopOpenCodeContext` is typed as never-failing — SDK aborts are - // already `Effect.ignore`'d inside it. `ignoreCause` here also - // swallows defects from throwing finalizers so one bad close can't - // interrupt the sibling fibers. Same pattern as the layer finalizer. - yield* Effect.forEach( - contexts, - (context) => Effect.ignoreCause(stopOpenCodeContext(context)), - { concurrency: "unbounded", discard: true }, - ); - }); + return yield* readThread(threadId); + }, + ); + + const stopAll: OpenCodeAdapterShape["stopAll"] = () => + Effect.gen(function* () { + const contexts = [...sessions.values()]; + sessions.clear(); + // `stopOpenCodeContext` is typed as never-failing — SDK aborts are + // already `Effect.ignore`'d inside it. `ignoreCause` here also + // swallows defects from throwing finalizers so one bad close can't + // interrupt the sibling fibers. Same pattern as the layer finalizer. 
+ yield* Effect.forEach( + contexts, + (context) => Effect.ignoreCause(stopOpenCodeContext(context)), + { concurrency: "unbounded", discard: true }, + ); + }); - return { - provider: PROVIDER, - capabilities: getProviderCapabilities(PROVIDER), - startSession, - sendTurn, - interruptTurn, - respondToRequest, - respondToUserInput, - stopSession, - listSessions, - hasSession, - readThread, - rollbackThread, - stopAll, - get streamEvents() { - return Stream.fromQueue(runtimeEvents); - }, - } satisfies OpenCodeAdapterShape; - }), - ); + return { + provider: PROVIDER, + capabilities: { + sessionModelSwitch: "in-session", + }, + startSession, + sendTurn, + interruptTurn, + respondToRequest, + respondToUserInput, + stopSession, + listSessions, + hasSession, + readThread, + rollbackThread, + stopAll, + get streamEvents() { + return Stream.fromQueue(runtimeEvents); + }, + } satisfies OpenCodeAdapterShape; + }); } - -export const OpenCodeAdapterLive = makeOpenCodeAdapterLive(); diff --git a/apps/server/src/provider/Layers/OpenCodeProvider.test.ts b/apps/server/src/provider/Layers/OpenCodeProvider.test.ts index ffce7084342..7abe0be9816 100644 --- a/apps/server/src/provider/Layers/OpenCodeProvider.test.ts +++ b/apps/server/src/provider/Layers/OpenCodeProvider.test.ts @@ -2,24 +2,36 @@ import assert from "node:assert/strict"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { it } from "@effect/vitest"; -import { Effect, Layer } from "effect"; +import { Effect, Layer, Schema } from "effect"; import { beforeEach } from "vitest"; +import { OpenCodeSettings } from "@t3tools/contracts"; import { ServerConfig } from "../../config.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -import { OpenCodeProvider } from "../Services/OpenCodeProvider.ts"; import { OpenCodeRuntime, OpenCodeRuntimeError, type OpenCodeRuntimeShape, } from "../opencodeRuntime.ts"; -import { OpenCodeProviderLive } from "./OpenCodeProvider.ts"; +import { 
checkOpenCodeProviderStatus } from "./OpenCodeProvider.ts"; import type { OpenCodeInventory } from "../opencodeRuntime.ts"; +const DEFAULT_VERSION_STDOUT = "opencode 1.14.19\n"; + +/** + * The legacy `OpenCodeProviderLive` Layer + `OpenCodeProvider` service tag + * are deleted. The snapshot-producing logic they wrapped now lives in the + * standalone `checkOpenCodeProviderStatus(settings, cwd)` Effect, which + * drivers call directly when building their per-instance snapshot + * `ServerProviderShape`. Tests mirror that shape: build a settings payload, + * invoke the check, assert on the returned snapshot. + */ + const runtimeMock = { state: { runVersionError: null as Error | null, + versionStdout: DEFAULT_VERSION_STDOUT, inventoryError: null as Error | null, + closeCalls: 0, inventory: { providerList: { connected: [] as string[], all: [] as unknown[], default: {} }, agents: [] as unknown[], @@ -27,7 +39,9 @@ const runtimeMock = { }, reset() { this.state.runVersionError = null; + this.state.versionStdout = DEFAULT_VERSION_STDOUT; this.state.inventoryError = null; + this.state.closeCalls = 0; this.state.inventory = { providerList: { connected: [], all: [] as unknown[], default: {} }, agents: [] as unknown[], @@ -42,10 +56,19 @@ const OpenCodeRuntimeTestDouble: OpenCodeRuntimeShape = { exitCode: Effect.never, }), connectToOpenCodeServer: ({ serverUrl }) => - Effect.succeed({ - url: serverUrl ?? "http://127.0.0.1:4301", - exitCode: null, - external: Boolean(serverUrl), + Effect.gen(function* () { + if (!serverUrl) { + yield* Effect.addFinalizer(() => + Effect.sync(() => { + runtimeMock.state.closeCalls += 1; + }), + ); + } + return { + url: serverUrl ?? 
"http://127.0.0.1:4301", + exitCode: null, + external: Boolean(serverUrl), + }; }), runOpenCodeCommand: () => runtimeMock.state.runVersionError @@ -56,7 +79,7 @@ const OpenCodeRuntimeTestDouble: OpenCodeRuntimeShape = { cause: runtimeMock.state.runVersionError, }), ) - : Effect.succeed({ stdout: "opencode 1.0.0\n", stderr: "", code: 0 }), + : Effect.succeed({ stdout: runtimeMock.state.versionStdout, stderr: "", code: 0 }), createOpenCodeSdkClient: () => ({}) as unknown as ReturnType, loadOpenCodeInventory: () => @@ -75,20 +98,26 @@ beforeEach(() => { runtimeMock.reset(); }); -const makeTestLayer = (settingsOverrides?: Parameters[0]) => - OpenCodeProviderLive.pipe( - Layer.provideMerge(Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble)), - Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), - Layer.provideMerge(ServerSettingsService.layerTest(settingsOverrides)), - Layer.provideMerge(NodeServices.layer), - ); - -it.layer(makeTestLayer())("OpenCodeProviderLive", (it) => { +const testLayer = Layer.succeed(OpenCodeRuntime, OpenCodeRuntimeTestDouble).pipe( + Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), + Layer.provideMerge(NodeServices.layer), +); + +const makeOpenCodeSettings = (overrides?: Partial): OpenCodeSettings => + Schema.decodeSync(OpenCodeSettings)({ + enabled: true, + binaryPath: "opencode", + serverUrl: "", + serverPassword: "", + customModels: [], + ...overrides, + }); + +it.layer(testLayer)("checkOpenCodeProviderStatus", (it) => { it.effect("shows a codex-style missing binary message", () => Effect.gen(function* () { runtimeMock.state.runVersionError = new Error("spawn opencode ENOENT"); - const provider = yield* OpenCodeProvider; - const snapshot = yield* provider.refresh; + const snapshot = yield* checkOpenCodeProviderStatus(makeOpenCodeSettings(), process.cwd()); assert.equal(snapshot.status, "error"); assert.equal(snapshot.installed, false); @@ -99,8 +128,7 @@ 
it.layer(makeTestLayer())("OpenCodeProviderLive", (it) => { it.effect("hides generic Effect.tryPromise text for local CLI probe failures", () => Effect.gen(function* () { runtimeMock.state.runVersionError = new Error("An error occurred in Effect.tryPromise"); - const provider = yield* OpenCodeProvider; - const snapshot = yield* provider.refresh; + const snapshot = yield* checkOpenCodeProviderStatus(makeOpenCodeSettings(), process.cwd()); assert.equal(snapshot.status, "error"); assert.equal(snapshot.installed, true); @@ -140,38 +168,49 @@ it.layer(makeTestLayer())("OpenCodeProviderLive", (it) => { ], }; - const provider = yield* OpenCodeProvider; - const snapshot = yield* provider.refresh; + const snapshot = yield* checkOpenCodeProviderStatus(makeOpenCodeSettings(), process.cwd()); const model = snapshot.models.find((entry) => entry.slug === "openai/gpt-5.4"); assert.ok(model); + const variantDescriptor = model.capabilities?.optionDescriptors?.find( + (descriptor) => descriptor.id === "variant" && descriptor.type === "select", + ); + assert.ok(variantDescriptor && variantDescriptor.type === "select"); assert.equal( - model.capabilities?.variantOptions?.find((option) => option.isDefault)?.value, + variantDescriptor.options.find((option) => option.isDefault === true)?.id, "medium", ); + const agentDescriptor = model.capabilities?.optionDescriptors?.find( + (descriptor) => descriptor.id === "agent" && descriptor.type === "select", + ); + assert.ok(agentDescriptor && agentDescriptor.type === "select"); assert.equal( - model.capabilities?.agentOptions?.find((option) => option.isDefault)?.value, + agentDescriptor.options.find((option) => option.isDefault === true)?.id, "build", ); }), ); + + it.effect("closes the local OpenCode server scope after provider refresh", () => + Effect.gen(function* () { + yield* checkOpenCodeProviderStatus(makeOpenCodeSettings(), process.cwd()); + + assert.equal(runtimeMock.state.closeCalls, 1); + }), + ); }); -it.layer( - makeTestLayer({ - 
providers: { - opencode: { - serverUrl: "http://127.0.0.1:9999", - serverPassword: "secret-password", - }, - }, - }), -)("OpenCodeProviderLive with configured server URL", (it) => { +it.layer(testLayer)("checkOpenCodeProviderStatus with configured server URL", (it) => { it.effect("surfaces a friendly auth error for configured servers", () => Effect.gen(function* () { runtimeMock.state.inventoryError = new Error("401 Unauthorized"); - const provider = yield* OpenCodeProvider; - const snapshot = yield* provider.refresh; + const snapshot = yield* checkOpenCodeProviderStatus( + makeOpenCodeSettings({ + serverUrl: "http://127.0.0.1:9999", + serverPassword: "secret-password", + }), + process.cwd(), + ); assert.equal(snapshot.status, "error"); assert.equal(snapshot.installed, true); @@ -187,8 +226,13 @@ it.layer( runtimeMock.state.inventoryError = new Error( "fetch failed: connect ECONNREFUSED 127.0.0.1:9999", ); - const provider = yield* OpenCodeProvider; - const snapshot = yield* provider.refresh; + const snapshot = yield* checkOpenCodeProviderStatus( + makeOpenCodeSettings({ + serverUrl: "http://127.0.0.1:9999", + serverPassword: "secret-password", + }), + process.cwd(), + ); assert.equal(snapshot.status, "error"); assert.equal(snapshot.installed, true); diff --git a/apps/server/src/provider/Layers/OpenCodeProvider.ts b/apps/server/src/provider/Layers/OpenCodeProvider.ts index 5e51eae0282..c7487d7d526 100644 --- a/apps/server/src/provider/Layers/OpenCodeProvider.ts +++ b/apps/server/src/provider/Layers/OpenCodeProvider.ts @@ -1,20 +1,21 @@ -import type { - ModelCapabilities, - OpenCodeSettings, - ServerProvider, - ServerProviderModel, +import { + ProviderDriverKind, + type ModelCapabilities, + type OpenCodeSettings, + type ServerProviderModel, } from "@t3tools/contracts"; -import { Cause, Data, Effect, Equal, Layer, Stream } from "effect"; +import { Cause, Data, Effect } from "effect"; + +import { createModelCapabilities } from "@t3tools/shared/model"; -import { 
ServerConfig } from "../../config.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -import { makeManagedServerProvider } from "../makeManagedServerProvider.ts"; import { buildServerProvider, + nonEmptyTrimmed, parseGenericCliVersion, providerModelsFromSettings, + type ServerProviderDraft, } from "../providerSnapshot.ts"; -import { OpenCodeProvider } from "../Services/OpenCodeProvider.ts"; +import { compareCliVersions } from "../cliVersion.ts"; import { OpenCodeRuntime, openCodeRuntimeErrorDetail, @@ -22,7 +23,12 @@ import { } from "../opencodeRuntime.ts"; import type { Agent, ProviderListResponse } from "@opencode-ai/sdk/v2"; -const PROVIDER = "opencode" as const; +const PROVIDER = ProviderDriverKind.make("opencode"); +const OPENCODE_PRESENTATION = { + displayName: "OpenCode", + showInteractionModeToggle: false, +} as const; +const MINIMUM_OPENCODE_VERSION = "1.14.19"; class OpenCodeProbeError extends Data.TaggedError("OpenCodeProbeError")<{ readonly cause: unknown; @@ -156,13 +162,9 @@ function inferDefaultAgent(agents: ReadonlyArray): string | undefined { return agents.find((agent) => agent.name === "build")?.name ?? agents[0]?.name ?? undefined; } -const DEFAULT_OPENCODE_MODEL_CAPABILITIES: ModelCapabilities = { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], -}; +const DEFAULT_OPENCODE_MODEL_CAPABILITIES: ModelCapabilities = createModelCapabilities({ + optionDescriptors: [], +}); function openCodeCapabilitiesForModel(input: { readonly providerID: string; @@ -171,27 +173,46 @@ function openCodeCapabilitiesForModel(input: { }): ModelCapabilities { const variantValues = Object.keys(input.model.variants ?? 
{}); const defaultVariant = inferDefaultVariant(input.providerID, variantValues); - const variantOptions: ModelCapabilities["variantOptions"] = variantValues.map((value) => - Object.assign( - { value, label: titleCaseSlug(value) }, - defaultVariant === value ? { isDefault: true } : {}, - ), + const variantOptions = variantValues.map((value) => + defaultVariant === value + ? { id: value, label: titleCaseSlug(value), isDefault: true as const } + : { id: value, label: titleCaseSlug(value) }, ); const primaryAgents = input.agents.filter( (agent) => !agent.hidden && (agent.mode === "primary" || agent.mode === "all"), ); const defaultAgent = inferDefaultAgent(primaryAgents); - const agentOptions: ModelCapabilities["agentOptions"] = primaryAgents.map((agent) => - Object.assign( - { value: agent.name, label: titleCaseSlug(agent.name) }, - defaultAgent === agent.name ? { isDefault: true } : {}, - ), + const agentOptions = primaryAgents.map((agent) => + defaultAgent === agent.name + ? { id: agent.name, label: titleCaseSlug(agent.name), isDefault: true as const } + : { id: agent.name, label: titleCaseSlug(agent.name) }, ); - return { - ...DEFAULT_OPENCODE_MODEL_CAPABILITIES, - ...(variantOptions.length > 0 ? { variantOptions } : {}), - ...(agentOptions.length > 0 ? { agentOptions } : {}), - }; + return createModelCapabilities({ + optionDescriptors: [ + ...(variantOptions.length > 0 + ? [ + { + id: "variant", + label: "Variant", + type: "select" as const, + options: variantOptions, + ...(defaultVariant ? { currentValue: defaultVariant } : {}), + }, + ] + : []), + ...(agentOptions.length > 0 + ? [ + { + id: "agent", + label: "Agent", + type: "select" as const, + options: agentOptions, + ...(defaultAgent ? 
{ currentValue: defaultAgent } : {}), + }, + ] + : []), + ], + }); } function flattenOpenCodeModels(input: OpenCodeInventory): ReadonlyArray { @@ -204,10 +225,16 @@ function flattenOpenCodeModels(input: OpenCodeInventory): ReadonlyArray left.name.localeCompare(right.name)); } -const makePendingOpenCodeProvider = (openCodeSettings: OpenCodeSettings): ServerProvider => { +export const makePendingOpenCodeProvider = ( + openCodeSettings: OpenCodeSettings, +): ServerProviderDraft => { const checkedAt = new Date().toISOString(); const models = providerModelsFromSettings( [], @@ -232,7 +261,7 @@ const makePendingOpenCodeProvider = (openCodeSettings: OpenCodeSettings): Server if (!openCodeSettings.enabled) { return buildServerProvider({ - provider: PROVIDER, + presentation: OPENCODE_PRESENTATION, enabled: false, checkedAt, models, @@ -250,7 +279,7 @@ const makePendingOpenCodeProvider = (openCodeSettings: OpenCodeSettings): Server } return buildServerProvider({ - provider: PROVIDER, + presentation: OPENCODE_PRESENTATION, enabled: true, checkedAt, models, @@ -264,177 +293,179 @@ const makePendingOpenCodeProvider = (openCodeSettings: OpenCodeSettings): Server }); }; -export const OpenCodeProviderLive = Layer.effect( - OpenCodeProvider, - Effect.gen(function* () { - const serverSettings = yield* ServerSettingsService; - const serverConfig = yield* ServerConfig; - const openCodeRuntime = yield* OpenCodeRuntime; - - const checkOpenCodeProviderStatus = Effect.fn("checkOpenCodeProviderStatus")(function* (input: { - readonly settings: OpenCodeSettings; - readonly cwd: string; - }): Effect.fn.Return { - const checkedAt = new Date().toISOString(); - const customModels = input.settings.customModels; - const isExternalServer = input.settings.serverUrl.trim().length > 0; - - const fallback = (cause: unknown, version: string | null = null) => { - const failure = formatOpenCodeProbeError({ - cause, - isExternalServer, - serverUrl: input.settings.serverUrl, - }); - return 
buildServerProvider({ - provider: PROVIDER, - enabled: input.settings.enabled, - checkedAt, - models: providerModelsFromSettings( - [], - PROVIDER, - customModels, - DEFAULT_OPENCODE_MODEL_CAPABILITIES, - ), - probe: { - installed: failure.installed, - version, - status: "error", - auth: { status: "unknown" }, - message: failure.message, - }, - }); - }; - - if (!input.settings.enabled) { - return buildServerProvider({ - provider: PROVIDER, - enabled: false, - checkedAt, - models: providerModelsFromSettings( - [], - PROVIDER, - customModels, - DEFAULT_OPENCODE_MODEL_CAPABILITIES, - ), - probe: { - installed: false, - version: null, - status: "warning", - auth: { status: "unknown" }, - message: isExternalServer - ? "OpenCode is disabled in T3 Code settings. A server URL is configured." - : "OpenCode is disabled in T3 Code settings.", - }, - }); - } - - let version: string | null = null; - if (!isExternalServer) { - const versionExit = yield* Effect.exit( - openCodeRuntime - .runOpenCodeCommand({ - binaryPath: input.settings.binaryPath, - args: ["--version"], - }) - .pipe( - Effect.mapError( - (cause) => - new OpenCodeProbeError({ cause, detail: openCodeRuntimeErrorDetail(cause) }), - ), - ), - ); - if (versionExit._tag === "Failure") { - return fallback(Cause.squash(versionExit.cause)); - } - version = parseGenericCliVersion(versionExit.value.stdout) ?? null; - } - - const inventoryExit = yield* Effect.exit( - Effect.scoped( - Effect.gen(function* () { - const server = yield* openCodeRuntime - .connectToOpenCodeServer({ - binaryPath: input.settings.binaryPath, - serverUrl: input.settings.serverUrl, - }) - .pipe( - Effect.mapError( - (cause) => - new OpenCodeProbeError({ cause, detail: openCodeRuntimeErrorDetail(cause) }), - ), - ); - return yield* openCodeRuntime - .loadOpenCodeInventory( - openCodeRuntime.createOpenCodeSdkClient({ - baseUrl: server.url, - directory: input.cwd, - ...(isExternalServer && input.settings.serverPassword - ? 
{ serverPassword: input.settings.serverPassword } - : {}), - }), - ) - .pipe( - Effect.mapError( - (cause) => - new OpenCodeProbeError({ cause, detail: openCodeRuntimeErrorDetail(cause) }), - ), - ); - }), - ), - ); - if (inventoryExit._tag === "Failure") { - return fallback(Cause.squash(inventoryExit.cause), version); - } +export const checkOpenCodeProviderStatus = Effect.fn("checkOpenCodeProviderStatus")(function* ( + openCodeSettings: OpenCodeSettings, + cwd: string, + environment: NodeJS.ProcessEnv = process.env, +): Effect.fn.Return { + const openCodeRuntime = yield* OpenCodeRuntime; + const checkedAt = new Date().toISOString(); + const customModels = openCodeSettings.customModels; + const isExternalServer = openCodeSettings.serverUrl.trim().length > 0; + + const fallback = (cause: unknown, version: string | null = null) => { + const failure = formatOpenCodeProbeError({ + cause, + isExternalServer, + serverUrl: openCodeSettings.serverUrl, + }); + return buildServerProvider({ + presentation: OPENCODE_PRESENTATION, + enabled: openCodeSettings.enabled, + checkedAt, + models: providerModelsFromSettings( + [], + PROVIDER, + customModels, + DEFAULT_OPENCODE_MODEL_CAPABILITIES, + ), + probe: { + installed: failure.installed, + version, + status: "error", + auth: { status: "unknown" }, + message: failure.message, + }, + }); + }; - const models = providerModelsFromSettings( - flattenOpenCodeModels(inventoryExit.value), + if (!openCodeSettings.enabled) { + return buildServerProvider({ + presentation: OPENCODE_PRESENTATION, + enabled: false, + checkedAt, + models: providerModelsFromSettings( + [], PROVIDER, customModels, DEFAULT_OPENCODE_MODEL_CAPABILITIES, + ), + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: isExternalServer + ? "OpenCode is disabled in T3 Code settings. A server URL is configured." 
+ : "OpenCode is disabled in T3 Code settings.", + }, + }); + } + + let version: string | null = null; + if (!isExternalServer) { + const versionExit = yield* Effect.exit( + openCodeRuntime + .runOpenCodeCommand({ + binaryPath: openCodeSettings.binaryPath, + args: ["--version"], + environment, + }) + .pipe( + Effect.mapError( + (cause) => new OpenCodeProbeError({ cause, detail: openCodeRuntimeErrorDetail(cause) }), + ), + ), + ); + if (versionExit._tag === "Failure") { + return fallback(Cause.squash(versionExit.cause)); + } + version = parseGenericCliVersion(versionExit.value.stdout) ?? null; + + if (!version) { + return fallback( + new Error( + `Unable to determine OpenCode version from \`opencode --version\` output. T3 Code requires OpenCode v${MINIMUM_OPENCODE_VERSION} or newer.`, + ), + null, ); - const connectedCount = inventoryExit.value.providerList.connected.length; + } + if (compareCliVersions(version, MINIMUM_OPENCODE_VERSION) < 0) { return buildServerProvider({ - provider: PROVIDER, - enabled: true, + presentation: OPENCODE_PRESENTATION, + enabled: openCodeSettings.enabled, checkedAt, - models, + models: providerModelsFromSettings( + [], + PROVIDER, + customModels, + DEFAULT_OPENCODE_MODEL_CAPABILITIES, + ), probe: { installed: true, version, - status: connectedCount > 0 ? "ready" : "warning", - auth: { - status: connectedCount > 0 ? "authenticated" : "unknown", - type: "opencode", - }, - message: - connectedCount > 0 - ? `${connectedCount} upstream provider${connectedCount === 1 ? "" : "s"} connected through ${isExternalServer ? "the configured OpenCode server" : "OpenCode"}.` - : isExternalServer - ? "Connected to the configured OpenCode server, but it did not report any connected upstream providers." - : "OpenCode is available, but it did not report any connected upstream providers.", + status: "error", + auth: { status: "unknown" }, + message: `OpenCode v${version} is too old. 
Upgrade to v${MINIMUM_OPENCODE_VERSION} or newer.`, }, }); - }); + } + } - const getProviderSettings = serverSettings.getSettings.pipe( - Effect.map((settings) => settings.providers.opencode), - ); + const inventoryExit = yield* Effect.exit( + Effect.scoped( + Effect.gen(function* () { + const server = yield* openCodeRuntime + .connectToOpenCodeServer({ + binaryPath: openCodeSettings.binaryPath, + serverUrl: openCodeSettings.serverUrl, + environment, + }) + .pipe( + Effect.mapError( + (cause) => + new OpenCodeProbeError({ cause, detail: openCodeRuntimeErrorDetail(cause) }), + ), + ); + return yield* openCodeRuntime + .loadOpenCodeInventory( + openCodeRuntime.createOpenCodeSdkClient({ + baseUrl: server.url, + directory: cwd, + ...(isExternalServer && openCodeSettings.serverPassword + ? { serverPassword: openCodeSettings.serverPassword } + : {}), + }), + ) + .pipe( + Effect.mapError( + (cause) => + new OpenCodeProbeError({ cause, detail: openCodeRuntimeErrorDetail(cause) }), + ), + ); + }), + ), + ); + if (inventoryExit._tag === "Failure") { + return fallback(Cause.squash(inventoryExit.cause), version); + } - return yield* makeManagedServerProvider({ - getSettings: getProviderSettings.pipe(Effect.orDie), - streamSettings: serverSettings.streamChanges.pipe( - Stream.map((settings) => settings.providers.opencode), - ), - haveSettingsChanged: (previous, next) => !Equal.equals(previous, next), - initialSnapshot: makePendingOpenCodeProvider, - checkProvider: getProviderSettings.pipe( - Effect.flatMap((settings) => - checkOpenCodeProviderStatus({ - settings, - cwd: serverConfig.cwd, - }), - ), - ), - }); - }), -); + const models = providerModelsFromSettings( + flattenOpenCodeModels(inventoryExit.value), + PROVIDER, + customModels, + DEFAULT_OPENCODE_MODEL_CAPABILITIES, + ); + const connectedCount = inventoryExit.value.providerList.connected.length; + return buildServerProvider({ + presentation: OPENCODE_PRESENTATION, + enabled: true, + checkedAt, + models, + probe: { + 
installed: true, + version, + status: connectedCount > 0 ? "ready" : "warning", + auth: { + status: connectedCount > 0 ? "authenticated" : "unknown", + type: "opencode", + }, + message: + connectedCount > 0 + ? `${connectedCount} upstream provider${connectedCount === 1 ? "" : "s"} connected through ${isExternalServer ? "the configured OpenCode server" : "OpenCode"}.` + : isExternalServer + ? "Connected to the configured OpenCode server, but it did not report any connected upstream providers." + : "OpenCode is available, but it did not report any connected upstream providers.", + }, + }); +}); diff --git a/apps/server/src/provider/Layers/ProviderAdapterConformance.test.ts b/apps/server/src/provider/Layers/ProviderAdapterConformance.test.ts deleted file mode 100644 index 24053e5844e..00000000000 --- a/apps/server/src/provider/Layers/ProviderAdapterConformance.test.ts +++ /dev/null @@ -1,159 +0,0 @@ -import * as NodeServices from "@effect/platform-node/NodeServices"; -import { Effect, Layer, Option } from "effect"; -import { describe, expect, it } from "vitest"; - -import { AmpServerManager } from "../../ampServerManager.ts"; -import { GeminiCliServerManager } from "../../geminiCliServerManager.ts"; -import { ServerConfig } from "../../config.ts"; -import { makeAmpAdapterLive } from "./AmpAdapter.ts"; -import { makeClaudeAdapterLive } from "./ClaudeAdapter.ts"; -import { makeCodexAdapterLive } from "./CodexAdapter.ts"; -import { makeCopilotAdapterLive } from "./CopilotAdapter.ts"; -import { makeCursorAdapterLive } from "./CursorAdapter.ts"; -import { makeGeminiCliAdapterLive } from "./GeminiCliAdapter.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -import { ProviderSessionDirectory } from "../Services/ProviderSessionDirectory.ts"; -import { - getProviderCapabilities, - validateProviderAdapterConformance, -} from "../Services/ProviderAdapter.ts"; -import { AmpAdapter } from "../Services/AmpAdapter.ts"; -import { ClaudeAdapter } from 
"../Services/ClaudeAdapter.ts"; -import { CodexAdapter } from "../Services/CodexAdapter.ts"; -import { CopilotAdapter } from "../Services/CopilotAdapter.ts"; -import { CursorAdapter } from "../Services/CursorAdapter.ts"; -import { GeminiCliAdapter } from "../Services/GeminiCliAdapter.ts"; - -const providerSessionDirectoryTestLayer = Layer.succeed(ProviderSessionDirectory, { - upsert: () => Effect.void, - getProvider: () => - Effect.die(new Error("ProviderSessionDirectory.getProvider is not used in conformance tests")), - getBinding: () => Effect.succeed(Option.none()), - listThreadIds: () => Effect.succeed([]), - listBindings: () => Effect.succeed([]), -}); - -const codexLayer = makeCodexAdapterLive().pipe( - Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), - Layer.provideMerge(providerSessionDirectoryTestLayer), - Layer.provideMerge(ServerSettingsService.layerTest()), - Layer.provideMerge(NodeServices.layer), -); - -const copilotLayer = makeCopilotAdapterLive({ - clientFactory: () => - ({ - start: async () => undefined, - listModels: async () => [], - createSession: async () => { - throw new Error("createSession should not be called in conformance tests"); - }, - resumeSession: async () => { - throw new Error("resumeSession should not be called in conformance tests"); - }, - stop: async () => [], - }) as never, -}).pipe( - Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), - Layer.provideMerge(ServerSettingsService.layerTest()), - Layer.provideMerge(NodeServices.layer), -); - -const claudeLayer = makeClaudeAdapterLive({ - createQuery: () => - ({ - [Symbol.asyncIterator]: async function* () { - yield* [] as never[]; - }, - interrupt: async () => undefined, - setModel: async () => undefined, - setPermissionMode: async () => undefined, - setMaxThinkingTokens: async () => undefined, - close: () => undefined, - }) as never, -}).pipe( - Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), - 
Layer.provideMerge(ServerSettingsService.layerTest()), - Layer.provideMerge(NodeServices.layer), -); - -const cursorLayer = makeCursorAdapterLive().pipe( - Layer.provideMerge(ServerConfig.layerTest(process.cwd(), process.cwd())), - Layer.provideMerge(ServerSettingsService.layerTest()), - Layer.provideMerge(NodeServices.layer), -); - -const geminiLayer = makeGeminiCliAdapterLive({ - manager: new GeminiCliServerManager(), -}).pipe(Layer.provideMerge(ServerSettingsService.layerTest())); - -const ampLayer = makeAmpAdapterLive({ - manager: new AmpServerManager(), -}).pipe(Layer.provideMerge(ServerSettingsService.layerTest())); - -describe("provider adapter conformance", () => { - const cases = [ - { - provider: "codex" as const, - load: () => - Effect.runPromise( - Effect.gen(function* () { - return yield* CodexAdapter; - }).pipe(Effect.provide(codexLayer)), - ), - }, - { - provider: "copilot" as const, - load: () => - Effect.runPromise( - Effect.gen(function* () { - return yield* CopilotAdapter; - }).pipe(Effect.provide(copilotLayer)), - ), - }, - { - provider: "claudeAgent" as const, - load: () => - Effect.runPromise( - Effect.gen(function* () { - return yield* ClaudeAdapter; - }).pipe(Effect.provide(claudeLayer)), - ), - }, - { - provider: "cursor" as const, - load: () => - Effect.runPromise( - Effect.gen(function* () { - return yield* CursorAdapter; - }).pipe(Effect.provide(cursorLayer)), - ), - }, - { - provider: "geminiCli" as const, - load: () => - Effect.runPromise( - Effect.gen(function* () { - return yield* GeminiCliAdapter; - }).pipe(Effect.provide(geminiLayer)), - ), - }, - { - provider: "amp" as const, - load: () => - Effect.runPromise( - Effect.gen(function* () { - return yield* AmpAdapter; - }).pipe(Effect.provide(ampLayer)), - ), - }, - ]; - - it.each(cases)("declares the shared harness matrix for $provider", async ({ provider, load }) => { - const adapter = await load(); - - expect(validateProviderAdapterConformance(adapter)).toEqual([]); - 
expect(adapter.provider).toBe(provider); - expect(adapter.capabilities).toEqual(getProviderCapabilities(provider)); - }); -}); diff --git a/apps/server/src/provider/Layers/ProviderAdapterRegistry.test.ts b/apps/server/src/provider/Layers/ProviderAdapterRegistry.test.ts index 05f52a73b38..eeba158ab42 100644 --- a/apps/server/src/provider/Layers/ProviderAdapterRegistry.test.ts +++ b/apps/server/src/provider/Layers/ProviderAdapterRegistry.test.ts @@ -1,25 +1,31 @@ -import * as NodeServices from "@effect/platform-node/NodeServices"; +import { + defaultInstanceIdForDriver, + ProviderDriverKind, + type ServerProvider, +} from "@t3tools/contracts"; import { it, assert, vi } from "@effect/vitest"; -import { assertFailure } from "@effect/vitest/utils"; -import type { ProviderKind } from "@t3tools/contracts"; -import { Effect, Layer, Stream } from "effect"; - -import { ProviderUnsupportedError } from "../Errors.ts"; -import { ClaudeAdapter, type ClaudeAdapterShape } from "../Services/ClaudeAdapter.ts"; -import { CopilotAdapter, type CopilotAdapterShape } from "../Services/CopilotAdapter.ts"; -import { CodexAdapter, type CodexAdapterShape } from "../Services/CodexAdapter.ts"; -import { CursorAdapter, type CursorAdapterShape } from "../Services/CursorAdapter.ts"; -import { GeminiCliAdapter, type GeminiCliAdapterShape } from "../Services/GeminiCliAdapter.ts"; -import { OpenCodeAdapter, type OpenCodeAdapterShape } from "../Services/OpenCodeAdapter.ts"; -import { AmpAdapter, type AmpAdapterShape } from "../Services/AmpAdapter.ts"; -import { KiloAdapter, type KiloAdapterShape } from "../Services/KiloAdapter.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; + +import { Effect, Layer, PubSub, Stream } from "effect"; + +import type { ClaudeAdapterShape } from "../Services/ClaudeAdapter.ts"; +import type { CodexAdapterShape } from "../Services/CodexAdapter.ts"; +import type { CursorAdapterShape } from "../Services/CursorAdapter.ts"; +import type { 
OpenCodeAdapterShape } from "../Services/OpenCodeAdapter.ts"; import { ProviderAdapterRegistry } from "../Services/ProviderAdapterRegistry.ts"; +import { ProviderInstanceRegistry } from "../Services/ProviderInstanceRegistry.ts"; +import type { ProviderInstance } from "../ProviderDriver.ts"; +import type { TextGenerationShape } from "../../textGeneration/TextGeneration.ts"; import { ProviderAdapterRegistryLive } from "./ProviderAdapterRegistry.ts"; +import * as NodeServices from "@effect/platform-node/NodeServices"; + +const CODEX_DRIVER = ProviderDriverKind.make("codex"); +const CLAUDE_AGENT_DRIVER = ProviderDriverKind.make("claudeAgent"); +const OPENCODE_DRIVER = ProviderDriverKind.make("opencode"); +const CURSOR_DRIVER = ProviderDriverKind.make("cursor"); const fakeCodexAdapter: CodexAdapterShape = { - provider: "codex", - capabilities: getProviderCapabilities("codex"), + provider: CODEX_DRIVER, + capabilities: { sessionModelSwitch: "in-session" }, startSession: vi.fn(), sendTurn: vi.fn(), interruptTurn: vi.fn(), @@ -35,8 +41,8 @@ const fakeCodexAdapter: CodexAdapterShape = { }; const fakeClaudeAdapter: ClaudeAdapterShape = { - provider: "claudeAgent", - capabilities: getProviderCapabilities("claudeAgent"), + provider: CLAUDE_AGENT_DRIVER, + capabilities: { sessionModelSwitch: "in-session" }, startSession: vi.fn(), sendTurn: vi.fn(), interruptTurn: vi.fn(), @@ -51,9 +57,9 @@ const fakeClaudeAdapter: ClaudeAdapterShape = { streamEvents: Stream.empty, }; -const fakeCopilotAdapter: CopilotAdapterShape = { - provider: "copilot", - capabilities: getProviderCapabilities("copilot"), +const fakeOpenCodeAdapter: OpenCodeAdapterShape = { + provider: OPENCODE_DRIVER, + capabilities: { sessionModelSwitch: "in-session" }, startSession: vi.fn(), sendTurn: vi.fn(), interruptTurn: vi.fn(), @@ -69,8 +75,8 @@ const fakeCopilotAdapter: CopilotAdapterShape = { }; const fakeCursorAdapter: CursorAdapterShape = { - provider: "cursor", - capabilities: getProviderCapabilities("cursor"), 
+ provider: CURSOR_DRIVER, + capabilities: { sessionModelSwitch: "in-session" }, startSession: vi.fn(), sendTurn: vi.fn(), interruptTurn: vi.fn(), @@ -85,133 +91,94 @@ const fakeCursorAdapter: CursorAdapterShape = { streamEvents: Stream.empty, }; -const fakeOpenCodeAdapter: OpenCodeAdapterShape = { - provider: "opencode", - capabilities: getProviderCapabilities("opencode"), - startSession: vi.fn(), - sendTurn: vi.fn(), - interruptTurn: vi.fn(), - respondToRequest: vi.fn(), - respondToUserInput: vi.fn(), - stopSession: vi.fn(), - listSessions: vi.fn(), - hasSession: vi.fn(), - readThread: vi.fn(), - rollbackThread: vi.fn(), - stopAll: vi.fn(), - streamEvents: Stream.empty, +// ProviderAdapterRegistryLive is now a facade over ProviderInstanceRegistry — +// it resolves instances from the registry dynamically on every call and +// surfaces the default-instance adapter keyed by its driver kind. To test the +// facade we supply four fake instances whose `instanceId === defaultInstanceIdForDriver(driverKind)` +// so they pass the default-instance filter in `listProviders`. 
+const makeFakeInstance = ( + driverKindString: "codex" | "claudeAgent" | "cursor" | "opencode", + adapter: ProviderInstance["adapter"], +): ProviderInstance => { + const driverKind = ProviderDriverKind.make(driverKindString); + return { + instanceId: defaultInstanceIdForDriver(driverKind), + driverKind, + continuationIdentity: { + driverKind, + continuationKey: `${driverKind}:instance:${defaultInstanceIdForDriver(driverKind)}`, + }, + displayName: undefined, + enabled: true, + snapshot: { + getSnapshot: Effect.succeed({} as unknown as ServerProvider), + refresh: Effect.succeed({} as unknown as ServerProvider), + streamChanges: Stream.empty, + }, + adapter, + textGeneration: {} as unknown as TextGenerationShape, + }; +}; -const fakeGeminiCliAdapter: GeminiCliAdapterShape = { - provider: "geminiCli", - capabilities: getProviderCapabilities("geminiCli"), - startSession: vi.fn(), - sendTurn: vi.fn(), - interruptTurn: vi.fn(), - respondToRequest: vi.fn(), - respondToUserInput: vi.fn(), - stopSession: vi.fn(), - listSessions: vi.fn(), - hasSession: vi.fn(), - readThread: vi.fn(), - rollbackThread: vi.fn(), - stopAll: vi.fn(), - streamEvents: Stream.empty, -}; +const fakeInstances: ReadonlyArray<ProviderInstance> = [ + makeFakeInstance("codex", fakeCodexAdapter), + makeFakeInstance("claudeAgent", fakeClaudeAdapter), + makeFakeInstance("opencode", fakeOpenCodeAdapter), + makeFakeInstance("cursor", fakeCursorAdapter), +]; -const fakeAmpAdapter: AmpAdapterShape = { - provider: "amp", - capabilities: getProviderCapabilities("amp"), - startSession: vi.fn(), - sendTurn: vi.fn(), - interruptTurn: vi.fn(), - respondToRequest: vi.fn(), - respondToUserInput: vi.fn(), - stopSession: vi.fn(), - listSessions: vi.fn(), - hasSession: vi.fn(), - readThread: vi.fn(), - rollbackThread: vi.fn(), - stopAll: vi.fn(), - streamEvents: Stream.empty, -}; - -const fakeKiloAdapter: KiloAdapterShape = { - provider: "kilo", - capabilities: getProviderCapabilities("kilo"), - startSession: vi.fn(), - sendTurn: vi.fn(), 
- interruptTurn: vi.fn(), - respondToRequest: vi.fn(), - respondToUserInput: vi.fn(), - stopSession: vi.fn(), - listSessions: vi.fn(), - hasSession: vi.fn(), - readThread: vi.fn(), - rollbackThread: vi.fn(), - stopAll: vi.fn(), - streamEvents: Stream.empty, -}; +const fakeInstanceRegistryLayer = Layer.succeed(ProviderInstanceRegistry, { + getInstance: (instanceId) => + Effect.succeed(fakeInstances.find((instance) => instance.instanceId === instanceId)), + listInstances: Effect.succeed(fakeInstances), + listUnavailable: Effect.succeed([]), + streamChanges: Stream.empty, + // Tests never drive changes through this fake; acquire a throwaway + // subscription on an unused PubSub so the shape is satisfied. + subscribeChanges: Effect.flatMap(PubSub.unbounded(), (pubsub) => PubSub.subscribe(pubsub)), +}); -const layer = it.layer( - ProviderAdapterRegistryLive.pipe( - Layer.provide( - Layer.mergeAll( - Layer.succeed(CodexAdapter, fakeCodexAdapter), - Layer.succeed(CopilotAdapter, fakeCopilotAdapter), - Layer.succeed(ClaudeAdapter, fakeClaudeAdapter), - Layer.succeed(CursorAdapter, fakeCursorAdapter), - Layer.succeed(OpenCodeAdapter, fakeOpenCodeAdapter), - Layer.succeed(GeminiCliAdapter, fakeGeminiCliAdapter), - Layer.succeed(AmpAdapter, fakeAmpAdapter), - Layer.succeed(KiloAdapter, fakeKiloAdapter), - ), - ), - Layer.provideMerge(NodeServices.layer), - ), +const layer = Layer.mergeAll( + Layer.provide(ProviderAdapterRegistryLive, fakeInstanceRegistryLayer), + NodeServices.layer, ); -layer("ProviderAdapterRegistryLive", (it) => { - it.effect("resolves registered provider adapters", () => +it.layer(layer)("ProviderAdapterRegistryLive", (it) => { + it("resolves adapters and routing metadata from provider instances", () => Effect.gen(function* () { const registry = yield* ProviderAdapterRegistry; - const codex = yield* registry.getByProvider("codex"); - const copilot = yield* registry.getByProvider("copilot"); - const claude = yield* registry.getByProvider("claudeAgent"); - 
const cursor = yield* registry.getByProvider("cursor"); - const opencode = yield* registry.getByProvider("opencode"); - const geminiCli = yield* registry.getByProvider("geminiCli"); - const amp = yield* registry.getByProvider("amp"); - const kilo = yield* registry.getByProvider("kilo"); - - assert.equal(codex, fakeCodexAdapter); - assert.equal(copilot, fakeCopilotAdapter); - assert.equal(claude, fakeClaudeAdapter); - assert.equal(cursor, fakeCursorAdapter); - assert.equal(opencode, fakeOpenCodeAdapter); - assert.equal(geminiCli, fakeGeminiCliAdapter); - assert.equal(amp, fakeAmpAdapter); - assert.equal(kilo, fakeKiloAdapter); + const claudeInstanceId = defaultInstanceIdForDriver(CLAUDE_AGENT_DRIVER); - const providers = yield* registry.listProviders(); - assert.deepEqual(providers, [ - "codex", - "claudeAgent", - "copilot", - "cursor", - "geminiCli", - "opencode", - "amp", - "kilo", + const adapter = yield* registry.getByInstance(claudeInstanceId); + assert.strictEqual(adapter, fakeClaudeAdapter); + + const info = yield* registry.getInstanceInfo(claudeInstanceId); + assert.deepStrictEqual(info, { + instanceId: claudeInstanceId, + driverKind: CLAUDE_AGENT_DRIVER, + displayName: undefined, + accentColor: undefined, + enabled: true, + continuationIdentity: { + driverKind: CLAUDE_AGENT_DRIVER, + continuationKey: "claudeAgent:instance:claudeAgent", + }, + }); + + const instances = yield* registry.listInstances(); + assert.deepStrictEqual(instances, [ + defaultInstanceIdForDriver(CODEX_DRIVER), + claudeInstanceId, + defaultInstanceIdForDriver(OPENCODE_DRIVER), + defaultInstanceIdForDriver(CURSOR_DRIVER), ]); - }), - ); - it.effect("fails with ProviderUnsupportedError for unknown providers", () => - Effect.gen(function* () { - const registry = yield* ProviderAdapterRegistry; - const adapter = yield* registry.getByProvider("unknown" as ProviderKind).pipe(Effect.result); - assertFailure(adapter, new ProviderUnsupportedError({ provider: "unknown" })); - }), - ); + const 
providers = yield* registry.listProviders(); + assert.deepStrictEqual(providers, [ + CODEX_DRIVER, + CLAUDE_AGENT_DRIVER, + OPENCODE_DRIVER, + CURSOR_DRIVER, + ]); + })); }); diff --git a/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts b/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts index 4295ef43937..f2eeaa1aae8 100644 --- a/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts +++ b/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts @@ -1,65 +1,101 @@ /** - * ProviderAdapterRegistryLive - In-memory provider adapter lookup layer. + * ProviderAdapterRegistryLive — facade over `ProviderInstanceRegistry`. * - * Binds provider kinds (codex/claudeAgent/...) to concrete adapter services. - * This layer only performs adapter lookup; it does not route session-scoped - * calls or own provider lifecycle workflows. + * `ProviderAdapterRegistry` historically mapped one `ProviderDriverKind` to one + * adapter via per-driver `AdapterLive` singleton Layers. The per-instance + * refactor moved adapter construction inside each `ProviderDriver.create()`: + * adapters are now bundled on the `ProviderInstance` that the + * `ProviderInstanceRegistry` owns. + * + * This facade fulfills the `ProviderAdapterRegistryShape` contract by doing + * dynamic look-ups against `ProviderInstanceRegistry` on every call. That + * means settings-driven hot-reload shows up here automatically — adding a + * new instance via settings makes `getByInstance` resolve immediately + * without rebuilding the facade. 
* * @module ProviderAdapterRegistryLive */ +import { + defaultInstanceIdForDriver, + ProviderInstanceId, + type ProviderDriverKind, +} from "@t3tools/contracts"; import { Effect, Layer } from "effect"; -import { ProviderUnsupportedError, type ProviderAdapterError } from "../Errors.ts"; -import type { ProviderAdapterShape } from "../Services/ProviderAdapter.ts"; +import { ProviderUnsupportedError } from "../Errors.ts"; +import { ProviderInstanceRegistry } from "../Services/ProviderInstanceRegistry.ts"; import { ProviderAdapterRegistry, type ProviderAdapterRegistryShape, } from "../Services/ProviderAdapterRegistry.ts"; -import { AmpAdapter } from "../Services/AmpAdapter.ts"; -import { ClaudeAdapter } from "../Services/ClaudeAdapter.ts"; -import { CodexAdapter } from "../Services/CodexAdapter.ts"; -import { CopilotAdapter } from "../Services/CopilotAdapter.ts"; -import { CursorAdapter } from "../Services/CursorAdapter.ts"; -import { GeminiCliAdapter } from "../Services/GeminiCliAdapter.ts"; -import { KiloAdapter } from "../Services/KiloAdapter.ts"; -import { OpenCodeAdapter } from "../Services/OpenCodeAdapter.ts"; -export interface ProviderAdapterRegistryLiveOptions { - readonly adapters?: ReadonlyArray>; -} +const makeProviderAdapterRegistry = Effect.fn("makeProviderAdapterRegistry")(function* () { + const registry = yield* ProviderInstanceRegistry; + + const getByInstance: ProviderAdapterRegistryShape["getByInstance"] = (instanceId) => + registry.getInstance(instanceId).pipe( + Effect.flatMap((instance) => + instance === undefined + ? Effect.fail( + new ProviderUnsupportedError({ + provider: instanceId, + }), + ) + : Effect.succeed(instance.adapter), + ), + ); -const makeProviderAdapterRegistry = Effect.fn("makeProviderAdapterRegistry")(function* ( - options?: ProviderAdapterRegistryLiveOptions, -) { - const adapters = - options?.adapters !== undefined - ? 
options.adapters - : [ - yield* CodexAdapter, - yield* ClaudeAdapter, - yield* CopilotAdapter, - yield* CursorAdapter, - yield* GeminiCliAdapter, - yield* OpenCodeAdapter, - yield* AmpAdapter, - yield* KiloAdapter, - ]; - const byProvider = new Map(adapters.map((adapter) => [adapter.provider, adapter])); + const getInstanceInfo: ProviderAdapterRegistryShape["getInstanceInfo"] = (instanceId) => + registry.getInstance(instanceId).pipe( + Effect.flatMap((instance) => + instance === undefined + ? Effect.fail( + new ProviderUnsupportedError({ + provider: instanceId, + }), + ) + : Effect.succeed({ + instanceId: instance.instanceId, + driverKind: instance.driverKind, + displayName: instance.displayName, + accentColor: instance.accentColor, + enabled: instance.enabled, + continuationIdentity: instance.continuationIdentity, + }), + ), + ); - const getByProvider: ProviderAdapterRegistryShape["getByProvider"] = (provider) => { - const adapter = byProvider.get(provider); - if (!adapter) { - return Effect.fail(new ProviderUnsupportedError({ provider })); - } - return Effect.succeed(adapter); - }; + const listInstances: ProviderAdapterRegistryShape["listInstances"] = () => + registry.listInstances.pipe( + Effect.map((instances) => instances.map((instance) => instance.instanceId)), + ); const listProviders: ProviderAdapterRegistryShape["listProviders"] = () => - Effect.sync(() => Array.from(byProvider.keys())); + registry.listInstances.pipe( + Effect.map((instances) => { + const kinds = new Set<ProviderDriverKind>(); + for (const instance of instances) { + const defaultId = defaultInstanceIdForDriver(instance.driverKind); + if (instance.instanceId === defaultId) { + // Only the default-instance rows show up through the legacy + // shim — custom instances like `codex_personal` have no + // `ProviderDriverKind` equivalent. 
+ kinds.add(instance.driverKind); + } + } + return Array.from(kinds); + }), + ); return { - getByProvider, + getByInstance, + getInstanceInfo, + listInstances, listProviders, + // Proxy directly — the facade has no state of its own; the instance + // registry already coalesces adds/removes/rebuilds into one emission. + streamChanges: registry.streamChanges, + subscribeChanges: registry.subscribeChanges, } satisfies ProviderAdapterRegistryShape; }); @@ -67,3 +103,14 @@ export const ProviderAdapterRegistryLive = Layer.effect( ProviderAdapterRegistry, makeProviderAdapterRegistry(), ); + +// Exposed for tests that want to build a facade over a pre-assembled +// `ProviderInstanceRegistry` without pulling in the whole boot graph. +export { makeProviderAdapterRegistry }; + +// Re-export for consumers that need the accessor shape. The service tag +// itself lives in `Services/ProviderAdapterRegistry.ts`. +export { ProviderAdapterRegistry } from "../Services/ProviderAdapterRegistry.ts"; +// Re-export for consumers (including tests) that construct a +// `ProviderInstanceId` before calling `getByInstance`. +export { ProviderInstanceId }; diff --git a/apps/server/src/provider/Layers/ProviderEventLoggers.ts b/apps/server/src/provider/Layers/ProviderEventLoggers.ts new file mode 100644 index 00000000000..4a15fdd6852 --- /dev/null +++ b/apps/server/src/provider/Layers/ProviderEventLoggers.ts @@ -0,0 +1,83 @@ +/** + * ProviderEventLoggers — single observability service that owns the two + * shared NDJSON streams the provider runtime writes: + * + * - `native` — provider-protocol events as the SDK emits them, written + * from inside each `Adapter` factory. + * - `canonical` — runtime events after `ProviderService` has normalized + * them onto `ProviderRuntimeEvent`. + * + * Why a service tag and not constructor options? + * + * - Adapters are now constructed *inside* drivers (`Driver.create()`), + * not at the boot Layer. 
There is no longer a single `makeAdapterLive(options)` + * call site where we can hand an `EventNdjsonLogger` in by hand. + * - Multiple driver instances per kind (`codex_personal`, `codex_work`) + * should share one underlying log writer per stream — opening N writers + * against the same rotating file would race the rotation logic. Owning + * the loggers on a single tag keeps that invariant intact. + * - Tests can swap one (or both) loggers with in-memory recorders by + * `Layer.succeed(ProviderEventLoggers, { native, canonical })` instead of + * juggling per-Layer option threading. + * + * Both fields are optional. `makeEventNdjsonLogger` returns `undefined` when + * the target directory cannot be created; we forward that as `undefined` + * rather than failing the boot Layer, matching the previous best-effort + * behavior of `server.ts`. + * + * @module provider/Layers/ProviderEventLoggers + */ +import { Context, Effect, Layer } from "effect"; + +import { ServerConfig } from "../../config.ts"; +import { type EventNdjsonLogger, makeEventNdjsonLogger } from "./EventNdjsonLogger.ts"; + +export interface ProviderEventLoggersShape { + readonly native: EventNdjsonLogger | undefined; + readonly canonical: EventNdjsonLogger | undefined; +} + +/** + * Shared logger pair for native + canonical provider event streams. + * + * Service value is intentionally a struct of two optional loggers rather + * than two parallel tags. Construction site is one place + * (`ProviderEventLoggersLive`); consumers (drivers, `ProviderService`) read + * one tag and pluck the field they need. + */ +export class ProviderEventLoggers extends Context.Service< + ProviderEventLoggers, + ProviderEventLoggersShape +>()("t3/provider/ProviderEventLoggers") {} + +/** + * Constant value used by tests / boot layers that want to opt out of native + * + canonical logging entirely. Keeps the tag non-optional in the type + * system while letting the runtime treat absence as a no-op. 
+ */ +export const NoOpProviderEventLoggers: ProviderEventLoggersShape = { + native: undefined, + canonical: undefined, +}; + +/** + * Live Layer that builds both loggers from `ServerConfig.providerEventLogPath`. + * If the directory create fails for either stream, the corresponding field + * is `undefined` and writes from that stream become no-ops downstream. + */ +export const ProviderEventLoggersLive = Layer.effect( + ProviderEventLoggers, + Effect.gen(function* () { + const { providerEventLogPath } = yield* ServerConfig; + const native = yield* makeEventNdjsonLogger(providerEventLogPath, { + stream: "native", + }); + const canonical = yield* makeEventNdjsonLogger(providerEventLogPath, { + stream: "canonical", + }); + return { + native, + canonical, + } satisfies ProviderEventLoggersShape; + }), +); diff --git a/apps/server/src/provider/Layers/ProviderInstanceRegistryHydration.ts b/apps/server/src/provider/Layers/ProviderInstanceRegistryHydration.ts new file mode 100644 index 00000000000..b6c44306f93 --- /dev/null +++ b/apps/server/src/provider/Layers/ProviderInstanceRegistryHydration.ts @@ -0,0 +1,176 @@ +/** + * ProviderInstanceRegistryHydration — derive a `ProviderInstanceConfigMap` + * from `ServerSettings` and keep `ProviderInstanceRegistry` in sync with it. + * + * The server still reads two shapes: + * + * 1. `settings.providerInstances` — the new driver-agnostic map the + * registry expects. Keyed by `ProviderInstanceId`, values are + * `ProviderInstanceConfig` envelopes. + * 2. `settings.providers.` — the legacy single-instance-per-driver + * fields (`providers.codex`, `providers.claudeAgent`, …). These are + * the source of truth for every deployment that hasn't been migrated + * yet to an explicit `providerInstances` entry. + * + * This module bridges (2) into (1) and wires the resulting map into a + * mutable registry. 
For every built-in driver whose id is not already + * present in `providerInstances` (keyed on + * `defaultInstanceIdForDriver(driverKind)` — literally the driver kind as a + * routing slug), we synthesize an envelope from the legacy field. The + * registry decodes both flavours through the same `configSchema` and ends + * up with one uniform `ProviderInstance` per entry. + * + * Explicit `providerInstances` entries always win — users can already + * override the legacy `providers.` blob by authoring a + * `providerInstances.codex` entry with a matching driver, and we don't + * want the synthesized envelope to silently stomp their config. + * + * Hot-reload + * ---------- + * On layer build we: + * 1. Read the current `ServerSettings` once and use it to seed the + * registry's initial state via `ProviderInstanceRegistryMutableLayer`. + * 2. Fork a daemon fiber (lifetime tied to the layer's scope) that + * subscribes to `ServerSettingsService.streamChanges` and calls + * `ProviderInstanceRegistryMutator.reconcile` on every emission. + * + * Failures inside the watcher are logged and swallowed so a single bad + * settings emission cannot kill the registry. Unknown drivers and invalid + * configs already round-trip through the registry's own "unavailable" + * shadow bucket. 
+ * + * @module provider/Layers/ProviderInstanceRegistryHydration + */ +import { + defaultInstanceIdForDriver, + type ProviderInstanceConfig, + type ProviderInstanceConfigMap, + ServerSettings, +} from "@t3tools/contracts"; +import { Effect, Layer, Stream } from "effect"; + +import { ServerSettingsService } from "../../serverSettings.ts"; +import { BUILT_IN_DRIVERS, type BuiltInDriversEnv } from "../builtInDrivers.ts"; +import { ProviderInstanceRegistry } from "../Services/ProviderInstanceRegistry.ts"; +import { ProviderInstanceRegistryMutator } from "../Services/ProviderInstanceRegistryMutator.ts"; +import { ProviderInstanceRegistryMutableLayer } from "./ProviderInstanceRegistryLive.ts"; + +/** + * Synthesize a `ProviderInstanceConfigMap` from a `ServerSettings` snapshot. + * + * Strategy: + * 1. Copy all explicit `settings.providerInstances` entries verbatim. + * 2. For each built-in driver whose `defaultInstanceIdForDriver(id)` key + * is *not* already in the explicit map, synthesize an entry from the + * matching legacy `settings.providers.` blob. + * + * The returned map is the input the registry consumes; pure & exported + * separately so the hydration logic can be exercised by unit tests + * without layering. + */ +export const deriveProviderInstanceConfigMap = ( + settings: ServerSettings, +): ProviderInstanceConfigMap => { + const merged: Record = { ...settings.providerInstances }; + + for (const driver of BUILT_IN_DRIVERS) { + const instanceId = defaultInstanceIdForDriver(driver.driverKind); + if (instanceId in merged) { + // Explicit `providerInstances` entry for this slot — user-authored + // config always wins over the legacy mirror. + continue; + } + + // Only built-in drivers have a legacy mirror; the registry's + // `providers` struct is keyed on the same literal slug as + // `driverKind`. Access is dynamic (the driver kind is a branded string), + // but it's constrained to `keyof settings.providers` by the union of + // built-in driver kinds. 
+ const legacyKey = driver.driverKind as keyof ServerSettings["providers"]; + const legacyConfig = settings.providers[legacyKey]; + if (legacyConfig === undefined) { + continue; + } + + merged[instanceId] = { + driver: driver.driverKind, + config: legacyConfig, + }; + } + + return merged as ProviderInstanceConfigMap; +}; + +/** + * Layer that consumes `ProviderInstanceRegistryMutator` and forks a + * settings-watcher fiber. The fiber's lifetime is tied to the enclosing + * layer scope (process lifetime in production), so it is interrupted on + * shutdown without leaking. + * + * Errors inside the watcher are logged and swallowed — the registry's own + * "unavailable" bucket already absorbs unknown drivers and invalid + * configs, so the only way the watcher could fail is a settings stream + * tear-down, which logs and exits cleanly. + */ +const SettingsWatcherLive: Layer.Layer< + never, + never, + ProviderInstanceRegistryMutator | ServerSettingsService +> = Layer.effectDiscard( + Effect.gen(function* () { + const mutator = yield* ProviderInstanceRegistryMutator; + const serverSettings = yield* ServerSettingsService; + yield* serverSettings.streamChanges.pipe( + Stream.runForEach((next) => + mutator + .reconcile(deriveProviderInstanceConfigMap(next)) + .pipe( + Effect.catchCause((cause) => + Effect.logError("ProviderInstanceRegistry reconcile failed", cause), + ), + ), + ), + Effect.forkScoped, + ); + }), +); + +/** + * Hydrate `ProviderInstanceRegistry` from `ServerSettings` and keep it in + * sync with subsequent `streamChanges` emissions. + * + * The Layer's two halves: + * - `ProviderInstanceRegistryMutableLayer` produces the registry + + * mutator from the initial config map. Its scope owns every + * per-instance child scope created during reconcile. + * - `SettingsWatcherLive` consumes the mutator and runs a daemon fiber + * in the same scope. 
+ * + * Composing via `Layer.provideMerge` makes the watcher's deps available + * from the mutable layer while still surfacing the registry as an output. + * The mutator tag is technically also exposed; only this module imports + * it, so the visibility leak is harmless in practice. + */ +export const ProviderInstanceRegistryHydrationLive: Layer.Layer< + ProviderInstanceRegistry, + never, + BuiltInDriversEnv | ServerSettingsService +> = Layer.unwrap( + Effect.gen(function* () { + const serverSettings = yield* ServerSettingsService; + const initialSettings: ServerSettings | undefined = yield* serverSettings.getSettings.pipe( + Effect.orElseSucceed(() => undefined), + ); + const initialConfigMap = + initialSettings === undefined + ? ({} as ProviderInstanceConfigMap) + : deriveProviderInstanceConfigMap(initialSettings); + + const mutableLayer = ProviderInstanceRegistryMutableLayer({ + drivers: BUILT_IN_DRIVERS, + configMap: initialConfigMap, + }); + + return SettingsWatcherLive.pipe(Layer.provideMerge(mutableLayer)); + }), +) as Layer.Layer<ProviderInstanceRegistry, never, BuiltInDriversEnv | ServerSettingsService>; diff --git a/apps/server/src/provider/Layers/ProviderInstanceRegistryLive.test.ts b/apps/server/src/provider/Layers/ProviderInstanceRegistryLive.test.ts new file mode 100644 index 00000000000..2246a2ae478 --- /dev/null +++ b/apps/server/src/provider/Layers/ProviderInstanceRegistryLive.test.ts @@ -0,0 +1,357 @@ +/** + * Multi-instance validation slices for `ProviderInstanceRegistryLive`. + * + * Two axes of the driver/registry refactor are exercised here: + * + * 1. **Same driver, many instances** — the "multi-instance codex slice" + * describe block below configures two independent `codex` instances and + * asserts each gets its own closures and identity. This is the + * multi-codex capability the refactor exists to unlock. + * + * 2. 
**Many drivers, one registry** — the "all drivers slice" describe + * block below configures one instance of every shipped driver + * (`codex`, `claudeAgent`, `cursor`, `opencode`) in a single + * `ProviderInstanceConfigMap` and asserts the registry boots them all + * without cross-contamination. This proves the driver SPI is uniform + * across every provider — any driver plugs into the registry through + * the same `ProviderDriver` value contract. + * + * Every instance in these tests is configured with `enabled: false` so the + * provider-status checks short-circuit to pending/disabled snapshots + * without trying to spawn real `codex` / `claude` / `agent` / `opencode` + * binaries. That keeps the assertions focused on registry routing + * behaviour rather than the runtime details of each provider. + */ +import { describe, expect, it } from "@effect/vitest"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { + type ClaudeSettings, + type CodexSettings, + type CursorSettings, + type OpenCodeSettings, + ProviderDriverKind, + type ProviderInstanceConfigMap, + ProviderInstanceId, +} from "@t3tools/contracts"; +import { Effect, Layer } from "effect"; + +import { ServerConfig } from "../../config.ts"; +import { ClaudeDriver } from "../Drivers/ClaudeDriver.ts"; +import { CodexDriver } from "../Drivers/CodexDriver.ts"; +import { CursorDriver } from "../Drivers/CursorDriver.ts"; +import { OpenCodeDriver } from "../Drivers/OpenCodeDriver.ts"; +import { OpenCodeRuntimeLive } from "../opencodeRuntime.ts"; +import { NoOpProviderEventLoggers, ProviderEventLoggers } from "./ProviderEventLoggers.ts"; +import { makeProviderInstanceRegistry } from "./ProviderInstanceRegistryLive.ts"; + +const makeCodexConfig = (overrides: Partial): CodexSettings => ({ + enabled: false, + binaryPath: "codex", + homePath: "", + shadowHomePath: "", + customModels: [], + ...overrides, +}); + +const makeClaudeConfig = (overrides: Partial): ClaudeSettings => ({ + enabled: 
false, + binaryPath: "claude", + homePath: "", + customModels: [], + launchArgs: "", + ...overrides, +}); + +const makeCursorConfig = (overrides: Partial): CursorSettings => ({ + enabled: false, + binaryPath: "agent", + apiEndpoint: "", + customModels: [], + ...overrides, +}); + +const makeOpenCodeConfig = (overrides: Partial): OpenCodeSettings => ({ + enabled: false, + binaryPath: "opencode", + serverUrl: "", + serverPassword: "", + customModels: [], + ...overrides, +}); + +describe("ProviderInstanceRegistryLive — multi-instance codex slice", () => { + // `ServerConfig.layerTest` needs `FileSystem` to materialize its scratch + // directory. `Layer.merge` just unions requirements, so we have to push + // `NodeServices.layer` through `Layer.provideMerge` to satisfy that + // dependency while still surfacing NodeServices to the test body (the + // codex driver's `create` yields `ChildProcessSpawner` directly). + const testLayer = ServerConfig.layerTest(process.cwd(), { + prefix: "provider-instance-registry-test", + }).pipe( + Layer.provideMerge(NodeServices.layer), + Layer.provideMerge(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + ); + + it.live("boots two independent codex instances from a ProviderInstanceConfigMap", () => + Effect.gen(function* () { + const personalId = ProviderInstanceId.make("codex_personal"); + const workId = ProviderInstanceId.make("codex_work"); + const codexDriverKind = ProviderDriverKind.make("codex"); + + const configMap: ProviderInstanceConfigMap = { + [personalId]: { + driver: codexDriverKind, + displayName: "Codex (personal)", + enabled: false, + config: makeCodexConfig({ + binaryPath: "/opt/codex-personal/bin/codex", + homePath: "/home/julius/.codex_personal", + customModels: ["personal-preview"], + }), + }, + [workId]: { + driver: codexDriverKind, + displayName: "Codex (work)", + enabled: false, + config: makeCodexConfig({ + binaryPath: "/opt/codex-work/bin/codex", + homePath: "/home/julius/.codex", + customModels: 
["work-preview"], + }), + }, + }; + + const { registry } = yield* makeProviderInstanceRegistry({ + drivers: [CodexDriver], + configMap, + }); + + const instances = yield* registry.listInstances; + expect(instances.map((instance) => instance.instanceId).toSorted()).toEqual( + [personalId, workId].toSorted(), + ); + expect(instances.every((instance) => instance.driverKind === codexDriverKind)).toBe(true); + expect(instances.map((instance) => instance.displayName).toSorted()).toEqual( + ["Codex (personal)", "Codex (work)"].toSorted(), + ); + + // Each instance must be retrievable by id and carry its *own* closures. + const personal = yield* registry.getInstance(personalId); + const work = yield* registry.getInstance(workId); + expect(personal).toBeDefined(); + expect(work).toBeDefined(); + expect(personal!.adapter).not.toBe(work!.adapter); + expect(personal!.textGeneration).not.toBe(work!.textGeneration); + expect(personal!.snapshot).not.toBe(work!.snapshot); + + // Snapshots identify themselves by instanceId + driver — this is + // what makes per-instance routing distinguishable downstream. + const personalSnapshot = yield* personal!.snapshot.getSnapshot; + expect(personalSnapshot.instanceId).toBe(personalId); + expect(personalSnapshot.driver).toBe(codexDriverKind); + expect(personalSnapshot.enabled).toBe(false); + expect(personalSnapshot.continuation?.groupKey).toBe( + "codex:home:/home/julius/.codex_personal", + ); + + const workSnapshot = yield* work!.snapshot.getSnapshot; + expect(workSnapshot.instanceId).toBe(workId); + expect(workSnapshot.driver).toBe(codexDriverKind); + expect(workSnapshot.enabled).toBe(false); + expect(workSnapshot.continuation?.groupKey).toBe("codex:home:/home/julius/.codex"); + + // Nothing goes to the unavailable bucket — both drivers are registered. 
+ const unavailable = yield* registry.listUnavailable; + expect(unavailable).toEqual([]); + }).pipe(Effect.provide(testLayer)), + ); + + it.live( + "shadows instances whose driver is not registered in this build without failing boot", + () => + Effect.gen(function* () { + const codexId = ProviderInstanceId.make("codex_main"); + const ghostId = ProviderInstanceId.make("ghost_main"); + + const configMap: ProviderInstanceConfigMap = { + [codexId]: { + driver: ProviderDriverKind.make("codex"), + enabled: false, + config: makeCodexConfig({}), + }, + [ghostId]: { + driver: ProviderDriverKind.make("ghostDriver"), + displayName: "A fork-only driver we don't ship", + enabled: false, + config: { arbitrary: "payload", preserved: true }, + }, + }; + + const { registry } = yield* makeProviderInstanceRegistry({ + drivers: [CodexDriver], + configMap, + }); + + const instances = yield* registry.listInstances; + expect(instances).toHaveLength(1); + expect(instances[0]!.instanceId).toBe(codexId); + + const unavailable = yield* registry.listUnavailable; + expect(unavailable).toHaveLength(1); + const ghost = unavailable[0]!; + expect(ghost.instanceId).toBe(ghostId); + expect(ghost.driver).toBe("ghostDriver"); + expect(ghost.availability).toBe("unavailable"); + expect(ghost.unavailableReason).toMatch(/ghostDriver/); + }).pipe(Effect.provide(testLayer)), + ); +}); + +describe("ProviderInstanceRegistryLive — all drivers slice", () => { + // All four drivers need `NodeServices` (ChildProcessSpawner + FileSystem + + // Path). `OpenCodeDriver.create` additionally yields `OpenCodeRuntime` + // at construction time, so we wire `OpenCodeRuntimeLive` into the stack. + // `OpenCodeRuntimeLive` bundles its own `NetService.layer` via + // `Layer.provide`, so the only external requirement it still exposes is + // `ChildProcessSpawner` — resolved here by piping it through + // `provideMerge(NodeServices.layer)`. 
+ // + // The nested `provideMerge`s read bottom-up: `NodeServices.layer` + // provides `OpenCodeRuntimeLive`'s deps while keeping its own outputs + // surfaced; that merged layer then provides `ServerConfig.layerTest`'s + // `FileSystem` dep while keeping everything else surfaced to the test. + const infraLayer = OpenCodeRuntimeLive.pipe(Layer.provideMerge(NodeServices.layer)); + const testLayer = ServerConfig.layerTest(process.cwd(), { + prefix: "provider-instance-registry-all-drivers-test", + }).pipe( + Layer.provideMerge(infraLayer), + Layer.provideMerge(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + ); + + it.live("boots one instance of every shipped driver from a single config map", () => + Effect.gen(function* () { + const codexId = ProviderInstanceId.make("codex_default"); + const claudeId = ProviderInstanceId.make("claude_default"); + const cursorId = ProviderInstanceId.make("cursor_default"); + const openCodeId = ProviderInstanceId.make("opencode_default"); + + const codexDriverKind = ProviderDriverKind.make("codex"); + const claudeDriverKind = ProviderDriverKind.make("claudeAgent"); + const cursorDriverKind = ProviderDriverKind.make("cursor"); + const openCodeDriverKind = ProviderDriverKind.make("opencode"); + + const configMap: ProviderInstanceConfigMap = { + [codexId]: { + driver: codexDriverKind, + displayName: "Codex", + enabled: false, + config: makeCodexConfig({ homePath: "/home/julius/.codex" }), + }, + [claudeId]: { + driver: claudeDriverKind, + displayName: "Claude", + enabled: false, + config: makeClaudeConfig({ + homePath: "/home/julius/.claude-work", + launchArgs: "--verbose", + }), + }, + [cursorId]: { + driver: cursorDriverKind, + displayName: "Cursor", + enabled: false, + config: makeCursorConfig({}), + }, + [openCodeId]: { + driver: openCodeDriverKind, + displayName: "OpenCode", + enabled: false, + config: makeOpenCodeConfig({}), + }, + }; + + const { registry } = yield* makeProviderInstanceRegistry({ + drivers: 
[CodexDriver, ClaudeDriver, CursorDriver, OpenCodeDriver], + configMap, + }); + + // Every configured instance must materialize — none downgraded to a + // shadow snapshot, because every driver in the map is registered. + const unavailable = yield* registry.listUnavailable; + expect(unavailable).toEqual([]); + + const instances = yield* registry.listInstances; + expect(instances).toHaveLength(4); + expect(instances.map((instance) => instance.instanceId).toSorted()).toEqual( + [codexId, claudeId, cursorId, openCodeId].toSorted(), + ); + + // Instance lookup by id resolves each instance to its own bundle — + // this is how rest-of-server routes turn/session calls in the new + // model. Each driver's bundle carries its advertised `driverKind`. + const codex = yield* registry.getInstance(codexId); + const claude = yield* registry.getInstance(claudeId); + const cursor = yield* registry.getInstance(cursorId); + const openCode = yield* registry.getInstance(openCodeId); + expect(codex?.driverKind).toBe(codexDriverKind); + expect(claude?.driverKind).toBe(claudeDriverKind); + expect(cursor?.driverKind).toBe(cursorDriverKind); + expect(openCode?.driverKind).toBe(openCodeDriverKind); + expect(codex?.displayName).toBe("Codex"); + expect(claude?.displayName).toBe("Claude"); + expect(cursor?.displayName).toBe("Cursor"); + expect(openCode?.displayName).toBe("OpenCode"); + + // Every instance owns its own set of closures — no sharing across + // drivers. `adapter` / `textGeneration` / `snapshot` are all + // distinct references even when two instances happen to share a + // trait (e.g. Cursor + others all use a stub-or-real + // `textGeneration`; they must still be different object values). 
+ const adapters = [codex!.adapter, claude!.adapter, cursor!.adapter, openCode!.adapter]; + expect(new Set(adapters).size).toBe(adapters.length); + const textGenerations = [ + codex!.textGeneration, + claude!.textGeneration, + cursor!.textGeneration, + openCode!.textGeneration, + ]; + expect(new Set(textGenerations).size).toBe(textGenerations.length); + const snapshots = [codex!.snapshot, claude!.snapshot, cursor!.snapshot, openCode!.snapshot]; + expect(new Set(snapshots).size).toBe(snapshots.length); + + // Snapshots identify themselves by `instanceId` + `driver` so + // downstream aggregation in `ProviderRegistry` can tell instances + // apart even when two share a driver. With `enabled: false`, the + // check short-circuits and we get a disabled/pending snapshot back + // — that's enough signal to validate the stamping wrapper without + // spawning real binaries. + const codexSnapshot = yield* codex!.snapshot.getSnapshot; + expect(codexSnapshot.instanceId).toBe(codexId); + expect(codexSnapshot.driver).toBe(codexDriverKind); + expect(codexSnapshot.enabled).toBe(false); + expect(codexSnapshot.continuation?.groupKey).toBe("codex:home:/home/julius/.codex"); + + const claudeSnapshot = yield* claude!.snapshot.getSnapshot; + expect(claudeSnapshot.instanceId).toBe(claudeId); + expect(claudeSnapshot.driver).toBe(claudeDriverKind); + expect(claudeSnapshot.enabled).toBe(false); + expect(claudeSnapshot.continuation?.groupKey).toBe("claude:home:/home/julius/.claude-work"); + + const cursorSnapshot = yield* cursor!.snapshot.getSnapshot; + expect(cursorSnapshot.instanceId).toBe(cursorId); + expect(cursorSnapshot.driver).toBe(cursorDriverKind); + expect(cursorSnapshot.enabled).toBe(false); + expect(cursorSnapshot.continuation?.groupKey).toBe( + `${cursorDriverKind}:instance:${cursorId}`, + ); + + const openCodeSnapshot = yield* openCode!.snapshot.getSnapshot; + expect(openCodeSnapshot.instanceId).toBe(openCodeId); + expect(openCodeSnapshot.driver).toBe(openCodeDriverKind); + 
expect(openCodeSnapshot.enabled).toBe(false); + expect(openCodeSnapshot.continuation?.groupKey).toBe( + `${openCodeDriverKind}:instance:${openCodeId}`, + ); + }).pipe(Effect.provide(testLayer)), + ); +}); diff --git a/apps/server/src/provider/Layers/ProviderInstanceRegistryLive.ts b/apps/server/src/provider/Layers/ProviderInstanceRegistryLive.ts new file mode 100644 index 00000000000..63f687f55b6 --- /dev/null +++ b/apps/server/src/provider/Layers/ProviderInstanceRegistryLive.ts @@ -0,0 +1,434 @@ +/** + * ProviderInstanceRegistryLive — runtime implementation of + * `ProviderInstanceRegistry` plus its sibling mutator. + * + * Materializes every entry in a `ProviderInstanceConfigMap`: + * + * - When the entry's `driver` matches a registered driver, the registry + * decodes the opaque `config` envelope through `driver.configSchema` + * and calls `driver.create()` inside a fresh child scope. The + * resulting `ProviderInstance` is stored keyed by instance id, + * alongside its scope so the entry can be torn down independently. + * - When the entry's `driver` is unknown to this build (fork, rollback, + * in-flight PR branch), the registry emits an `"unavailable"` shadow + * `ServerProvider` snapshot instead of failing. This is what makes + * downgrades and fork-hopping safe per the + * `forward/backward compatibility invariant` in + * `packages/contracts/src/providerInstance.ts`. + * - When the entry's config fails schema decode, the registry logs and + * emits a shadow snapshot with the schema detail — same bucket as an + * unknown driver. + * + * Unlike the pre-Slice-D layer, the registry now holds mutable state + * (`Ref`s + `PubSub`) and exposes an internal mutator + * (`ProviderInstanceRegistryMutator`) whose `reconcile` method diffs a + * fresh config map against the live state, tearing down removed instances + * and building new ones without disturbing unaffected instances. + * + * Every live instance runs inside its own child `Scope`. 
The registry's
+ * own scope owns all child scopes via finalizers, so closing the registry
+ * tears every instance down in reverse order; closing a single instance
+ * (via `reconcile` removing it) leaves the rest untouched.
+ *
+ * @module provider/Layers/ProviderInstanceRegistryLive
+ */
+import {
+  defaultInstanceIdForDriver,
+  ProviderInstanceId,
+  type ProviderInstanceConfig,
+  type ProviderInstanceConfigMap,
+  type ProviderDriverKind,
+  type ServerProvider,
+} from "@t3tools/contracts";
+import { Context, Effect, Equal, Exit, Layer, PubSub, Ref, Schema, Scope, Stream } from "effect";
+
+import { buildUnavailableProviderSnapshot } from "../unavailableProviderSnapshot.ts";
+import {
+  ProviderInstanceRegistry,
+  type ProviderInstanceRegistryShape,
+} from "../Services/ProviderInstanceRegistry.ts";
+import {
+  ProviderInstanceRegistryMutator,
+  type ProviderInstanceRegistryMutatorShape,
+} from "../Services/ProviderInstanceRegistryMutator.ts";
+import type { AnyProviderDriver, ProviderInstance } from "../ProviderDriver.ts";
+
+/**
+ * Live registry entry: the materialized `ProviderInstance` + the fresh
+ * child scope its `create` effect ran in + the original `entry` envelope
+ * so `reconcile` can cheaply detect "no-op" updates.
+ */
+interface LiveEntry {
+  readonly instance: ProviderInstance;
+  readonly scope: Scope.Closeable;
+  readonly entry: ProviderInstanceConfig;
+}
+
+/**
+ * Internal state shared between the public registry service and the
+ * mutator service. Both services are thin shells around these refs.
+ */
+interface RegistryState {
+  readonly entries: Ref.Ref<ReadonlyMap<ProviderInstanceId, LiveEntry>>;
+  readonly unavailable: Ref.Ref<ReadonlyMap<ProviderInstanceId, ServerProvider>>;
+  readonly changes: PubSub.PubSub<void>;
+}
+
+/**
+ * Structural equality on `ProviderInstanceConfig` envelopes. Used by
+ * `reconcile` to skip rebuilds when settings arrive unchanged. 
Config
+ * payloads are opaque `unknown` at the envelope layer; `Equal.equals`
+ * falls back to structural equality for plain records, which matches how
+ * the schema decode output is constructed.
+ */
+const entryEqual = (a: ProviderInstanceConfig, b: ProviderInstanceConfig): boolean =>
+  Equal.equals(a, b);
+
+const decodedConfigEnabled = (config: unknown): boolean | undefined => {
+  if (!config || typeof config !== "object" || globalThis.Array.isArray(config)) {
+    return undefined;
+  }
+  const enabled = (config as { readonly enabled?: unknown }).enabled;
+  return typeof enabled === "boolean" ? enabled : undefined;
+};
+
+/**
+ * Build one live entry from a raw config envelope. Returns a tagged
+ * result: either `kind: "live"` carrying a `LiveEntry`, or
+ * `kind: "unavailable"` carrying a shadow snapshot — callers dispatch to
+ * the appropriate Ref bucket.
+ */
+const buildEntry = <R>(input: {
+  readonly driversById: ReadonlyMap<ProviderDriverKind, AnyProviderDriver<R>>;
+  readonly parentScope: Scope.Scope;
+  readonly instanceId: ProviderInstanceId;
+  readonly rawInstanceId: string;
+  readonly entry: ProviderInstanceConfig;
+}): Effect.Effect<
+  | { readonly kind: "live"; readonly live: LiveEntry }
+  | { readonly kind: "unavailable"; readonly snapshot: ServerProvider },
+  never,
+  R
+> =>
+  Effect.gen(function* () {
+    const { driversById, parentScope, instanceId, rawInstanceId, entry } = input;
+    const driver = driversById.get(entry.driver);
+    if (!driver) {
+      return {
+        kind: "unavailable" as const,
+        snapshot: buildUnavailableProviderSnapshot({
+          driverKind: entry.driver,
+          instanceId,
+          displayName: entry.displayName,
+          accentColor: entry.accentColor,
+          reason: `Driver '${entry.driver}' is not registered in this build.`,
+        }),
+      };
+    }
+
+    const decoder = Schema.decodeUnknownEffect(driver.configSchema);
+    const decodeResult = yield* decoder(entry.config ?? driver.defaultConfig()).pipe(Effect.result);
+    if (decodeResult._tag === "Failure") {
+      const issue = decodeResult.failure;
+      const detail = issue.message ?? 
String(issue); + yield* Effect.logError("Failed to decode provider instance config", { + instanceId: rawInstanceId, + driver: entry.driver, + detail, + }); + return { + kind: "unavailable" as const, + snapshot: buildUnavailableProviderSnapshot({ + driverKind: entry.driver, + instanceId, + displayName: entry.displayName, + accentColor: entry.accentColor, + reason: `Invalid config for instance '${rawInstanceId}': ${detail}`, + }), + }; + } + + const typedConfig = decodeResult.success; + const childScope = yield* Scope.make(); + // Attach the child scope to the registry's parent scope: if the + // registry scope closes, each surviving instance's child scope is + // closed through this finalizer. `reconcile` manually closes the + // child scope on remove/replace; subsequent close via the parent's + // finalizer is a no-op because `Scope.close` is idempotent. + yield* Scope.addFinalizer(parentScope, Scope.close(childScope, Exit.void).pipe(Effect.ignore)); + + const createResult = yield* driver + .create({ + instanceId, + displayName: entry.displayName, + accentColor: entry.accentColor, + environment: entry.environment ?? [], + enabled: entry.enabled ?? decodedConfigEnabled(typedConfig) ?? 
true,
+        config: typedConfig,
+      })
+      .pipe(Effect.provideService(Scope.Scope, childScope), Effect.result);
+    if (createResult._tag === "Failure") {
+      yield* Effect.logError("Failed to create provider instance", {
+        instanceId: rawInstanceId,
+        driver: entry.driver,
+        detail: createResult.failure.detail,
+      });
+      yield* Scope.close(childScope, Exit.void).pipe(Effect.ignore);
+      return {
+        kind: "unavailable" as const,
+        snapshot: buildUnavailableProviderSnapshot({
+          driverKind: entry.driver,
+          instanceId,
+          displayName: entry.displayName,
+          accentColor: entry.accentColor,
+          reason: `Driver '${entry.driver}' failed to create instance: ${createResult.failure.detail}`,
+        }),
+      };
+    }
+
+    return {
+      kind: "live" as const,
+      live: {
+        instance: createResult.success,
+        scope: childScope,
+        entry,
+      },
+    };
+  });
+
+/**
+ * Reconcile-only implementation of the mutator. Exposed to the hydration
+ * layer; never called directly by the rest of the server.
+ */
+const makeReconcile = <R>(input: {
+  readonly state: RegistryState;
+  readonly driversById: ReadonlyMap<ProviderDriverKind, AnyProviderDriver<R>>;
+  readonly parentScope: Scope.Scope;
+}): ((configMap: ProviderInstanceConfigMap) => Effect.Effect<void, never, R>) => {
+  const { state, driversById, parentScope } = input;
+  return (configMap: ProviderInstanceConfigMap) =>
+    Effect.gen(function* () {
+      const previousEntries = yield* Ref.get(state.entries);
+      const previousUnavailable = yield* Ref.get(state.unavailable);
+      const nextRaw = Object.entries(configMap);
+      const nextKeys = new Set<ProviderInstanceId>(
+        nextRaw.map(([raw]) => ProviderInstanceId.make(raw)),
+      );
+
+      // 1. Close scopes for instances that disappeared or whose config
+      // changed. Do this BEFORE creating replacements so ids map 1-to-1
+      // to live scopes at all times. 
+      const removedIds: Array<ProviderInstanceId> = [];
+      const replacedIds = new Set<ProviderInstanceId>();
+      for (const [instanceId, live] of previousEntries) {
+        if (!nextKeys.has(instanceId)) {
+          removedIds.push(instanceId);
+          continue;
+        }
+        const nextEntry = configMap[instanceId];
+        if (nextEntry !== undefined && !entryEqual(live.entry, nextEntry)) {
+          replacedIds.add(instanceId);
+        }
+      }
+      for (const id of [...removedIds, ...replacedIds]) {
+        const live = previousEntries.get(id);
+        if (live) {
+          yield* Scope.close(live.scope, Exit.void).pipe(Effect.ignore);
+        }
+      }
+
+      // 2. Build additions and replacements. Walk `nextRaw` so the final
+      // entry order follows settings-author order.
+      const builtEntries = new Map<ProviderInstanceId, LiveEntry>();
+      const builtUnavailable = new Map<ProviderInstanceId, ServerProvider>();
+      let orderChanged = false;
+      const previousOrder = [...previousEntries.keys()];
+      const nextOrder: Array<ProviderInstanceId> = [];
+
+      for (const [rawInstanceId, entry] of nextRaw) {
+        const instanceId = ProviderInstanceId.make(rawInstanceId);
+        nextOrder.push(instanceId);
+
+        const existing = previousEntries.get(instanceId);
+        if (existing !== undefined && !replacedIds.has(instanceId)) {
+          // No-op update: keep the existing live entry and scope. 
+ builtEntries.set(instanceId, existing); + continue; + } + + const result = yield* buildEntry({ + driversById, + parentScope, + instanceId, + rawInstanceId, + entry, + }); + if (result.kind === "live") { + builtEntries.set(instanceId, result.live); + } else { + builtUnavailable.set(instanceId, result.snapshot); + } + } + + if (previousOrder.length === nextOrder.length) { + for (let i = 0; i < previousOrder.length; i++) { + if (previousOrder[i] !== nextOrder[i]) { + orderChanged = true; + break; + } + } + } else { + orderChanged = true; + } + + const entriesChanged = + orderChanged || + removedIds.length > 0 || + replacedIds.size > 0 || + builtEntries.size !== previousEntries.size; + const unavailableChanged = + builtUnavailable.size !== previousUnavailable.size || + [...builtUnavailable].some(([id, snapshot]) => { + const prev = previousUnavailable.get(id); + return prev === undefined || !Equal.equals(prev, snapshot); + }) || + [...previousUnavailable].some(([id]) => !builtUnavailable.has(id)); + + yield* Ref.set(state.entries, builtEntries); + yield* Ref.set(state.unavailable, builtUnavailable); + + if (entriesChanged || unavailableChanged) { + yield* PubSub.publish(state.changes, undefined); + } + }); +}; + +/** + * Build the registry's runtime state from a concrete configMap. Returns a + * record containing: + * + * - `registry`: the read-only `ProviderInstanceRegistryShape` to expose + * under `ProviderInstanceRegistry`. + * - `mutator`: the `ProviderInstanceRegistryMutatorShape` to expose + * under `ProviderInstanceRegistryMutator`. + * - `reconcile`: the raw reconcile function, provided for convenience so + * boot-time layers can hydrate an initial map before publishing the + * services. + * + * The scope that this effect runs in owns every per-instance child scope + * created during `reconcile`. Closing that scope closes every live + * instance. 
+ */
+export const makeProviderInstanceRegistry = <R>(input: {
+  readonly drivers: ReadonlyArray<AnyProviderDriver<R>>;
+  readonly configMap: ProviderInstanceConfigMap;
+}): Effect.Effect<
+  {
+    readonly registry: ProviderInstanceRegistryShape;
+    readonly mutator: ProviderInstanceRegistryMutatorShape;
+  },
+  never,
+  R | Scope.Scope
+> =>
+  Effect.gen(function* () {
+    const driversById = new Map<ProviderDriverKind, AnyProviderDriver<R>>(
+      input.drivers.map((driver) => [driver.driverKind, driver]),
+    );
+
+    // Capture the enclosing scope so per-instance child scopes can be
+    // attached to it at `reconcile` time. Without this, `reconcile`
+    // called later (e.g. from the hydration layer) would attach child
+    // scopes to the *caller's* scope instead of the registry's.
+    const parentScope = yield* Scope.Scope;
+
+    // Capture the driver R context at construction time so `reconcile`
+    // can be invoked later without re-providing driver dependencies.
+    // The service tag's declared `reconcile: Effect` hides R from
+    // consumers — we materialize that here.
+    const driverContext = yield* Effect.context<R>();
+
+    const entries = yield* Ref.make<ReadonlyMap<ProviderInstanceId, LiveEntry>>(new Map());
+    const unavailable = yield* Ref.make<ReadonlyMap<ProviderInstanceId, ServerProvider>>(new Map());
+    const changes = yield* PubSub.unbounded<void>();
+    yield* Effect.addFinalizer(() => PubSub.shutdown(changes));
+
+    const state: RegistryState = { entries, unavailable, changes };
+    const reconcileWithR = makeReconcile({ state, driversById, parentScope });
+    const reconcile: ProviderInstanceRegistryMutatorShape["reconcile"] = (configMap) =>
+      reconcileWithR(configMap).pipe(Effect.provideContext(driverContext));
+
+    // Hydrate the initial configMap synchronously so callers can read
+    // `listInstances` immediately after this effect completes. 
+    yield* reconcile(input.configMap);
+
+    const registry: ProviderInstanceRegistryShape = {
+      getInstance: (id) => Ref.get(entries).pipe(Effect.map((map) => map.get(id)?.instance)),
+      listInstances: Ref.get(entries).pipe(
+        Effect.map(
+          (map) =>
+            Array.from(map.values(), (live) => live.instance) as ReadonlyArray<ProviderInstance>,
+        ),
+      ),
+      listUnavailable: Ref.get(unavailable).pipe(
+        Effect.map((map) => Array.from(map.values()) as ReadonlyArray<ServerProvider>),
+      ),
+      // Getters: each read constructs a fresh Stream / Effect descriptor
+      // so multiple consumers don't share a single already-started
+      // Channel or subscription. Matches the pattern `ProviderRegistry`
+      // uses for its own `streamChanges`.
+      get streamChanges() {
+        return Stream.fromPubSub(changes);
+      },
+      // Synchronous subscribe — callers that need to consume changes
+      // from a forked fibre must acquire the subscription in their own
+      // fibre first (via `yield* registry.subscribeChanges`) and only
+      // then fork a consumer loop on `Stream.fromSubscription(...)` /
+      // `PubSub.take(...)`. See the shape docs for the race this avoids.
+      get subscribeChanges() {
+        return PubSub.subscribe(changes);
+      },
+    };
+
+    const mutator: ProviderInstanceRegistryMutatorShape = { reconcile };
+
+    return { registry, mutator };
+  });
+
+/**
+ * Assemble a `ProviderInstanceRegistry` Layer bound to a fixed set of
+ * drivers and a pre-resolved `ProviderInstanceConfigMap`. Used by tests
+ * that want explicit control over the registry's source-of-truth without
+ * wiring up the settings watcher.
+ *
+ * Only exposes the public registry tag — hot-reload consumers should use
+ * `ProviderInstanceRegistryMutableLayer` (below) or the hydration layer. 
+ */ +export const ProviderInstanceRegistryLayer = (input: { + readonly drivers: ReadonlyArray>; + readonly configMap: ProviderInstanceConfigMap; +}): Layer.Layer => + Layer.effect( + ProviderInstanceRegistry, + makeProviderInstanceRegistry(input).pipe(Effect.map((built) => built.registry)), + ) as Layer.Layer; + +/** + * Layer variant that also exposes the mutator tag. Consumed by + * `ProviderInstanceRegistryHydrationLive` to reconcile on settings + * changes. Tests that exercise the mutator directly can pair this Layer + * with a test-local `ServerSettingsService`. + */ +export const ProviderInstanceRegistryMutableLayer = (input: { + readonly drivers: ReadonlyArray>; + readonly configMap: ProviderInstanceConfigMap; +}): Layer.Layer => + Layer.effectContext( + makeProviderInstanceRegistry(input).pipe( + Effect.map(({ registry, mutator }) => + Context.make(ProviderInstanceRegistry, registry).pipe( + Context.add(ProviderInstanceRegistryMutator, mutator), + ), + ), + ), + ) as Layer.Layer; + +export { defaultInstanceIdForDriver }; diff --git a/apps/server/src/provider/Layers/ProviderRegistry.test.ts b/apps/server/src/provider/Layers/ProviderRegistry.test.ts index e29d1ae1957..75f9c42936b 100644 --- a/apps/server/src/provider/Layers/ProviderRegistry.test.ts +++ b/apps/server/src/provider/Layers/ProviderRegistry.test.ts @@ -1,19 +1,28 @@ import * as NodeServices from "@effect/platform-node/NodeServices"; -import { describe, it, assert } from "@effect/vitest"; +import { describe, it, assert, live } from "@effect/vitest"; import { Effect, Exit, Layer, PubSub, Ref, Schema, Scope, Sink, Stream } from "effect"; import * as CodexErrors from "effect-codex-app-server/errors"; import { + ClaudeSettings, + CodexSettings, DEFAULT_SERVER_SETTINGS, + ProviderDriverKind, + ProviderInstanceId, ServerSettings, type ServerProvider, + type ServerProviderSlashCommand, type ServerSettings as ContractServerSettings, } from "@t3tools/contracts"; import * as PlatformError from 
"effect/PlatformError"; import { ChildProcessSpawner } from "effect/unstable/process"; import { deepMerge } from "@t3tools/shared/Struct"; +import { createModelCapabilities } from "@t3tools/shared/model"; import { checkCodexProviderStatus, type CodexAppServerProviderSnapshot } from "./CodexProvider.ts"; -import { checkClaudeProviderStatus, parseClaudeAuthStatusFromOutput } from "./ClaudeProvider.ts"; +import { checkClaudeProviderStatus } from "./ClaudeProvider.ts"; +import { OpenCodeRuntimeLive } from "../opencodeRuntime.ts"; +import { NoOpProviderEventLoggers, ProviderEventLoggers } from "./ProviderEventLoggers.ts"; +import { ProviderInstanceRegistryHydrationLive } from "./ProviderInstanceRegistryHydration.ts"; import { haveProvidersChanged, mergeProviderSnapshot, @@ -21,14 +30,67 @@ import { } from "./ProviderRegistry.ts"; import { ServerConfig } from "../../config.ts"; import { ServerSettingsService, type ServerSettingsShape } from "../../serverSettings.ts"; +import type { ProviderInstance } from "../ProviderDriver.ts"; +import { ProviderInstanceRegistry } from "../Services/ProviderInstanceRegistry.ts"; import { ProviderRegistry } from "../Services/ProviderRegistry.ts"; +const defaultClaudeSettings: ClaudeSettings = Schema.decodeSync(ClaudeSettings)({}); +const defaultCodexSettings: CodexSettings = Schema.decodeSync(CodexSettings)({}); +const disabledCodexSettings: CodexSettings = Schema.decodeSync(CodexSettings)({ + enabled: false, +}); + process.env.T3CODE_CURSOR_ENABLED = "1"; // ── Test helpers ──────────────────────────────────────────────────── const encoder = new TextEncoder(); +function selectDescriptor( + id: string, + label: string, + options: ReadonlyArray<{ id: string; label: string; isDefault?: boolean }>, +) { + return { + id, + label, + type: "select" as const, + options: [...options], + ...(options.find((option) => option.isDefault)?.id + ? 
{ currentValue: options.find((option) => option.isDefault)?.id } + : {}), + }; +} + +function booleanDescriptor(id: string, label: string) { + return { + id, + label, + type: "boolean" as const, + }; +} + +type TestClaudeCapabilities = { + readonly email: string | undefined; + readonly subscriptionType: string | undefined; + readonly tokenSource: string | undefined; + readonly slashCommands: ReadonlyArray; +}; + +function claudeCapabilities(overrides: Partial = {}) { + return () => + Effect.succeed({ + email: undefined, + subscriptionType: undefined, + tokenSource: undefined, + slashCommands: [], + ...overrides, + }); +} + +const noClaudeCapabilities = () => + Effect.sync(() => undefined as TestClaudeCapabilities | undefined); + function mockHandle(result: { stdout: string; stderr: string; code: number }) { return ChildProcessSpawner.makeHandle({ pid: ChildProcessSpawner.ProcessId(1), @@ -46,7 +108,11 @@ function mockHandle(result: { stdout: string; stderr: string; code: number }) { } function mockSpawnerLayer( - handler: (args: ReadonlyArray) => { stdout: string; stderr: string; code: number }, + handler: (args: ReadonlyArray) => { + stdout: string; + stderr: string; + code: number; + }, ) { return Layer.succeed( ChildProcessSpawner.ChildProcessSpawner, @@ -57,6 +123,33 @@ function mockSpawnerLayer( ); } +function recordingMockSpawnerLayer( + handler: (args: ReadonlyArray) => { + stdout: string; + stderr: string; + code: number; + }, +) { + const commands: Array<{ + readonly args: ReadonlyArray; + readonly env: NodeJS.ProcessEnv | undefined; + }> = []; + const layer = Layer.succeed( + ChildProcessSpawner.ChildProcessSpawner, + ChildProcessSpawner.make((command) => { + const cmd = command as unknown as { + args: ReadonlyArray; + options?: { + readonly env?: NodeJS.ProcessEnv; + }; + }; + commands.push({ args: cmd.args, env: cmd.options?.env }); + return Effect.succeed(mockHandle(handler(cmd.args))); + }), + ); + return { layer, commands }; +} + function 
mockCommandSpawnerLayer( handler: ( command: string, @@ -66,7 +159,10 @@ function mockCommandSpawnerLayer( return Layer.succeed( ChildProcessSpawner.ChildProcessSpawner, ChildProcessSpawner.make((command) => { - const cmd = command as unknown as { command: string; args: ReadonlyArray }; + const cmd = command as unknown as { + command: string; + args: ReadonlyArray; + }; return Effect.succeed(mockHandle(handler(cmd.command, cmd.args))); }), ); @@ -88,16 +184,15 @@ function failingSpawnerLayer(description: string) { ); } -const codexModelCapabilities = { - reasoningEffortLevels: [ - { value: "high", label: "High", isDefault: true }, - { value: "low", label: "Low" }, +const codexModelCapabilities = createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "high", label: "High", isDefault: true }, + { id: "low", label: "Low" }, + ]), + booleanDescriptor("fastMode", "Fast Mode"), ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], -} satisfies NonNullable; +}) satisfies NonNullable; function makeCodexProbeSnapshot( input: Partial = {}, @@ -151,638 +246,1074 @@ function makeMutableServerSettingsService( }); } -it.layer( - Layer.mergeAll( - NodeServices.layer, - ServerSettingsService.layerTest(), - ServerConfig.layerTest(process.cwd(), { prefix: "provider-registry-test-" }).pipe( - Layer.provide(NodeServices.layer), - ), - ), -)("ProviderRegistry", (it) => { - describe("checkCodexProviderStatus", () => { - it.effect("uses the app-server account and model list for provider status", () => - Effect.gen(function* () { - const status = yield* checkCodexProviderStatus(() => - Effect.succeed( - makeCodexProbeSnapshot({ - skills: [ - { - name: "github:gh-fix-ci", - path: "/Users/test/.codex/skills/gh-fix-ci/SKILL.md", - enabled: true, - displayName: "CI Debug", - shortDescription: "Debug failing GitHub Actions checks", 
+it.layer(Layer.mergeAll(NodeServices.layer, ServerSettingsService.layerTest()))( + "ProviderRegistry", + (it) => { + describe("checkCodexProviderStatus", () => { + it.effect("uses the app-server account and model list for provider status", () => + Effect.gen(function* () { + const status = yield* checkCodexProviderStatus(defaultCodexSettings, () => + Effect.succeed( + makeCodexProbeSnapshot({ + skills: [ + { + name: "github:gh-fix-ci", + path: "/Users/test/.codex/skills/gh-fix-ci/SKILL.md", + enabled: true, + displayName: "CI Debug", + shortDescription: "Debug failing GitHub Actions checks", + }, + ], + }), + ), + ); + assert.strictEqual(status.status, "ready"); + assert.strictEqual(status.installed, true); + assert.strictEqual(status.version, "1.0.0"); + assert.strictEqual(status.auth.status, "authenticated"); + assert.strictEqual(status.auth.type, "chatgpt"); + assert.strictEqual(status.auth.label, "ChatGPT Pro 20x Subscription"); + assert.strictEqual(status.auth.email, "test@example.com"); + assert.deepStrictEqual(status.models, [ + { + slug: "gpt-live-codex", + name: "GPT Live Codex", + isCustom: false, + capabilities: codexModelCapabilities, + }, + ]); + assert.deepStrictEqual(status.skills, [ + { + name: "github:gh-fix-ci", + path: "/Users/test/.codex/skills/gh-fix-ci/SKILL.md", + enabled: true, + displayName: "CI Debug", + shortDescription: "Debug failing GitHub Actions checks", + }, + ]); + }), + ); + + it.effect("returns unauthenticated when app-server requires OpenAI auth", () => + Effect.gen(function* () { + const status = yield* checkCodexProviderStatus(defaultCodexSettings, () => + Effect.succeed( + makeCodexProbeSnapshot({ + account: { + account: null, + requiresOpenaiAuth: true, }, - ], - }), - ), - ); - - assert.strictEqual(status.provider, "codex"); - assert.strictEqual(status.status, "ready"); - assert.strictEqual(status.installed, true); - assert.strictEqual(status.version, "1.0.0"); - assert.strictEqual(status.auth.status, "authenticated"); - 
assert.strictEqual(status.auth.type, "chatgpt"); - assert.strictEqual(status.auth.label, "ChatGPT Pro 20x Subscription"); - assert.deepStrictEqual(status.models, [ + }), + ), + ); + + assert.strictEqual(status.status, "error"); + assert.strictEqual(status.auth.status, "unauthenticated"); + assert.strictEqual( + status.message, + "Codex CLI is not authenticated. Run `codex login` and try again.", + ); + }), + ); + + it.effect( + "returns ready with unknown auth when app-server does not require OpenAI auth", + () => + Effect.gen(function* () { + const status = yield* checkCodexProviderStatus(defaultCodexSettings, () => + Effect.succeed( + makeCodexProbeSnapshot({ + account: { + account: null, + requiresOpenaiAuth: false, + }, + }), + ), + ); + + assert.strictEqual(status.status, "ready"); + assert.strictEqual(status.auth.status, "unknown"); + }), + ); + + it.effect("returns an api key label for codex api key auth", () => + Effect.gen(function* () { + const status = yield* checkCodexProviderStatus(defaultCodexSettings, () => + Effect.succeed( + makeCodexProbeSnapshot({ + account: { + account: { type: "apiKey" }, + requiresOpenaiAuth: false, + }, + }), + ), + ); + + assert.strictEqual(status.status, "ready"); + assert.strictEqual(status.auth.status, "authenticated"); + assert.strictEqual(status.auth.type, "apiKey"); + assert.strictEqual(status.auth.label, "OpenAI API Key"); + }), + ); + + it.effect("returns unavailable when codex is missing", () => + Effect.gen(function* () { + const status = yield* checkCodexProviderStatus(defaultCodexSettings, () => + Effect.fail( + new CodexErrors.CodexAppServerSpawnError({ + command: "codex app-server", + cause: new Error("spawn codex ENOENT"), + }), + ), + ); + assert.strictEqual(status.status, "error"); + assert.strictEqual(status.installed, false); + assert.strictEqual(status.auth.status, "unknown"); + assert.strictEqual( + status.message, + "Codex CLI (`codex`) is not installed or not on PATH.", + ); + }), + ); + }); + + 
describe("ProviderRegistryLive", () => { + it("treats equal provider snapshots as unchanged", () => { + const providers = [ { - slug: "gpt-live-codex", - name: "GPT Live Codex", - isCustom: false, - capabilities: codexModelCapabilities, + instanceId: ProviderInstanceId.make("codex"), + driver: ProviderDriverKind.make("codex"), + status: "ready", + enabled: true, + installed: true, + auth: { status: "authenticated" }, + checkedAt: "2026-03-25T00:00:00.000Z", + version: "1.0.0", + models: [], + slashCommands: [], + skills: [], }, - ]); - assert.deepStrictEqual(status.skills, [ { - name: "github:gh-fix-ci", - path: "/Users/test/.codex/skills/gh-fix-ci/SKILL.md", + instanceId: ProviderInstanceId.make("claudeAgent"), + driver: ProviderDriverKind.make("claudeAgent"), + status: "warning", enabled: true, - displayName: "CI Debug", - shortDescription: "Debug failing GitHub Actions checks", + installed: true, + auth: { status: "unknown" }, + checkedAt: "2026-03-25T00:00:00.000Z", + version: "1.0.0", + models: [], + slashCommands: [], + skills: [], }, - ]); - }), - ); - - it.effect("returns unauthenticated when app-server requires OpenAI auth", () => - Effect.gen(function* () { - const status = yield* checkCodexProviderStatus(() => - Effect.succeed( - makeCodexProbeSnapshot({ - account: { - account: null, - requiresOpenaiAuth: true, - }, - }), - ), - ); - - assert.strictEqual(status.status, "error"); - assert.strictEqual(status.auth.status, "unauthenticated"); - assert.strictEqual( - status.message, - "Codex CLI is not authenticated. 
Run `codex login` and try again.", - ); - }), - ); - - it.effect("returns ready with unknown auth when app-server does not require OpenAI auth", () => - Effect.gen(function* () { - const status = yield* checkCodexProviderStatus(() => - Effect.succeed( - makeCodexProbeSnapshot({ - account: { - account: null, - requiresOpenaiAuth: false, - }, - }), - ), - ); - - assert.strictEqual(status.status, "ready"); - assert.strictEqual(status.auth.status, "unknown"); - }), - ); - - it.effect("returns an api key label for codex api key auth", () => - Effect.gen(function* () { - const status = yield* checkCodexProviderStatus(() => - Effect.succeed( - makeCodexProbeSnapshot({ - account: { - account: { type: "apiKey" }, - requiresOpenaiAuth: false, - }, - }), - ), - ); - - assert.strictEqual(status.status, "ready"); - assert.strictEqual(status.auth.status, "authenticated"); - assert.strictEqual(status.auth.type, "apiKey"); - assert.strictEqual(status.auth.label, "OpenAI API Key"); - }), - ); - - it.effect("returns unavailable when codex is missing", () => - Effect.gen(function* () { - const status = yield* checkCodexProviderStatus(() => - Effect.fail( - new CodexErrors.CodexAppServerSpawnError({ - command: "codex app-server", - cause: new Error("spawn codex ENOENT"), - }), - ), - ); - assert.strictEqual(status.provider, "codex"); - assert.strictEqual(status.status, "error"); - assert.strictEqual(status.installed, false); - assert.strictEqual(status.auth.status, "unknown"); - assert.strictEqual(status.message, "Codex CLI (`codex`) is not installed or not on PATH."); - }), - ); - }); + ] as const satisfies ReadonlyArray; - describe("ProviderRegistryLive", () => { - it("treats equal provider snapshots as unchanged", () => { - const providers = [ - { - provider: "codex", + assert.strictEqual(haveProvidersChanged(providers, [...providers]), false); + }); + + it("preserves previously discovered provider models when a refresh returns none", () => { + const previousProvider = { + 
instanceId: ProviderInstanceId.make("cursor"), + driver: ProviderDriverKind.make("cursor"), status: "ready", enabled: true, installed: true, auth: { status: "authenticated" }, - checkedAt: "2026-03-25T00:00:00.000Z", - version: "1.0.0", - models: [], + checkedAt: "2026-04-14T00:00:00.000Z", + version: "2026.04.09-f2b0fcd", + models: [ + { + slug: "claude-opus-4-6", + name: "Opus 4.6", + isCustom: false, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoning", "Reasoning", [ + { id: "high", label: "High", isDefault: true }, + ]), + booleanDescriptor("fastMode", "Fast Mode"), + booleanDescriptor("thinking", "Thinking"), + ], + }), + }, + ], slashCommands: [], skills: [], - }, - { - provider: "claudeAgent", - status: "warning", - enabled: true, - installed: true, - auth: { status: "unknown" }, - checkedAt: "2026-03-25T00:00:00.000Z", - version: "1.0.0", + } as const satisfies ServerProvider; + const refreshedProvider = { + ...previousProvider, + checkedAt: "2026-04-14T00:01:00.000Z", models: [], - slashCommands: [], - skills: [], - }, - ] as const satisfies ReadonlyArray; + } satisfies ServerProvider; - assert.strictEqual(haveProvidersChanged(providers, [...providers]), false); - }); + assert.deepStrictEqual(mergeProviderSnapshot(previousProvider, refreshedProvider).models, [ + ...previousProvider.models, + ]); + }); - it.skip("ignores checkedAt-only changes when comparing provider snapshots", () => { - const previousProviders = [ - { - provider: "codex", + it("fills missing capabilities from the previous provider snapshot", () => { + const previousProvider = { + instanceId: ProviderInstanceId.make("cursor"), + driver: ProviderDriverKind.make("cursor"), status: "ready", enabled: true, installed: true, auth: { status: "authenticated" }, - checkedAt: "2026-03-25T00:00:00.000Z", - version: "1.0.0", - message: "Ready", - models: [], + checkedAt: "2026-04-14T00:00:00.000Z", + version: "2026.04.09-f2b0fcd", + models: [ + { + slug: 
"claude-opus-4-6", + name: "Opus 4.6", + isCustom: false, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoning", "Reasoning", [ + { id: "high", label: "High", isDefault: true }, + ]), + booleanDescriptor("fastMode", "Fast Mode"), + booleanDescriptor("thinking", "Thinking"), + ], + }), + }, + ], slashCommands: [], skills: [], - }, - ] as const satisfies ReadonlyArray; - const nextProviders = [ - { - ...previousProviders[0], - checkedAt: "2026-03-25T00:01:00.000Z", - }, - ] as const satisfies ReadonlyArray; - - assert.strictEqual(haveProvidersChanged(previousProviders, nextProviders), false); - }); + } as const satisfies ServerProvider; + const refreshedProvider = { + ...previousProvider, + checkedAt: "2026-04-14T00:01:00.000Z", + models: [ + { + slug: "claude-opus-4-6", + name: "Opus 4.6", + isCustom: false, + capabilities: createModelCapabilities({ + optionDescriptors: [], + }), + }, + ], + } satisfies ServerProvider; - it("preserves previously discovered provider models when a refresh returns none", () => { - const previousProvider = { - provider: "cursor", - status: "ready", - enabled: true, - installed: true, - auth: { status: "authenticated" }, - checkedAt: "2026-04-14T00:00:00.000Z", - version: "2026.04.09-f2b0fcd", - models: [ - { - slug: "claude-opus-4-6", - name: "Opus 4.6", - isCustom: false, - capabilities: { - reasoningEffortLevels: [{ value: "high", label: "High", isDefault: true }], - supportsFastMode: true, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], + assert.deepStrictEqual(mergeProviderSnapshot(previousProvider, refreshedProvider).models, [ + ...previousProvider.models, + ]); + }); + + it.effect("returns the cached provider list when a manual refresh fails", () => + Effect.gen(function* () { + const codexDriver = ProviderDriverKind.make("codex"); + const codexInstanceId = ProviderInstanceId.make("codex"); + const cachedProvider = { + instanceId: 
codexInstanceId, + driver: codexDriver, + status: "ready", + enabled: true, + installed: true, + auth: { status: "authenticated" }, + checkedAt: "2026-04-29T10:00:00.000Z", + version: "1.0.0", + models: [], + slashCommands: [], + skills: [], + } as const satisfies ServerProvider; + const instance = { + instanceId: codexInstanceId, + driverKind: codexDriver, + continuationIdentity: { + driverKind: codexDriver, + continuationKey: "codex:instance:codex", }, - }, - ], - slashCommands: [], - skills: [], - } as const satisfies ServerProvider; - const refreshedProvider = { - ...previousProvider, - checkedAt: "2026-04-14T00:01:00.000Z", - models: [], - } satisfies ServerProvider; - - assert.deepStrictEqual(mergeProviderSnapshot(previousProvider, refreshedProvider).models, [ - ...previousProvider.models, - ]); - }); + displayName: undefined, + enabled: true, + snapshot: { + getSnapshot: Effect.succeed(cachedProvider), + refresh: Effect.die(new Error("simulated refresh failure")), + streamChanges: Stream.empty, + }, + adapter: {} as ProviderInstance["adapter"], + textGeneration: {} as ProviderInstance["textGeneration"], + } satisfies ProviderInstance; + const instanceRegistryLayer = Layer.succeed(ProviderInstanceRegistry, { + getInstance: (instanceId) => + Effect.succeed(instanceId === codexInstanceId ? 
instance : undefined), + listInstances: Effect.succeed([instance]), + listUnavailable: Effect.succeed([]), + streamChanges: Stream.empty, + subscribeChanges: Effect.flatMap(PubSub.unbounded(), (pubsub) => + PubSub.subscribe(pubsub), + ), + }); + const scope = yield* Scope.make(); + yield* Effect.addFinalizer(() => Scope.close(scope, Exit.void)); + const runtimeServices = yield* Layer.build( + ProviderRegistryLive.pipe( + Layer.provideMerge(instanceRegistryLayer), + Layer.provideMerge( + ServerConfig.layerTest(process.cwd(), { + prefix: "t3-provider-registry-refresh-failure-", + }), + ), + Layer.provideMerge(NodeServices.layer), + ), + ).pipe(Scope.provide(scope)); + + yield* Effect.gen(function* () { + const registry = yield* ProviderRegistry; + + assert.deepStrictEqual(yield* registry.getProviders, [cachedProvider]); + assert.deepStrictEqual(yield* registry.refresh(codexDriver), [cachedProvider]); + assert.deepStrictEqual(yield* registry.refreshInstance(codexInstanceId), [ + cachedProvider, + ]); + }).pipe(Effect.provide(runtimeServices)); + }), + ); - it("fills missing capabilities from the previous provider snapshot", () => { - const previousProvider = { - provider: "cursor", - status: "ready", - enabled: true, - installed: true, - auth: { status: "authenticated" }, - checkedAt: "2026-04-14T00:00:00.000Z", - version: "2026.04.09-f2b0fcd", - models: [ - { - slug: "claude-opus-4-6", - name: "Opus 4.6", - isCustom: false, - capabilities: { - reasoningEffortLevels: [{ value: "high", label: "High", isDefault: true }], - supportsFastMode: true, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], + it.effect("keeps consuming registry changes after one sync fails", () => + Effect.gen(function* () { + const codexDriver = ProviderDriverKind.make("codex"); + const codexInstanceId = ProviderInstanceId.make("codex"); + const claudeDriver = ProviderDriverKind.make("claudeAgent"); + const claudeInstanceId = 
ProviderInstanceId.make("claudeAgent"); + const codexProvider = { + instanceId: codexInstanceId, + driver: codexDriver, + status: "ready", + enabled: true, + installed: true, + auth: { status: "authenticated" }, + checkedAt: "2026-04-29T10:00:00.000Z", + version: "1.0.0", + models: [], + slashCommands: [], + skills: [], + } as const satisfies ServerProvider; + const claudeProvider = { + instanceId: claudeInstanceId, + driver: claudeDriver, + status: "ready", + enabled: true, + installed: true, + auth: { status: "authenticated" }, + checkedAt: "2026-04-29T10:01:00.000Z", + version: "1.0.0", + models: [], + slashCommands: [], + skills: [], + } as const satisfies ServerProvider; + const makeInstance = (provider: ServerProvider): ProviderInstance => ({ + instanceId: provider.instanceId, + driverKind: provider.driver, + continuationIdentity: { + driverKind: provider.driver, + continuationKey: `${provider.driver}:instance:${provider.instanceId}`, }, - }, - ], - slashCommands: [], - skills: [], - } as const satisfies ServerProvider; - const refreshedProvider = { - ...previousProvider, - checkedAt: "2026-04-14T00:01:00.000Z", - models: [ - { - slug: "claude-opus-4-6", - name: "Opus 4.6", - isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], + displayName: undefined, + enabled: true, + snapshot: { + getSnapshot: Effect.succeed(provider), + refresh: Effect.succeed(provider), + streamChanges: Stream.empty, }, - }, - ], - } satisfies ServerProvider; + adapter: {} as ProviderInstance["adapter"], + textGeneration: {} as ProviderInstance["textGeneration"], + }); + const codexInstance = makeInstance(codexProvider); + const claudeInstance = makeInstance(claudeProvider); + const changes = yield* PubSub.unbounded(); + const instancesRef = yield* Ref.make>([codexInstance]); + const failNextList = yield* Ref.make(false); + const wait = (millis: number) 
=> + Effect.promise(() => new Promise((resolve) => setTimeout(resolve, millis))); + const instanceRegistryLayer = Layer.succeed(ProviderInstanceRegistry, { + getInstance: (instanceId) => + Ref.get(instancesRef).pipe( + Effect.map((instances) => + instances.find((instance) => instance.instanceId === instanceId), + ), + ), + listInstances: Effect.gen(function* () { + const shouldFail = yield* Ref.get(failNextList); + if (shouldFail) { + yield* Ref.set(failNextList, false); + return yield* Effect.die(new Error("simulated registry list failure")); + } + return yield* Ref.get(instancesRef); + }), + listUnavailable: Effect.succeed([]), + streamChanges: Stream.fromPubSub(changes), + subscribeChanges: PubSub.subscribe(changes), + }); + const scope = yield* Scope.make(); + yield* Effect.addFinalizer(() => Scope.close(scope, Exit.void)); + const runtimeServices = yield* Layer.build( + ProviderRegistryLive.pipe( + Layer.provideMerge(instanceRegistryLayer), + Layer.provideMerge( + ServerConfig.layerTest(process.cwd(), { + prefix: "t3-provider-registry-sync-failure-", + }), + ), + Layer.provideMerge(NodeServices.layer), + ), + ).pipe(Scope.provide(scope)); + + yield* Effect.gen(function* () { + const registry = yield* ProviderRegistry; + assert.deepStrictEqual(yield* registry.getProviders, [codexProvider]); + + yield* Ref.set(failNextList, true); + yield* PubSub.publish(changes, undefined); + + yield* Ref.set(instancesRef, [codexInstance, claudeInstance]); + yield* PubSub.publish(changes, undefined); + + let providers = yield* registry.getProviders; + for ( + let attempt = 0; + attempt < 50 && + !providers.some((provider) => provider.instanceId === claudeInstanceId); + attempt += 1 + ) { + yield* wait(10); + providers = yield* registry.getProviders; + } - assert.deepStrictEqual(mergeProviderSnapshot(previousProvider, refreshedProvider).models, [ - ...previousProvider.models, - ]); - }); + assert.deepStrictEqual( + providers.map((provider) => provider.instanceId).toSorted(), + 
[codexInstanceId, claudeInstanceId].toSorted(), + ); + }).pipe(Effect.provide(runtimeServices)); + }), + ); + + // This test intentionally avoids `mockCommandSpawnerLayer` so the real + // `probeCodexAppServerProvider` path runs — including the full + // `codex app-server` RPC handshake via `CodexClient.layerCommand`. + // We point `binaryPath` at a name that cannot exist on any machine so + // the real `ChildProcessSpawner` deterministically returns ENOENT; the + // probe wraps that as `CodexAppServerSpawnError` and + // `checkCodexProviderStatus` turns it into the user-visible "not + // installed" error snapshot. If the aggregator's `syncLiveSources` + // breaks — the `codex_personal`-never-probes bug we are guarding + // against — that snapshot never lands in `getProviders` and the + // assertions below fail. + it.effect("propagates real Codex probe failures to the aggregator at boot", () => + Effect.gen(function* () { + const missingBinary = `t3code_codex_missing_${process.pid}_${Date.now()}`; + const serverSettings = yield* makeMutableServerSettingsService( + Schema.decodeSync(ServerSettings)( + deepMerge(DEFAULT_SERVER_SETTINGS, { + providers: { + // Disable every built-in probe that would otherwise spawn + // on the CI host. `enabled: false` short-circuits each + // driver's probe *before* it touches the spawner, so the + // test environment stays isolated from the dev + // machine's PATH. + codex: { enabled: false }, + claudeAgent: { enabled: false }, + cursor: { enabled: false }, + opencode: { enabled: false }, + }, + // `providerInstances` keys are branded `ProviderInstanceId`; + // the branded index signature rejects plain string literals + // at the TS level even though the runtime schema happily + // accepts + decodes them. Cast the patch to `unknown` so + // the `Schema.decodeSync` below does the real validation. 
+ providerInstances: { + // Matches the shape the user had in `.t3/dev/settings.json` + // when the bug was reported: a custom enabled Codex instance + // pointing at a binary the server has to actually spawn. + codex_personal: { + driver: "codex", + displayName: "Codex Personal", + enabled: true, + config: { + binaryPath: missingBinary, + homePath: `/tmp/${missingBinary}_home`, + }, + }, + } as unknown as ContractServerSettings["providerInstances"], + }), + ), + ); + const scope = yield* Scope.make(); + yield* Effect.addFinalizer(() => Scope.close(scope, Exit.void)); + const providerRegistryLayer = ProviderRegistryLive.pipe( + Layer.provideMerge(ProviderInstanceRegistryHydrationLive), + Layer.provideMerge(Layer.succeed(ServerSettingsService, serverSettings)), + Layer.provideMerge( + ServerConfig.layerTest(process.cwd(), { + prefix: "t3-provider-registry-", + }), + ), + Layer.provideMerge(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + Layer.provideMerge(OpenCodeRuntimeLive), + // NO spawner mock — `ChildProcessSpawner` is supplied by the + // outer `NodeServices.layer` on `it.layer(...)` and will + // genuinely spawn a subprocess. The missing-binary ENOENT is + // what exercises the same failure mode as a misconfigured + // production `binaryPath`. 
+ ); + const runtimeServices = yield* Layer.build(providerRegistryLayer).pipe( + Scope.provide(scope), + ); + + yield* Effect.gen(function* () { + const registry = yield* ProviderRegistry; + const providers = yield* registry.getProviders; + const codexPersonal = providers.find( + (provider) => provider.instanceId === "codex_personal", + ); + assert.notStrictEqual( + codexPersonal, + undefined, + `Expected the aggregator to know about codex_personal; instead saw: ${providers + .map((provider) => provider.instanceId) + .join(", ")}`, + ); + assert.strictEqual( + codexPersonal?.status, + "error", + "Real Codex probe against a missing binary should surface as 'error' in the aggregator", + ); + assert.strictEqual(codexPersonal?.installed, false); + assert.strictEqual( + codexPersonal?.message, + "Codex CLI (`codex`) is not installed or not on PATH.", + ); + }).pipe(Effect.provide(runtimeServices)); + }), + ); + + // Guards the second half of the reported bug: changing + // `providers.codex.binaryPath` in settings must tear down the live + // instance and rebuild it so a fresh probe runs with the new binary. + // This test drives the real settings stream → registry reconcile → + // aggregator sync pipeline and asserts that `getProviders` reflects + // the new probe's outcome. If `syncLiveSources` stops awaiting the + // rebuilt instance's refresh (previous bug mode), the aggregator + // keeps the old snapshot and this test fails. + // + // `live` (imported from `@effect/vitest`) is used instead of + // `it.effect` so real timers coordinate the fibres that drive the + // settings → reconcile → sync pipeline. Under `it.effect`'s + // TestClock, `Effect.sleep` blocks until `TestClock.adjust`, which + // would require this test to reach into the internals of the + // reconcile pipeline to advance it step by step. 
+ // + // The nested `it` handed to `it.layer(…, (it) => …)` is the + // `MethodsNonLive` variant and therefore lacks `.live`; the + // top-level `live` export from `@effect/vitest` is the equivalent. + live("re-probes when settings change the codex binaryPath", () => + Effect.gen(function* () { + const firstMissing = `t3code_codex_first_${process.pid}_${Date.now()}`; + const secondMissing = `t3code_codex_second_${process.pid}_${Date.now()}`; + const serverSettings = yield* makeMutableServerSettingsService( + Schema.decodeSync(ServerSettings)( + deepMerge(DEFAULT_SERVER_SETTINGS, { + providers: { + codex: { enabled: true, binaryPath: firstMissing }, + claudeAgent: { enabled: false }, + cursor: { enabled: false }, + opencode: { enabled: false }, + }, + }), + ), + ); + const scope = yield* Scope.make(); + yield* Effect.addFinalizer(() => Scope.close(scope, Exit.void)); + const providerRegistryLayer = ProviderRegistryLive.pipe( + Layer.provideMerge(ProviderInstanceRegistryHydrationLive), + Layer.provideMerge(Layer.succeed(ServerSettingsService, serverSettings)), + Layer.provideMerge( + ServerConfig.layerTest(process.cwd(), { + prefix: "t3-provider-registry-", + }), + ), + Layer.provideMerge(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + Layer.provideMerge(OpenCodeRuntimeLive), + // `it.live` does not inherit layers from the outer `it.layer` + // wrapper, so provide `NodeServices.layer` inline. This is the + // same real `ChildProcessSpawner` + `FileSystem` + `Path` + // services that production uses. 
+ Layer.provideMerge(NodeServices.layer), + ); + const runtimeServices = yield* Layer.build(providerRegistryLayer).pipe( + Scope.provide(scope), + ); - it.effect("probes enabled providers in the background during registry startup", () => - Effect.gen(function* () { - let spawnCount = 0; - const serverSettings = yield* makeMutableServerSettingsService( - Schema.decodeSync(ServerSettings)( - deepMerge(DEFAULT_SERVER_SETTINGS, { + yield* Effect.gen(function* () { + const registry = yield* ProviderRegistry; + // Boot-time probe: the default codex instance is enabled with + // `firstMissing`, so the real spawner yields ENOENT and the + // snapshot should be `status: "error"`. What *distinguishes* + // the two probe runs is `checkedAt` — each probe stamps a + // fresh DateTime, so we capture it and assert it advances + // after the settings mutation. + const initialProviders = yield* registry.getProviders; + const initialCodex = initialProviders.find( + (provider) => provider.instanceId === "codex", + ); + assert.strictEqual(initialCodex?.status, "error"); + assert.strictEqual(initialCodex?.installed, false); + const initialCheckedAt = initialCodex?.checkedAt; + assert.notStrictEqual(initialCheckedAt, undefined); + + // Drive a settings change. The Hydration layer's + // `SettingsWatcherLive` consumes this via `streamChanges`, + // calls `reconcile`, which rebuilds the codex instance (the + // envelope changed because `binaryPath` differs → `entryEqual` + // is false). The registry's `Stream.runForEach( + // instanceRegistry.streamChanges, () => syncLiveSources)` + // fires `syncLiveSources`, which subscribes + awaits a fresh + // refresh on the rebuilt instance. 
+ yield* serverSettings.updateSettings({ providers: { - codex: { enabled: false }, - cursor: { enabled: false }, + codex: { enabled: true, binaryPath: secondMissing }, }, - }), - ), - ); - const scope = yield* Scope.make(); - yield* Effect.addFinalizer(() => Scope.close(scope, Exit.void)); - const providerRegistryLayer = ProviderRegistryLive.pipe( - Layer.provideMerge(Layer.succeed(ServerSettingsService, serverSettings)), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3-provider-registry-", - }), - ), - Layer.provideMerge( - mockCommandSpawnerLayer((command, args) => { - spawnCount += 1; - const joined = args.join(" "); - if (joined === "--version") { - return { stdout: "claude 1.0.0\n", stderr: "", code: 0 }; - } - if (joined === "auth status") { - return { stdout: '{"authenticated":true}\n', stderr: "", code: 0 }; + }); + + // Poll with real timers (via `it.live`) until `checkedAt` + // advances or we hit a generous 3-second ceiling. Anything + // slower than that is a regression — the real probe fails + // fast on ENOENT, and the reconcile + sync pipeline is + // purely in-process. 
+ const refreshed = yield* Effect.gen(function* () { + for (let attempts = 0; attempts < 60; attempts += 1) { + const providers = yield* registry.getProviders; + const codex = providers.find((provider) => provider.instanceId === "codex"); + if (codex !== undefined && codex.checkedAt !== initialCheckedAt) { + return providers; + } + yield* Effect.sleep("50 millis"); } - throw new Error(`Unexpected args: ${command} ${joined}`); - }), - ), - ); - const runtimeServices = yield* Layer.build(providerRegistryLayer).pipe( - Scope.provide(scope), - ); - - yield* Effect.gen(function* () { - const registry = yield* ProviderRegistry; - assert.strictEqual(spawnCount > 0, true); - const refreshed = yield* Effect.gen(function* () { - for (let remainingAttempts = 50; remainingAttempts > 0; remainingAttempts -= 1) { + return yield* registry.getProviders; + }); + + const reprobedCodex = refreshed.find((provider) => provider.instanceId === "codex"); + assert.notStrictEqual( + reprobedCodex?.checkedAt, + initialCheckedAt, + "Expected a fresh probe after settings change, got the stale snapshot", + ); + assert.strictEqual(reprobedCodex?.status, "error"); + assert.strictEqual(reprobedCodex?.installed, false); + }).pipe(Effect.provide(runtimeServices)); + }), + ); + + it.effect("includes unavailable instance snapshots in getProviders", () => + Effect.gen(function* () { + const serverSettings = yield* makeMutableServerSettingsService( + Schema.decodeSync(ServerSettings)( + deepMerge(DEFAULT_SERVER_SETTINGS, { + providers: { + codex: { enabled: false }, + claudeAgent: { enabled: false }, + cursor: { enabled: false }, + opencode: { enabled: false }, + }, + providerInstances: { + ghost_main: { + driver: "ghostDriver", + displayName: "A fork-only driver we don't ship", + enabled: false, + config: { arbitrary: "payload" }, + }, + } as unknown as ContractServerSettings["providerInstances"], + }), + ), + ); + const scope = yield* Scope.make(); + yield* Effect.addFinalizer(() => Scope.close(scope, 
Exit.void)); + const providerRegistryLayer = ProviderRegistryLive.pipe( + Layer.provideMerge(ProviderInstanceRegistryHydrationLive), + Layer.provideMerge(Layer.succeed(ServerSettingsService, serverSettings)), + Layer.provideMerge( + ServerConfig.layerTest(process.cwd(), { + prefix: "t3-provider-registry-", + }), + ), + Layer.provideMerge(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + Layer.provideMerge(OpenCodeRuntimeLive), + Layer.provideMerge(NodeServices.layer), + ); + const runtimeServices = yield* Layer.build(providerRegistryLayer).pipe( + Scope.provide(scope), + ); + + yield* Effect.gen(function* () { + const registry = yield* ProviderRegistry; + const providers = yield* registry.getProviders; + const ghost = providers.find((provider) => provider.instanceId === "ghost_main"); + + assert.notStrictEqual(ghost, undefined); + assert.strictEqual(ghost?.driver, "ghostDriver"); + assert.strictEqual(ghost?.availability, "unavailable"); + assert.match(ghost?.unavailableReason ?? 
"", /ghostDriver/); + }).pipe(Effect.provide(runtimeServices)); + }), + ); + + it.effect( + "keeps cursor disabled and skips probing when the provider setting is disabled", + () => + Effect.gen(function* () { + const serverSettings = yield* makeMutableServerSettingsService( + Schema.decodeSync(ServerSettings)( + deepMerge(DEFAULT_SERVER_SETTINGS, { + providers: { + codex: { + enabled: false, + }, + cursor: { + enabled: false, + }, + }, + }), + ), + ); + let cursorSpawned = false; + const scope = yield* Scope.make(); + yield* Effect.addFinalizer(() => Scope.close(scope, Exit.void)); + const providerRegistryLayer = ProviderRegistryLive.pipe( + Layer.provideMerge(ProviderInstanceRegistryHydrationLive), + Layer.provideMerge(Layer.succeed(ServerSettingsService, serverSettings)), + Layer.provideMerge( + ServerConfig.layerTest(process.cwd(), { + prefix: "t3-provider-registry-", + }), + ), + Layer.provideMerge(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + Layer.provideMerge(OpenCodeRuntimeLive), + Layer.provideMerge( + mockCommandSpawnerLayer((command, args) => { + if (command === "agent") { + cursorSpawned = true; + } + const joined = args.join(" "); + if (joined === "--version") { + return { + stdout: `${command} 1.0.0\n`, + stderr: "", + code: 0, + }; + } + if (joined === "auth status") { + return { + stdout: '{"authenticated":true}\n', + stderr: "", + code: 0, + }; + } + throw new Error(`Unexpected args: ${command} ${joined}`); + }), + ), + ); + const runtimeServices = yield* Layer.build( + Layer.mergeAll( + Layer.succeed(ServerSettingsService, serverSettings), + providerRegistryLayer, + ), + ).pipe(Scope.provide(scope)); + + yield* Effect.gen(function* () { + const registry = yield* ProviderRegistry; const providers = yield* registry.getProviders; - const claudeProvider = providers.find( - (provider) => provider.provider === "claudeAgent", + const cursorProvider = providers.find( + (provider) => provider.instanceId === 
ProviderInstanceId.make("cursor"), ); - if (claudeProvider?.status === "ready") { - return providers; - } - yield* Effect.sleep("10 millis"); - } - return yield* registry.getProviders; - }); - assert.strictEqual( - refreshed.find((provider) => provider.provider === "claudeAgent")?.status, - "ready", + + assert.deepStrictEqual(providers.map((provider) => provider.instanceId).toSorted(), [ + "claudeAgent", + "codex", + "cursor", + "opencode", + ]); + assert.strictEqual(cursorProvider?.enabled, false); + assert.strictEqual(cursorProvider?.status, "disabled"); + assert.strictEqual( + cursorProvider?.message, + "Cursor is disabled in T3 Code settings.", + ); + assert.strictEqual(cursorSpawned, false); + }).pipe(Effect.provide(runtimeServices)); + }), + ); + + it.effect("skips codex probes entirely when the provider is disabled", () => + Effect.gen(function* () { + const status = yield* checkCodexProviderStatus(disabledCodexSettings).pipe( + Effect.provide(failingSpawnerLayer("spawn codex ENOENT")), ); - }).pipe(Effect.provide(runtimeServices)); - }), - ); - - it.effect("keeps cursor disabled and skips probing when the provider setting is disabled", () => - Effect.gen(function* () { - const serverSettings = yield* makeMutableServerSettingsService( - Schema.decodeSync(ServerSettings)( - deepMerge(DEFAULT_SERVER_SETTINGS, { - providers: { - codex: { - enabled: false, - }, - cursor: { - enabled: false, - }, - }, - }), - ), - ); - let cursorSpawned = false; - const scope = yield* Scope.make(); - yield* Effect.addFinalizer(() => Scope.close(scope, Exit.void)); - const providerRegistryLayer = ProviderRegistryLive.pipe( - Layer.provideMerge(Layer.succeed(ServerSettingsService, serverSettings)), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3-provider-registry-", - }), - ), - Layer.provideMerge( - mockCommandSpawnerLayer((command, args) => { - if (command === "agent") { - cursorSpawned = true; - } + assert.strictEqual(status.enabled, false); + 
assert.strictEqual(status.status, "disabled"); + assert.strictEqual(status.installed, false); + assert.strictEqual(status.message, "Codex is disabled in T3 Code settings."); + }), + ); + }); + + // ── checkClaudeProviderStatus tests ────────────────────────── + + describe("checkClaudeProviderStatus", () => { + it.effect("returns ready when claude is installed and authenticated", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities(), + ); + assert.strictEqual(status.status, "ready"); + assert.strictEqual(status.installed, true); + assert.strictEqual(status.auth.status, "authenticated"); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { const joined = args.join(" "); - if (joined === "--version") { - return { stdout: `${command} 1.0.0\n`, stderr: "", code: 0 }; - } - if (joined === "auth status") { - return { stdout: '{"authenticated":true}\n', stderr: "", code: 0 }; - } - throw new Error(`Unexpected args: ${command} ${joined}`); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; + if (joined === "auth status") + return { + stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', + stderr: "", + code: 0, + }; + throw new Error(`Unexpected args: ${joined}`); }), ), - ); - const runtimeServices = yield* Layer.build( - Layer.mergeAll( - Layer.succeed(ServerSettingsService, serverSettings), - providerRegistryLayer, + ), + ); + + it.effect( + "includes Claude Opus 4.7 with xhigh as the default effort on supported versions", + () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities(), + ); + const opus47 = status.models.find((model) => model.slug === "claude-opus-4-7"); + if (!opus47) { + assert.fail("Expected Claude Opus 4.7 to be present for Claude Code v2.1.111."); + } + if (!opus47.capabilities) { + assert.fail( + "Expected Claude Opus 4.7 capabilities to be present for Claude 
Code v2.1.111.", + ); + } + const effortDescriptor = opus47.capabilities.optionDescriptors?.find( + (descriptor) => descriptor.type === "select" && descriptor.id === "effort", + ); + assert.deepStrictEqual( + effortDescriptor?.type === "select" + ? effortDescriptor.options.find((option) => option.isDefault) + : undefined, + { id: "xhigh", label: "Extra High", isDefault: true }, + ); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "2.1.111\n", stderr: "", code: 0 }; + if (joined === "auth status") + return { + stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', + stderr: "", + code: 0, + }; + throw new Error(`Unexpected args: ${joined}`); + }), + ), ), - ).pipe(Scope.provide(scope)); + ); - yield* Effect.gen(function* () { - const registry = yield* ProviderRegistry; - const providers = yield* registry.getProviders; - const cursorProvider = providers.find((provider) => provider.provider === "cursor"); - - assert.deepStrictEqual( - providers.map((provider) => provider.provider), - ["codex", "claudeAgent", "opencode", "cursor"], + it.effect("hides Claude Opus 4.7 on older Claude Code versions", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities(), + ); + assert.strictEqual( + status.models.some((model) => model.slug === "claude-opus-4-7"), + false, ); - assert.strictEqual(cursorProvider?.enabled, false); - assert.strictEqual(cursorProvider?.status, "disabled"); - assert.strictEqual(cursorProvider?.message, "Cursor is disabled in T3 Code settings."); - assert.strictEqual(cursorSpawned, false); - }).pipe(Effect.provide(runtimeServices)); - }), - ); - - it.effect.skip("probes Copilot from its default command when binary path is unset", () => - Effect.gen(function* () { - const serverSettingsLayer = ServerSettingsService.layerTest(); - const providerRegistryLayer = ProviderRegistryLive.pipe( - 
Layer.provideMerge(serverSettingsLayer), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3-provider-registry-", + assert.strictEqual( + status.message, + "Claude Code v2.1.110 is too old for Claude Opus 4.7. Upgrade to v2.1.111 or newer to access it.", + ); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "2.1.110\n", stderr: "", code: 0 }; + if (joined === "auth status") + return { + stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', + stderr: "", + code: 0, + }; + throw new Error(`Unexpected args: ${joined}`); }), ), - Layer.provideMerge( - mockCommandSpawnerLayer((command, args) => { + ), + ); + + it.effect("returns a display label for claude subscription types", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities({ subscriptionType: "maxplan" }), + ); + assert.strictEqual(status.status, "ready"); + assert.strictEqual(status.auth.status, "authenticated"); + assert.strictEqual(status.auth.type, "maxplan"); + assert.strictEqual(status.auth.label, "Claude Max Subscription"); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { const joined = args.join(" "); - if (joined === "--version") { - if (command === "codex") { - return { stdout: "codex 1.0.0\n", stderr: "", code: 0 }; - } - if (command === "claude") { - return { stdout: "claude 1.0.0\n", stderr: "", code: 0 }; - } - if (command === "copilot") { - return { stdout: "copilot 2.3.4\n", stderr: "", code: 0 }; - } - return { stdout: "", stderr: "spawn ENOENT", code: 1 }; - } - if (joined === "login status") { - return { stdout: "Logged in\n", stderr: "", code: 0 }; - } - if (joined === "auth status") { - return { stdout: "Authenticated\n", stderr: "", code: 0 }; - } - throw new Error(`Unexpected command: ${command} ${joined}`); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; 
+ if (joined === "auth status") + return { + stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', + stderr: "", + code: 0, + }; + throw new Error(`Unexpected args: ${joined}`); }), ), - ); - - const providers = yield* Effect.gen(function* () { - const registry = yield* ProviderRegistry; - return yield* registry.getProviders; - }).pipe(Effect.provide(providerRegistryLayer)); - - const copilot = providers.find((provider) => provider.provider === "copilot"); - assert.isDefined(copilot); - assert.strictEqual(copilot?.status, "ready"); - assert.strictEqual(copilot?.installed, true); - assert.notStrictEqual( - copilot?.message, - "Copilot is enabled, but no binary path is configured for probing.", - ); - }), - ); - - it.effect.skip("reports cursor as unavailable when its CLI command is missing", () => - Effect.gen(function* () { - const serverSettingsLayer = ServerSettingsService.layerTest({ - providers: { - cursor: { - enabled: true, - binaryPath: "/tmp/t3-missing-cursor-cli", - }, - }, - }); - const providerRegistryLayer = ProviderRegistryLive.pipe( - Layer.provideMerge(serverSettingsLayer), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3-provider-registry-", + ), + ); + + it.effect("does not duplicate Claude in full subscription labels", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities({ + subscriptionType: "Claude Max Subscription", }), - ), - Layer.provideMerge( - mockCommandSpawnerLayer((command, args) => { + ); + assert.strictEqual(status.auth.status, "authenticated"); + assert.strictEqual(status.auth.type, "Claude Max Subscription"); + assert.strictEqual(status.auth.label, "Claude Max Subscription"); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { const joined = args.join(" "); - if (joined === "--version") { - if (command === "codex") { - return { stdout: "codex 1.0.0\n", stderr: "", code: 0 }; - } - if (command === "claude") { - return 
{ stdout: "claude 1.0.0\n", stderr: "", code: 0 }; - } - return { stdout: "", stderr: "spawn ENOENT", code: 1 }; - } - if (joined === "login status") { - return { stdout: "Logged in\n", stderr: "", code: 0 }; - } - if (joined === "auth status") { - return { stdout: "Authenticated\n", stderr: "", code: 0 }; - } - throw new Error(`Unexpected command: ${command} ${joined}`); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; + throw new Error(`Unexpected args: ${joined}`); }), ), - ); - - const providers = yield* Effect.gen(function* () { - const registry = yield* ProviderRegistry; - return yield* registry.getProviders; - }).pipe(Effect.provide(providerRegistryLayer)); - - const cursor = providers.find((provider) => provider.provider === "cursor"); - assert.isDefined(cursor); - assert.strictEqual(cursor?.status, "warning"); - assert.strictEqual(cursor?.installed, false); - assert.strictEqual(cursor?.message, "Cursor CLI not found on PATH."); - }), - ); - - it.effect("serves cached provider snapshots from getProviders without re-probing", () => - Effect.gen(function* () { - let probeCount = 0; - const providerRegistryLayer = ProviderRegistryLive.pipe( - Layer.provideMerge(ServerSettingsService.layerTest()), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3-provider-registry-", + ), + ); + + it.effect("does not duplicate Claude in provider-prefixed subscription names", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities({ + subscriptionType: "Claude Max", }), - ), - Layer.provideMerge( - mockCommandSpawnerLayer((command, args) => { - probeCount += 1; + ); + assert.strictEqual(status.auth.status, "authenticated"); + assert.strictEqual(status.auth.type, "Claude Max"); + assert.strictEqual(status.auth.label, "Claude Max Subscription"); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { const joined = args.join(" "); - if (joined 
=== "--version") { - if (command === "codex") { - return { stdout: "codex 1.0.0\n", stderr: "", code: 0 }; - } - if (command === "claude") { - return { stdout: "claude 1.0.0\n", stderr: "", code: 0 }; - } - return { stdout: "", stderr: "spawn ENOENT", code: 1 }; - } - if (joined === "login status") { - return { stdout: "Logged in\n", stderr: "", code: 0 }; - } - if (joined === "auth status") { - return { stdout: "Authenticated\n", stderr: "", code: 0 }; - } - throw new Error(`Unexpected command: ${command} ${joined}`); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; + throw new Error(`Unexpected args: ${joined}`); }), ), - ); - - yield* Effect.gen(function* () { - const registry = yield* ProviderRegistry; - yield* registry.getProviders; - const initialProbeCount = probeCount; - yield* registry.getProviders; - assert.strictEqual(probeCount, initialProbeCount); - }).pipe(Effect.provide(providerRegistryLayer)); - }), - ); - - it.effect("skips codex probes entirely when the provider is disabled", () => - Effect.gen(function* () { - const serverSettingsLayer = ServerSettingsService.layerTest({ - providers: { - codex: { - enabled: false, - }, - }, - }); + ), + ); - const status = yield* checkCodexProviderStatus().pipe( + it.effect("returns claude auth email from initialization result", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities({ email: "claude@example.com" }), + ); + assert.strictEqual(status.auth.status, "authenticated"); + assert.strictEqual(status.auth.email, "claude@example.com"); + }).pipe( Effect.provide( - Layer.mergeAll(serverSettingsLayer, failingSpawnerLayer("spawn codex ENOENT")), + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; + if (joined === "auth status") + return { + stdout: + 
'{"loggedIn":true,"authMethod":"claude.ai","account":{"email":"claude@example.com"}}\n', + stderr: "", + code: 0, + }; + throw new Error(`Unexpected args: ${joined}`); + }), ), - ); - assert.strictEqual(status.provider, "codex"); - assert.strictEqual(status.enabled, false); - assert.strictEqual(status.status, "disabled"); - assert.strictEqual(status.installed, false); - assert.strictEqual(status.message, "Codex is disabled in T3 Code settings."); - }), - ); - }); - - // ── checkClaudeProviderStatus tests ────────────────────────── - - describe("checkClaudeProviderStatus", () => { - it.effect("returns ready when claude is installed and authenticated", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus(); - assert.strictEqual(status.provider, "claudeAgent"); - assert.strictEqual(status.status, "ready"); - assert.strictEqual(status.installed, true); - assert.strictEqual(status.auth.status, "authenticated"); - }).pipe( - Effect.provide( - mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; - if (joined === "auth status") - return { - stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', - stderr: "", - code: 0, - }; - throw new Error(`Unexpected args: ${joined}`); - }), ), - ), - ); + ); + + it.effect("runs Claude status probes with the configured Claude HOME", () => { + const claudeHome = "/tmp/t3code-claude-home"; + const recorded = recordingMockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; + if (joined === "auth status") + return { + stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', + stderr: "", + code: 0, + }; + throw new Error(`Unexpected args: ${joined}`); + }); - it.effect( - "includes Claude Opus 4.7 with xhigh as the default effort on supported versions", - () => - Effect.gen(function* () { - const status = yield* 
checkClaudeProviderStatus(); - const opus47 = status.models.find((model) => model.slug === "claude-opus-4-7"); - if (!opus47) { - assert.fail("Expected Claude Opus 4.7 to be present for Claude Code v2.1.111."); - } - if (!opus47.capabilities) { - assert.fail( - "Expected Claude Opus 4.7 capabilities to be present for Claude Code v2.1.111.", - ); - } + return Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + { + ...defaultClaudeSettings, + homePath: claudeHome, + }, + claudeCapabilities(), + ); + assert.strictEqual(status.status, "ready"); assert.deepStrictEqual( - opus47.capabilities.reasoningEffortLevels.find((level) => level.isDefault), - { value: "xhigh", label: "Extra High", isDefault: true }, + recorded.commands.map((command) => command.env?.HOME), + [claudeHome], + ); + }).pipe(Effect.provide(recorded.layer)); + }); + + it.effect("includes probed claude slash commands in the provider snapshot", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities({ + subscriptionType: "maxplan", + slashCommands: [ + { + name: "review", + description: "Review a pull request", + input: { hint: "pr-or-branch" }, + }, + ], + }), ); + + assert.deepStrictEqual(status.slashCommands, [ + { + name: "review", + description: "Review a pull request", + input: { hint: "pr-or-branch" }, + }, + ]); }).pipe( Effect.provide( mockSpawnerLayer((args) => { const joined = args.join(" "); - if (joined === "--version") return { stdout: "2.1.111\n", stderr: "", code: 0 }; + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; if (joined === "auth status") return { stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', @@ -793,306 +1324,147 @@ it.layer( }), ), ), - ); - - it.effect("hides Claude Opus 4.7 on older Claude Code versions", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus(); - assert.strictEqual( - 
status.models.some((model) => model.slug === "claude-opus-4-7"), - false, - ); - assert.strictEqual( - status.message, - "Claude Code v2.1.110 is too old for Claude Opus 4.7. Upgrade to v2.1.111 or newer to access it.", - ); - }).pipe( - Effect.provide( - mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") return { stdout: "2.1.110\n", stderr: "", code: 0 }; - if (joined === "auth status") - return { - stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', - stderr: "", - code: 0, - }; - throw new Error(`Unexpected args: ${joined}`); - }), - ), - ), - ); - - it.effect("returns a display label for claude subscription types", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus(() => Effect.succeed("maxplan")); - assert.strictEqual(status.provider, "claudeAgent"); - assert.strictEqual(status.status, "ready"); - assert.strictEqual(status.auth.status, "authenticated"); - assert.strictEqual(status.auth.type, "maxplan"); - assert.strictEqual(status.auth.label, "Claude Max Subscription"); - }).pipe( - Effect.provide( - mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; - if (joined === "auth status") - return { - stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', - stderr: "", - code: 0, - }; - throw new Error(`Unexpected args: ${joined}`); - }), - ), - ), - ); - - it.effect("includes probed claude slash commands in the provider snapshot", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus( - () => Effect.succeed("maxplan"), - () => - Effect.succeed([ - { - name: "review", - description: "Review a pull request", - input: { hint: "pr-or-branch" }, - }, - ]), - ); + ); - assert.deepStrictEqual(status.slashCommands, [ - { - name: "review", - description: "Review a pull request", - input: { hint: "pr-or-branch" }, - }, - ]); - }).pipe( - Effect.provide( - 
mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; - if (joined === "auth status") - return { - stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', - stderr: "", - code: 0, - }; - throw new Error(`Unexpected args: ${joined}`); - }), - ), - ), - ); - - it.effect("deduplicates probed claude slash commands by name", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus( - () => Effect.succeed("maxplan"), - () => - Effect.succeed([ - { - name: "ui", - description: "Explore and refine UI", - }, - { - name: "ui", - input: { hint: "component-or-screen" }, - }, - ]), - ); + it.effect("deduplicates probed claude slash commands by name", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities({ + subscriptionType: "maxplan", + slashCommands: [ + { + name: "ui", + description: "Explore and refine UI", + }, + { + name: "ui", + input: { hint: "component-or-screen" }, + }, + ], + }), + ); - assert.deepStrictEqual(status.slashCommands, [ - { - name: "ui", - description: "Explore and refine UI", - input: { hint: "component-or-screen" }, - }, - ]); - }).pipe( - Effect.provide( - mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; - if (joined === "auth status") - return { - stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', - stderr: "", - code: 0, - }; - throw new Error(`Unexpected args: ${joined}`); - }), - ), - ), - ); - - it.effect("returns an api key label for claude api key auth", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus(); - assert.strictEqual(status.provider, "claudeAgent"); - assert.strictEqual(status.status, "ready"); - assert.strictEqual(status.auth.status, "authenticated"); - assert.strictEqual(status.auth.type, "apiKey"); - 
assert.strictEqual(status.auth.label, "Claude API Key"); - }).pipe( - Effect.provide( - mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; - if (joined === "auth status") - return { - stdout: '{"loggedIn":true,"authMethod":"api-key"}\n', - stderr: "", - code: 0, - }; - throw new Error(`Unexpected args: ${joined}`); - }), - ), - ), - ); - - it.effect("returns unavailable when claude is missing", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus(); - assert.strictEqual(status.provider, "claudeAgent"); - assert.strictEqual(status.status, "error"); - assert.strictEqual(status.installed, false); - assert.strictEqual(status.auth.status, "unknown"); - assert.strictEqual( - status.message, - "Claude Agent CLI (`claude`) is not installed or not on PATH.", - ); - }).pipe(Effect.provide(failingSpawnerLayer("spawn claude ENOENT"))), - ); - - it.effect("returns error when version check fails with non-zero exit code", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus(); - assert.strictEqual(status.provider, "claudeAgent"); - assert.strictEqual(status.status, "error"); - assert.strictEqual(status.installed, true); - }).pipe( - Effect.provide( - mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") - return { stdout: "", stderr: "Something went wrong", code: 1 }; - throw new Error(`Unexpected args: ${joined}`); - }), - ), - ), - ); - - it.effect("returns unauthenticated when auth status reports not logged in", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus(); - assert.strictEqual(status.provider, "claudeAgent"); - assert.strictEqual(status.status, "error"); - assert.strictEqual(status.installed, true); - assert.strictEqual(status.auth.status, "unauthenticated"); - assert.strictEqual( - status.message, - "Claude is not authenticated. 
Run `claude auth login` and try again.", - ); - }).pipe( - Effect.provide( - mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; - if (joined === "auth status") - return { - stdout: '{"loggedIn":false}\n', - stderr: "", - code: 1, - }; - throw new Error(`Unexpected args: ${joined}`); - }), - ), - ), - ); - - it.effect("returns unauthenticated when output includes 'not logged in'", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus(); - assert.strictEqual(status.provider, "claudeAgent"); - assert.strictEqual(status.status, "error"); - assert.strictEqual(status.installed, true); - assert.strictEqual(status.auth.status, "unauthenticated"); - }).pipe( - Effect.provide( - mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; - if (joined === "auth status") return { stdout: "Not logged in\n", stderr: "", code: 1 }; - throw new Error(`Unexpected args: ${joined}`); - }), - ), - ), - ); - - it.effect("returns warning when auth status command is unsupported", () => - Effect.gen(function* () { - const status = yield* checkClaudeProviderStatus(); - assert.strictEqual(status.provider, "claudeAgent"); - assert.strictEqual(status.status, "warning"); - assert.strictEqual(status.installed, true); - assert.strictEqual(status.auth.status, "unknown"); - assert.strictEqual( - status.message, - "Claude Agent authentication status command is unavailable in this version of Claude.", - ); - }).pipe( - Effect.provide( - mockSpawnerLayer((args) => { - const joined = args.join(" "); - if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; - if (joined === "auth status") - return { stdout: "", stderr: "error: unknown command 'auth'", code: 2 }; - throw new Error(`Unexpected args: ${joined}`); - }), + assert.deepStrictEqual(status.slashCommands, [ + { + name: 
"ui", + description: "Explore and refine UI", + input: { hint: "component-or-screen" }, + }, + ]); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; + if (joined === "auth status") + return { + stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', + stderr: "", + code: 0, + }; + throw new Error(`Unexpected args: ${joined}`); + }), + ), ), - ), - ); - }); - - // ── parseClaudeAuthStatusFromOutput pure tests ──────────────────── + ); - describe("parseClaudeAuthStatusFromOutput", () => { - it("exit code 0 with no auth markers is ready", () => { - const parsed = parseClaudeAuthStatusFromOutput({ stdout: "OK\n", stderr: "", code: 0 }); - assert.strictEqual(parsed.status, "ready"); - assert.strictEqual(parsed.auth.status, "authenticated"); - }); + it.effect("returns an api key label for claude api key auth", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities({ tokenSource: "ANTHROPIC_AUTH_TOKEN" }), + ); + assert.strictEqual(status.status, "ready"); + assert.strictEqual(status.auth.status, "authenticated"); + assert.strictEqual(status.auth.type, "apiKey"); + assert.strictEqual(status.auth.label, "Claude API Key"); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; + if (joined === "auth status") + return { + stdout: '{"loggedIn":true,"authMethod":"api-key"}\n', + stderr: "", + code: 0, + }; + throw new Error(`Unexpected args: ${joined}`); + }), + ), + ), + ); - it("JSON with loggedIn=true is authenticated", () => { - const parsed = parseClaudeAuthStatusFromOutput({ - stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', - stderr: "", - code: 0, - }); - assert.strictEqual(parsed.status, "ready"); - assert.strictEqual(parsed.auth.status, 
"authenticated"); - }); + it.effect("returns unavailable when claude is missing", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities(), + ); + assert.strictEqual(status.status, "error"); + assert.strictEqual(status.installed, false); + assert.strictEqual(status.auth.status, "unknown"); + assert.strictEqual( + status.message, + "Claude Agent CLI (`claude`) is not installed or not on PATH.", + ); + }).pipe(Effect.provide(failingSpawnerLayer("spawn claude ENOENT"))), + ); - it("JSON with loggedIn=false is unauthenticated", () => { - const parsed = parseClaudeAuthStatusFromOutput({ - stdout: '{"loggedIn":false}\n', - stderr: "", - code: 0, - }); - assert.strictEqual(parsed.status, "error"); - assert.strictEqual(parsed.auth.status, "unauthenticated"); - }); + it.effect("returns error when version check fails with non-zero exit code", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + claudeCapabilities(), + ); + assert.strictEqual(status.status, "error"); + assert.strictEqual(status.installed, true); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") + return { + stdout: "", + stderr: "Something went wrong", + code: 1, + }; + throw new Error(`Unexpected args: ${joined}`); + }), + ), + ), + ); - it("JSON without auth marker is warning", () => { - const parsed = parseClaudeAuthStatusFromOutput({ - stdout: '{"ok":true}\n', - stderr: "", - code: 0, - }); - assert.strictEqual(parsed.status, "warning"); - assert.strictEqual(parsed.auth.status, "unknown"); + it.effect("returns warning when the Claude initialization result is unavailable", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus( + defaultClaudeSettings, + noClaudeCapabilities, + ); + assert.strictEqual(status.status, "warning"); + assert.strictEqual(status.installed, true); 
+ assert.strictEqual(status.auth.status, "unknown"); + assert.strictEqual( + status.message, + "Could not verify Claude authentication status from initialization result.", + ); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; + if (joined === "auth status") + return { + stdout: '{"loggedIn":false}\n', + stderr: "", + code: 1, + }; + throw new Error(`Unexpected args: ${joined}`); + }), + ), + ), + ); }); - }); -}); + }, +); diff --git a/apps/server/src/provider/Layers/ProviderRegistry.ts b/apps/server/src/provider/Layers/ProviderRegistry.ts index b1d20b4d603..4f586d881e3 100644 --- a/apps/server/src/provider/Layers/ProviderRegistry.ts +++ b/apps/server/src/provider/Layers/ProviderRegistry.ts @@ -1,51 +1,66 @@ /** - * ProviderRegistryLive - Aggregates provider-specific snapshot services. + * ProviderRegistryLive — aggregates per-instance snapshot streams into a + * single materialized list. + * + * Historically this Layer composed four per-kind Live Layers + * (`CodexProviderLive`, `ClaudeProviderLive`, …) that each exposed a + * `ServerProviderShape`. Those Lives were deleted during the driver / + * instance refactor — every driver now carries its `snapshot: ServerProviderShape` + * bundled onto the `ProviderInstance` the registry produces. + * + * Each configured instance (including multi-instance setups like + * `codex_personal` + `codex_work`) contributes one `ProviderSnapshotSource`, + * keyed by `instanceId`. Instances whose driver is unavailable or whose + * config failed to decode are merged from `instanceRegistry.listUnavailable` + * as shadow snapshots so the UI can render their exact unavailable reason. + * + * Cache paths on disk are now keyed by `instanceId`. Because + * `defaultInstanceIdForDriver(kind) === kind` for built-in kinds, existing + * `.json` files remain the on-disk location for that driver's default + * instance. 
Identity-less legacy cache contents are ignored and replaced by + * the first live refresh. * * @module ProviderRegistryLive */ -import type { ProviderKind, ServerProvider } from "@t3tools/contracts"; -import { Effect, Equal, FileSystem, Layer, Path, PubSub, Ref, Stream } from "effect"; +import { + defaultInstanceIdForDriver, + ProviderDriverKind, + type ProviderInstanceId, + type ServerProvider, +} from "@t3tools/contracts"; +import { Cause, Effect, Equal, FileSystem, Layer, Path, PubSub, Ref, Stream } from "effect"; +import * as Semaphore from "effect/Semaphore"; import { ServerConfig } from "../../config.ts"; -import { ClaudeProviderLive } from "./ClaudeProvider.ts"; -import { CodexProviderLive } from "./CodexProvider.ts"; -import { CursorProviderLive } from "./CursorProvider.ts"; -import { OpenCodeProviderLive } from "./OpenCodeProvider.ts"; -import { ClaudeProvider } from "../Services/ClaudeProvider.ts"; -import { CodexProvider } from "../Services/CodexProvider.ts"; -import { CursorProvider } from "../Services/CursorProvider.ts"; -import { OpenCodeProvider } from "../Services/OpenCodeProvider.ts"; +import { ProviderInstanceRegistry } from "../Services/ProviderInstanceRegistry.ts"; import { ProviderRegistry, type ProviderRegistryShape } from "../Services/ProviderRegistry.ts"; -import { OpenCodeRuntimeLive } from "../opencodeRuntime.ts"; import { hydrateCachedProvider, - PROVIDER_CACHE_IDS, + isCachedProviderCorrelated, orderProviderSnapshots, readProviderStatusCache, resolveProviderStatusCachePath, writeProviderStatusCache, } from "../providerStatusCache.ts"; - -type ProviderSnapshotSource = { - readonly provider: ProviderKind; - readonly getSnapshot: Effect.Effect; - readonly refresh: Effect.Effect; - readonly streamChanges: Stream.Stream; -}; +import type { ProviderInstance } from "../ProviderDriver.ts"; +import type { ProviderSnapshotSource } from "../builtInProviderCatalog.ts"; const loadProviders = ( providerSources: ReadonlyArray, ): Effect.Effect> => - 
Effect.forEach(providerSources, (providerSource) => providerSource.getSnapshot, { - concurrency: "unbounded", - }); + Effect.forEach( + providerSources, + (providerSource) => + providerSource.getSnapshot.pipe( + Effect.flatMap((snapshot) => correlateSnapshotWithSource(providerSource, snapshot)), + ), + { + concurrency: "unbounded", + }, + ); const hasModelCapabilities = (model: ServerProvider["models"][number]): boolean => - (model.capabilities?.reasoningEffortLevels.length ?? 0) > 0 || - model.capabilities?.supportsFastMode === true || - model.capabilities?.supportsThinkingToggle === true || - (model.capabilities?.contextWindowOptions.length ?? 0) > 0 || - (model.capabilities?.promptInjectedEffortLevels.length ?? 0) > 0; + (model.capabilities?.optionDescriptors?.length ?? 0) > 0; const mergeProviderModels = ( previousModels: ReadonlyArray, @@ -86,85 +101,123 @@ export const haveProvidersChanged = ( nextProviders: ReadonlyArray, ): boolean => !Equal.equals(previousProviders, nextProviders); -const ProviderRegistryLiveBase = Layer.effect( +const correlateSnapshotWithSource = ( + source: ProviderSnapshotSource, + snapshot: ServerProvider, +): Effect.Effect => { + if (snapshot.instanceId !== source.instanceId) { + return Effect.die( + new Error( + `Provider snapshot instance mismatch: source '${source.instanceId}' emitted '${snapshot.instanceId}'.`, + ), + ); + } + if (snapshot.driver !== source.driverKind) { + return Effect.die( + new Error( + `Provider snapshot driver mismatch for instance '${source.instanceId}': source '${source.driverKind}' emitted '${snapshot.driver}'.`, + ), + ); + } + return Effect.succeed(snapshot); +}; + +/** + * Key a snapshot for aggregation and persistence. Snapshot sources + * must be correlated by instance id before reaching this map; missing + * identities are defects, not runtime routing fallbacks. 
+ */ +const snapshotInstanceKey = (provider: ServerProvider): ProviderInstanceId => { + return provider.instanceId; +}; + +// Project a live `ProviderInstance` into the aggregator's consumption +// shape. Each call re-captures the instance's `snapshot` closures, so +// after `ProviderInstanceRegistry` rebuilds an instance (e.g. because +// its settings changed), a fresh source rides the new PubSub instead +// of a closed one. +const buildSnapshotSource = (instance: ProviderInstance): ProviderSnapshotSource => ({ + instanceId: instance.instanceId, + driverKind: instance.driverKind, + getSnapshot: instance.snapshot.getSnapshot, + refresh: instance.snapshot.refresh, + streamChanges: instance.snapshot.streamChanges, +}); + +export const ProviderRegistryLive = Layer.effect( ProviderRegistry, Effect.gen(function* () { - const codexProvider = yield* CodexProvider; - const claudeProvider = yield* ClaudeProvider; - const openCodeProvider = yield* OpenCodeProvider; - const cursorProvider = yield* CursorProvider; + const instanceRegistry = yield* ProviderInstanceRegistry; const config = yield* ServerConfig; const fileSystem = yield* FileSystem.FileSystem; const path = yield* Path.Path; - const providerSources = [ - { - provider: "codex", - getSnapshot: codexProvider.getSnapshot, - refresh: codexProvider.refresh, - streamChanges: codexProvider.streamChanges, - }, - { - provider: "claudeAgent", - getSnapshot: claudeProvider.getSnapshot, - refresh: claudeProvider.refresh, - streamChanges: claudeProvider.streamChanges, - }, - { - provider: "opencode", - getSnapshot: openCodeProvider.getSnapshot, - refresh: openCodeProvider.refresh, - streamChanges: openCodeProvider.streamChanges, - }, - { - provider: "cursor", - getSnapshot: cursorProvider.getSnapshot, - refresh: cursorProvider.refresh, - streamChanges: cursorProvider.streamChanges, - }, - ] satisfies ReadonlyArray; - const activeProviders = PROVIDER_CACHE_IDS; + // Aggregator PubSub — consumers (WS gateway, etc.) 
subscribe here for + // coalesced updates across every instance. const changesPubSub = yield* Effect.acquireRelease( PubSub.unbounded>(), PubSub.shutdown, ); - const fallbackProviders = yield* loadProviders(providerSources); - const cachePathByProvider = new Map( - activeProviders.map( - (provider) => - [ - provider, - resolveProviderStatusCachePath({ - cacheDir: config.providerStatusCacheDir, - provider, - }), - ] as const, - ), - ); - const fallbackByProvider = new Map( - fallbackProviders.map((provider) => [provider.provider, provider] as const), - ); + + // Boot-only: hydrate `providersRef` from the on-disk per-instance + // cache so the UI has something to render during the first refresh. + // Instances added post-boot skip this path; their first entry in + // `providersRef` comes from the reactive `syncLiveSources` pass + // below. + const bootInstances = yield* instanceRegistry.listInstances; + const bootSources = bootInstances.map(buildSnapshotSource); + const fallbackProviders = yield* loadProviders(bootSources); + const fallbackByInstance = new Map(); + for (let index = 0; index < fallbackProviders.length; index++) { + const provider = fallbackProviders[index]; + const source = bootSources[index]; + if (provider === undefined || source === undefined) { + continue; + } + fallbackByInstance.set(source.instanceId, provider); + } const cachedProviders = yield* Effect.forEach( - activeProviders, - (provider) => { - const filePath = cachePathByProvider.get(provider); - const fallbackProvider = fallbackByProvider.get(provider); - if (!filePath || !fallbackProvider) { - return Effect.succeed(undefined); - } - return readProviderStatusCache(filePath).pipe( - Effect.provideService(FileSystem.FileSystem, fileSystem), - Effect.map((cachedProvider) => - cachedProvider === undefined - ? 
undefined - : hydrateCachedProvider({ - cachedProvider, - fallbackProvider, - }), - ), - ); - }, + bootSources, + (source) => + Effect.gen(function* () { + // One cache file per configured instance. For the default + // instance of a built-in kind the path equals `.json` — + // identical to the legacy filename. We still require the cache + // payload to carry matching instance id + driver kind; old + // identity-less payloads are discarded and the awaited refresh + // below repopulates the cache. + const filePath = yield* resolveProviderStatusCachePath({ + cacheDir: config.providerStatusCacheDir, + instanceId: source.instanceId, + }).pipe(Effect.provideService(Path.Path, path)); + const fallbackProvider = fallbackByInstance.get(source.instanceId); + if (fallbackProvider === undefined) { + return undefined; + } + return yield* readProviderStatusCache(filePath).pipe( + Effect.provideService(FileSystem.FileSystem, fileSystem), + Effect.flatMap((cachedProvider) => { + if (cachedProvider === undefined) { + return Effect.void.pipe(Effect.as(undefined as ServerProvider | undefined)); + } + const correlation = { + cachedProvider, + fallbackProvider, + } as const; + if (!isCachedProviderCorrelated(correlation)) { + return Effect.logWarning("provider status cache identity mismatch, ignoring", { + path: filePath, + instanceId: source.instanceId, + cachedInstanceId: cachedProvider.instanceId ?? null, + driver: source.driverKind, + cachedDriver: cachedProvider.driver ?? 
null, + }).pipe(Effect.as(undefined as ServerProvider | undefined)); + } + return Effect.succeed(hydrateCachedProvider(correlation)); + }), + ); + }), { concurrency: "unbounded" }, ).pipe( Effect.map((providers) => @@ -175,39 +228,64 @@ const ProviderRegistryLiveBase = Layer.effect( ); const providersRef = yield* Ref.make>(cachedProviders); - const persistProvider = (provider: ServerProvider) => { - const filePath = cachePathByProvider.get( - provider.provider as (typeof PROVIDER_CACHE_IDS)[number], - ); - if (!filePath) return Effect.void; - return writeProviderStatusCache({ - filePath, - provider, - }).pipe( - Effect.provideService(FileSystem.FileSystem, fileSystem), - Effect.provideService(Path.Path, path), - Effect.tapError(Effect.logError), - Effect.ignore, - ); - }; + // Live-source registry — the dynamic counterpart to the boot-time + // `bootSources`. Keyed by `instanceId`; the stored `ProviderInstance` + // reference is used for identity equality so "no-op" reconciles + // (settings unchanged) skip re-subscribing + re-probing. + const liveSubsRef = yield* Ref.make>( + new Map(), + ); + // Serialize `syncLiveSources` so a rapid burst of reconciles doesn't + // interleave two passes clobbering each other's fiber bookkeeping. + const syncSemaphore = yield* Semaphore.make(1); + + const getLiveSources: Effect.Effect> = Ref.get( + liveSubsRef, + ).pipe(Effect.map((map) => Array.from(map.values(), buildSnapshotSource))); + + const persistProvider = (provider: ServerProvider) => + Effect.gen(function* () { + // Persist every instance — the file name is the instance id, so + // multi-instance setups (e.g. `codex_personal`, `codex_work`) each + // get their own cache. We resolve the path fresh so snapshots + // produced by newly-added instances post-boot still land on disk + // without the aggregator holding a stale `cachePathByInstance` + // entry. 
+ const key = snapshotInstanceKey(provider); + const filePath = yield* resolveProviderStatusCachePath({ + cacheDir: config.providerStatusCacheDir, + instanceId: key, + }).pipe(Effect.provideService(Path.Path, path)); + yield* writeProviderStatusCache({ filePath, provider }).pipe( + Effect.provideService(FileSystem.FileSystem, fileSystem), + Effect.provideService(Path.Path, path), + Effect.tapError(Effect.logError), + Effect.ignore, + ); + }); const upsertProviders = Effect.fn("upsertProviders")(function* ( nextProviders: ReadonlyArray, options?: { readonly publish?: boolean; + readonly persist?: boolean; + readonly replace?: boolean; }, ) { const [previousProviders, providers] = yield* Ref.modify( providersRef, (previousProviders) => { const mergedProviders = new Map( - previousProviders.map((provider) => [provider.provider, provider] as const), + previousProviders.map((provider) => [snapshotInstanceKey(provider), provider] as const), ); for (const provider of nextProviders) { + const key = snapshotInstanceKey(provider); mergedProviders.set( - provider.provider, - mergeProviderSnapshot(mergedProviders.get(provider.provider), provider), + key, + options?.replace === true + ? 
provider + : mergeProviderSnapshot(mergedProviders.get(key), provider), ); } @@ -217,10 +295,12 @@ const ProviderRegistryLiveBase = Layer.effect( ); if (haveProvidersChanged(previousProviders, providers)) { - yield* Effect.forEach(nextProviders, persistProvider, { - concurrency: "unbounded", - discard: true, - }); + if (options?.persist !== false) { + yield* Effect.forEach(nextProviders, persistProvider, { + concurrency: "unbounded", + discard: true, + }); + } if (options?.publish !== false) { yield* PubSub.publish(changesPubSub, providers); } @@ -238,64 +318,246 @@ const ProviderRegistryLiveBase = Layer.effect( return yield* upsertProviders([provider], options); }); - const refresh = Effect.fn("refresh")(function* (provider?: ProviderKind) { - if (provider) { - const providerSource = providerSources.find((candidate) => candidate.provider === provider); - if (!providerSource) { - return yield* Ref.get(providersRef); - } - return yield* providerSource.refresh.pipe( - Effect.flatMap((nextProvider) => syncProvider(nextProvider)), - ); - } - - return yield* Effect.forEach( - providerSources, - (providerSource) => providerSource.refresh.pipe(Effect.flatMap(syncProvider)), - { - concurrency: "unbounded", - discard: true, - }, - ).pipe(Effect.andThen(Ref.get(providersRef))); + const refreshOneSource = Effect.fn("refreshOneSource")(function* ( + providerSource: ProviderSnapshotSource, + ) { + return yield* providerSource.refresh.pipe( + Effect.flatMap((nextProvider) => + correlateSnapshotWithSource(providerSource, nextProvider).pipe( + Effect.flatMap(syncProvider), + ), + ), + ); }); - yield* Effect.forEach( - providerSources, - (providerSource) => - Stream.runForEach(providerSource.streamChanges, (provider) => syncProvider(provider)).pipe( - Effect.forkScoped, - ), - { + const refreshAll = Effect.fn("refreshAll")(function* () { + const sources = yield* getLiveSources; + return yield* Effect.forEach(sources, (source) => refreshOneSource(source), { concurrency: "unbounded", 
discard: true, - }, + }).pipe(Effect.andThen(Ref.get(providersRef))); + }); + + const refresh = Effect.fn("refresh")(function* (provider?: ProviderDriverKind) { + if (provider === undefined) { + return yield* refreshAll(); + } + // Kind-scoped refreshes target the default instance for that driver. + const defaultInstanceId = defaultInstanceIdForDriver(provider); + const sources = yield* getLiveSources; + const providerSource = sources.find( + (candidate) => candidate.instanceId === defaultInstanceId, + ); + if (!providerSource) { + return yield* Ref.get(providersRef); + } + return yield* refreshOneSource(providerSource); + }); + + const refreshInstance = Effect.fn("refreshInstance")(function* ( + instanceId: ProviderInstanceId, + ) { + const sources = yield* getLiveSources; + const providerSource = sources.find((candidate) => candidate.instanceId === instanceId); + if (!providerSource) { + return yield* Ref.get(providersRef); + } + return yield* refreshOneSource(providerSource); + }); + + /** + * Diff the aggregator's live-source set against the current + * `ProviderInstanceRegistry` and: + * - subscribe to each newly-added or rebuilt instance's + * `streamChanges` (so periodic + enrichment refreshes land in + * `providersRef`); + * - force-refresh each newly-added/rebuilt instance and feed the + * result directly into `providersRef`, bypassing the PubSub + * attachment race that otherwise drops the initial probe; + * - prune `providersRef` of instances that no longer exist. + * + * Initial refreshes are awaited in parallel rather than forked, so + * callers (layer build; `streamChanges` watcher) see fully-probed + * state on return. This matters for layer build in particular: + * consumers reading `getProviders` immediately after layer build + * expect the probe to have already landed. + * + * Per-instance subscription fibers are not tracked explicitly. 
When + * a rebuilt instance's old child scope closes, its PubSub shuts + * down and our `Stream.runForEach` fiber exits naturally. + */ + const syncLiveSources = syncSemaphore.withPermits(1)( + Effect.gen(function* () { + const instances = yield* instanceRegistry.listInstances; + const unavailableProviders = yield* instanceRegistry.listUnavailable; + const nextByInstance = new Map( + instances.map((instance) => [instance.instanceId, instance] as const), + ); + const knownInstanceIds = new Set(nextByInstance.keys()); + for (const provider of unavailableProviders) { + knownInstanceIds.add(snapshotInstanceKey(provider)); + } + const previousSubs = yield* Ref.get(liveSubsRef); + + // Carry over subscriptions for instances whose identity is + // unchanged (reconcile treated them as no-op). Instances that + // disappeared, or were rebuilt with a different reference, + // fall through to the "newly-added" branch below. + const carriedOver = new Map(); + for (const [instanceId, previousInstance] of previousSubs) { + const nextInstance = nextByInstance.get(instanceId); + if (nextInstance !== undefined && nextInstance === previousInstance) { + carriedOver.set(instanceId, previousInstance); + } + } + + // Collect new/rebuilt instances in `nextByInstance` insertion + // order (which preserves settings-author order). + const newlyAdded: Array = []; + for (const [instanceId, instance] of nextByInstance) { + if (carriedOver.has(instanceId)) { + continue; + } + newlyAdded.push([instanceId, instance] as const); + } + + // Fork long-lived subscriptions to each new/rebuilt instance's + // change stream BEFORE kicking off refreshes — if the driver's + // own initial probe (line 140 in `makeManagedServerProvider`) + // wins the refreshSemaphore race, its PubSub publish must land + // in an active subscriber or the result is dropped. 
+ for (const [, instance] of newlyAdded) { + const source = buildSnapshotSource(instance); + yield* Stream.runForEach(source.streamChanges, (provider) => + correlateSnapshotWithSource(source, provider).pipe(Effect.flatMap(syncProvider)), + ).pipe(Effect.forkScoped); + } + + // Force-refresh every new/rebuilt instance in parallel and wait + // for them all to complete. The refresh's result is piped + // directly into `syncProvider`, so `providersRef` is populated + // deterministically by the time this block returns — regardless + // of PubSub subscription timing. Failures are logged and + // swallowed so one bad driver can't wedge the whole registry. + yield* Effect.forEach( + newlyAdded, + ([, instance]) => + refreshOneSource(buildSnapshotSource(instance)).pipe(Effect.ignoreCause({ log: true })), + { concurrency: "unbounded", discard: true }, + ); + yield* upsertProviders(unavailableProviders, { + persist: false, + replace: true, + }); + + const nextSubs = new Map(carriedOver); + for (const [instanceId, instance] of newlyAdded) { + nextSubs.set(instanceId, instance); + } + yield* Ref.set(liveSubsRef, nextSubs); + + // Drop aggregator state for instances that have disappeared — + // otherwise the UI would keep rendering ghosts. 
+ const [previousProviders, providers] = yield* Ref.modify( + providersRef, + (previousProviders) => { + const providers = orderProviderSnapshots( + previousProviders.filter((provider) => + knownInstanceIds.has(snapshotInstanceKey(provider)), + ), + ); + return [[previousProviders, providers] as const, providers]; + }, + ); + if (haveProvidersChanged(previousProviders, providers)) { + yield* PubSub.publish(changesPubSub, providers); + } + }), ); - yield* loadProviders(providerSources).pipe( - Effect.flatMap((providers) => upsertProviders(providers, { publish: false })), + const syncLiveSourcesAndContinue = syncLiveSources.pipe( + Effect.catchCause((cause) => { + if (Cause.hasInterruptsOnly(cause)) { + return Effect.interrupt; + } + return Effect.logError( + "provider registry instance sync failed; keeping subscription alive", + { + cause: Cause.pretty(cause), + }, + ); + }), ); + // Seed `providersRef` with the boot-time fallback snapshots so + // consumers calling `getProviders` immediately after layer build see + // a populated list — even before the first `syncLiveSources` refresh + // resolves. Cached snapshots (already in `providersRef`) merge with + // these via `upsertProviders` so on-disk state wins where present + // and pending fallbacks fill the gaps. + yield* upsertProviders(fallbackProviders, { publish: false }); + // Subscribe to registry mutations BEFORE running the initial sync. + // `subscribeChanges` acquires the dequeue synchronously in this + // fibre; the subscription is active the instant this `yield*` + // returns. Forking the consumer loop later cannot lose a publish + // because no publish can reach a not-yet-subscribed dequeue. + // + // (Contrast with the pre-fix code that did + // `Stream.runForEach(instanceRegistry.streamChanges, …).pipe(Effect.forkScoped)`. 
+ // `Stream.fromPubSub` defers `PubSub.subscribe` to stream start, + // and `forkScoped` only schedules the fibre — so a reconcile that + // published between "fibre scheduled" and "fibre starts running" + // was dropped, which made any settings change that replaced an + // instance never propagate to the aggregator's `providersRef`.) + const instanceChanges = yield* instanceRegistry.subscribeChanges; + // Initial sync: subscribe + kick off refreshes for every instance + // present at boot. Run synchronously so consumers pulling immediately + // after the layer build see the correct aggregator state. + yield* syncLiveSources; + // React to registry mutations — instance added / removed / rebuilt. + // `Stream.fromSubscription` builds a stream over the pre-acquired + // subscription rather than subscribing on stream start, which is + // what closes the race.
+ yield* Stream.runForEach( + Stream.fromSubscription(instanceChanges), + () => syncLiveSourcesAndContinue, + ).pipe(Effect.forkScoped); + + const recoverRefreshFailure = Effect.fn("recoverRefreshFailure")(function* ( + cause: Cause.Cause, + ) { + if (Cause.hasInterruptsOnly(cause)) { + return yield* Effect.interrupt; + } + yield* Effect.logError("provider registry refresh failed; preserving cached providers", { + cause: Cause.pretty(cause), + }); + return yield* Ref.get(providersRef); + }); + return { getProviders: Ref.get(providersRef), - refresh: (provider?: ProviderKind) => - refresh(provider).pipe( - Effect.tapError(Effect.logError), - Effect.orElseSucceed(() => [] as ReadonlyArray), - ), + refresh: (provider?: ProviderDriverKind) => + refresh(provider).pipe(Effect.catchCause(recoverRefreshFailure)), + refreshInstance: (instanceId: ProviderInstanceId) => + refreshInstance(instanceId).pipe(Effect.catchCause(recoverRefreshFailure)), get streamChanges() { return Stream.fromPubSub(changesPubSub); }, } satisfies ProviderRegistryShape; }), ); - -export const ProviderRegistryLive = Layer.unwrap( - Effect.sync(() => - ProviderRegistryLiveBase.pipe( - Layer.provideMerge(CodexProviderLive), - Layer.provideMerge(ClaudeProviderLive), - Layer.provideMerge(CursorProviderLive), - Layer.provideMerge(OpenCodeProviderLive), - Layer.provideMerge(OpenCodeRuntimeLive), - ), - ), -); diff --git a/apps/server/src/provider/Layers/ProviderService.test.ts b/apps/server/src/provider/Layers/ProviderService.test.ts index 9bb40596a06..7e771251437 100644 --- a/apps/server/src/provider/Layers/ProviderService.test.ts +++ b/apps/server/src/provider/Layers/ProviderService.test.ts @@ -12,28 +12,34 @@ import type { import { ApprovalRequestId, EventId, - type ProviderKind, + ProviderDriverKind, + ProviderInstanceId, ProviderSessionStartInput, ThreadId, TurnId, } from "@t3tools/contracts"; +import { createModelSelection } from "@t3tools/shared/model"; import { it, assert, vi } from 
"@effect/vitest"; -import { Effect, Fiber, Layer, Metric, Option, PubSub, Ref, Stream } from "effect"; +import { Effect, Exit, Fiber, Layer, Metric, Option, PubSub, Ref, Scope, Stream } from "effect"; import * as SqlClient from "effect/unstable/sql/SqlClient"; import { + ProviderAdapterRequestError, ProviderAdapterSessionNotFoundError, ProviderUnsupportedError, ProviderValidationError, type ProviderAdapterError, } from "../Errors.ts"; import type { ProviderAdapterShape } from "../Services/ProviderAdapter.ts"; -import { getProviderCapabilities } from "../Services/ProviderAdapter.ts"; -import { ProviderAdapterRegistry } from "../Services/ProviderAdapterRegistry.ts"; +import { + ProviderAdapterRegistry, + type ProviderAdapterRegistryShape, +} from "../Services/ProviderAdapterRegistry.ts"; import { ProviderService } from "../Services/ProviderService.ts"; import { ProviderSessionDirectory } from "../Services/ProviderSessionDirectory.ts"; import { makeProviderServiceLive } from "./ProviderService.ts"; +import { NoOpProviderEventLoggers, ProviderEventLoggers } from "./ProviderEventLoggers.ts"; import { ProviderSessionDirectoryLive } from "./ProviderSessionDirectory.ts"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { ProviderSessionRuntimeRepositoryLive } from "../../persistence/Layers/ProviderSessionRuntime.ts"; @@ -44,6 +50,7 @@ import { } from "../../persistence/Layers/Sqlite.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; import { AnalyticsService } from "../../telemetry/Services/AnalyticsService.ts"; +import { makeAdapterRegistryMock } from "../testUtils/providerAdapterRegistryMock.ts"; const defaultServerSettingsLayer = ServerSettingsService.layerTest(); @@ -51,11 +58,16 @@ const asRequestId = (value: string): ApprovalRequestId => ApprovalRequestId.make const asEventId = (value: string): EventId => EventId.make(value); const asThreadId = (value: string): ThreadId => ThreadId.make(value); const asTurnId = (value: 
string): TurnId => TurnId.make(value); +const codexInstanceId = ProviderInstanceId.make("codex"); +const claudeAgentInstanceId = ProviderInstanceId.make("claudeAgent"); +const CODEX_DRIVER = ProviderDriverKind.make("codex"); +const CLAUDE_AGENT_DRIVER = ProviderDriverKind.make("claudeAgent"); +const CURSOR_DRIVER = ProviderDriverKind.make("cursor"); type LegacyProviderRuntimeEvent = { readonly type: string; readonly eventId: EventId; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly createdAt: string; readonly threadId: ThreadId; readonly turnId?: string | undefined; @@ -65,7 +77,7 @@ type LegacyProviderRuntimeEvent = { readonly [key: string]: unknown; }; -function makeFakeCodexAdapter(provider: ProviderKind = "codex") { +function makeFakeCodexAdapter(provider: ProviderDriverKind = CODEX_DRIVER) { const sessions = new Map(); const runtimeEventPubSub = Effect.runSync(PubSub.unbounded()); @@ -74,10 +86,15 @@ function makeFakeCodexAdapter(provider: ProviderKind = "codex") { const now = new Date().toISOString(); const session: ProviderSession = { provider, + ...(input.providerInstanceId !== undefined + ? { providerInstanceId: input.providerInstanceId } + : {}), status: "ready", runtimeMode: input.runtimeMode, threadId: input.threadId, - resumeCursor: input.resumeCursor ?? { opaque: `resume-${String(input.threadId)}` }, + resumeCursor: input.resumeCursor ?? { + opaque: `resume-${String(input.threadId)}`, + }, cwd: input.cwd ?? 
process.cwd(), createdAt: now, updatedAt: now, @@ -177,7 +194,9 @@ function makeFakeCodexAdapter(provider: ProviderKind = "codex") { const adapter: ProviderAdapterShape = { provider, - capabilities: getProviderCapabilities(provider), + capabilities: { + sessionModelSwitch: "in-session", + }, startSession, sendTurn, interruptTurn, @@ -243,19 +262,13 @@ const hasMetricSnapshot = ( function makeProviderServiceLayer() { const codex = makeFakeCodexAdapter(); - const claude = makeFakeCodexAdapter("claudeAgent"); - const cursor = makeFakeCodexAdapter("cursor"); - const registry: typeof ProviderAdapterRegistry.Service = { - getByProvider: (provider) => - provider === "codex" - ? Effect.succeed(codex.adapter) - : provider === "claudeAgent" - ? Effect.succeed(claude.adapter) - : provider === "cursor" - ? Effect.succeed(cursor.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["codex", "claudeAgent", "cursor"]), - }; + const claude = makeFakeCodexAdapter(CLAUDE_AGENT_DRIVER); + const cursor = makeFakeCodexAdapter(CURSOR_DRIVER); + const registry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("codex")]: codex.adapter, + [ProviderDriverKind.make("claudeAgent")]: claude.adapter, + [ProviderDriverKind.make("cursor")]: cursor.adapter, + }); const providerAdapterLayer = Layer.succeed(ProviderAdapterRegistry, registry); const runtimeRepositoryLayer = ProviderSessionRuntimeRepositoryLive.pipe( @@ -270,6 +283,7 @@ function makeProviderServiceLayer() { Layer.provide(directoryLayer), Layer.provide(defaultServerSettingsLayer), Layer.provideMerge(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), ), directoryLayer, @@ -286,27 +300,76 @@ function makeProviderServiceLayer() { }; } +it.effect("ProviderServiceLive catches stopAll failures during shutdown", () => + Effect.gen(function* () { + const codex = makeFakeCodexAdapter(); + codex.stopAll.mockImplementation(() => + 
Effect.fail( + new ProviderAdapterRequestError({ + provider: String(CODEX_DRIVER), + method: "stopAll", + detail: "simulated stopAll failure", + }), + ), + ); + const registry = makeAdapterRegistryMock({ + [CODEX_DRIVER]: codex.adapter, + }); + const providerAdapterLayer = Layer.succeed(ProviderAdapterRegistry, registry); + const runtimeRepositoryLayer = ProviderSessionRuntimeRepositoryLive.pipe( + Layer.provide(SqlitePersistenceMemory), + ); + const directoryLayer = ProviderSessionDirectoryLive.pipe(Layer.provide(runtimeRepositoryLayer)); + const providerLayer = Layer.mergeAll( + makeProviderServiceLive().pipe( + Layer.provide(providerAdapterLayer), + Layer.provide(directoryLayer), + Layer.provide(defaultServerSettingsLayer), + Layer.provideMerge(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + ), + directoryLayer, + runtimeRepositoryLayer, + NodeServices.layer, + ); + const scope = yield* Scope.make(); + const runtimeServices = yield* Layer.build(providerLayer).pipe(Scope.provide(scope)); + + yield* Effect.gen(function* () { + yield* ProviderService; + }).pipe(Effect.provide(runtimeServices)); + const closeExit = yield* Scope.close(scope, Exit.void).pipe(Effect.exit); + + assert.equal(Exit.isSuccess(closeExit), true); + assert.equal(codex.stopAll.mock.calls.length, 1); + }), +); + it.effect("ProviderServiceLive rejects new sessions for disabled providers", () => Effect.gen(function* () { const codex = makeFakeCodexAdapter(); - const claude = makeFakeCodexAdapter("claudeAgent"); - const registry: typeof ProviderAdapterRegistry.Service = { - getByProvider: (provider) => - provider === "codex" - ? Effect.succeed(codex.adapter) - : provider === "claudeAgent" - ? 
Effect.succeed(claude.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["codex", "claudeAgent"]), + const claude = makeFakeCodexAdapter(CLAUDE_AGENT_DRIVER); + const registryBase = makeAdapterRegistryMock({ + [CODEX_DRIVER]: codex.adapter, + [CLAUDE_AGENT_DRIVER]: claude.adapter, + }); + const registry: ProviderAdapterRegistryShape = { + ...registryBase, + getInstanceInfo: (instanceId) => + instanceId === claudeAgentInstanceId + ? Effect.succeed({ + instanceId, + driverKind: CLAUDE_AGENT_DRIVER, + displayName: undefined, + enabled: false, + continuationIdentity: { + driverKind: CLAUDE_AGENT_DRIVER, + continuationKey: "claudeAgent:instance:claudeAgent", + }, + }) + : registryBase.getInstanceInfo(instanceId), }; const providerAdapterLayer = Layer.succeed(ProviderAdapterRegistry, registry); - const serverSettingsLayer = ServerSettingsService.layerTest({ - providers: { - claudeAgent: { - enabled: false, - }, - }, - }); const runtimeRepositoryLayer = ProviderSessionRuntimeRepositoryLive.pipe( Layer.provide(SqlitePersistenceMemory), ); @@ -314,15 +377,17 @@ it.effect("ProviderServiceLive rejects new sessions for disabled providers", () const providerLayer = makeProviderServiceLive().pipe( Layer.provide(providerAdapterLayer), Layer.provide(directoryLayer), - Layer.provide(serverSettingsLayer), + Layer.provide(defaultServerSettingsLayer), Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), ); const failure = yield* Effect.flip( Effect.gen(function* () { const provider = yield* ProviderService; return yield* provider.startSession(asThreadId("thread-disabled"), { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, threadId: asThreadId("thread-disabled"), runtimeMode: "full-access", }); @@ -330,11 +395,149 @@ it.effect("ProviderServiceLive rejects new sessions for disabled 
providers", () ); assert.instanceOf(failure, ProviderValidationError); - assert.include(failure.issue, "Provider 'claudeAgent' is disabled in T3 Code settings."); + assert.include(failure.issue, "Provider instance 'claudeAgent' is disabled"); assert.equal(claude.startSession.mock.calls.length, 0); }).pipe(Effect.provide(NodeServices.layer)), ); +it.effect( + "ProviderServiceLive allows enabled custom instances when legacy driver is disabled", + () => + Effect.gen(function* () { + const instanceId = ProviderInstanceId.make("codex_personal"); + const driverKind = CODEX_DRIVER; + const codex = makeFakeCodexAdapter(); + const unsupported = () => + new ProviderUnsupportedError({ + provider: driverKind, + }); + const registry: ProviderAdapterRegistryShape = { + getByInstance: (requestedInstanceId) => + requestedInstanceId === instanceId + ? Effect.succeed(codex.adapter) + : Effect.fail(unsupported()), + getInstanceInfo: (requestedInstanceId) => + requestedInstanceId === instanceId + ? Effect.succeed({ + instanceId, + driverKind, + displayName: "Codex Personal", + enabled: true, + continuationIdentity: { + driverKind, + continuationKey: "codex:/Users/example/.codex", + }, + }) + : Effect.fail(unsupported()), + listInstances: () => Effect.succeed([instanceId]), + listProviders: () => Effect.succeed([driverKind] as const), + streamChanges: Stream.empty, + subscribeChanges: Effect.flatMap(PubSub.unbounded(), (pubsub) => + PubSub.subscribe(pubsub), + ), + }; + const providerAdapterLayer = Layer.succeed(ProviderAdapterRegistry, registry); + const serverSettingsLayer = ServerSettingsService.layerTest({ + providers: { + codex: { + enabled: false, + }, + }, + }); + const runtimeRepositoryLayer = ProviderSessionRuntimeRepositoryLive.pipe( + Layer.provide(SqlitePersistenceMemory), + ); + const directoryLayer = ProviderSessionDirectoryLive.pipe( + Layer.provide(runtimeRepositoryLayer), + ); + const providerLayer = makeProviderServiceLive().pipe( + 
Layer.provide(providerAdapterLayer), + Layer.provide(directoryLayer), + Layer.provide(serverSettingsLayer), + Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + ); + + const session = yield* Effect.gen(function* () { + const provider = yield* ProviderService; + return yield* provider.startSession(asThreadId("thread-enabled-custom"), { + provider: driverKind, + providerInstanceId: instanceId, + threadId: asThreadId("thread-enabled-custom"), + runtimeMode: "full-access", + }); + }).pipe(Effect.provide(providerLayer)); + + assert.equal(session.providerInstanceId, instanceId); + assert.equal(codex.startSession.mock.calls.length, 1); + }).pipe(Effect.provide(NodeServices.layer)), +); + +it.effect("ProviderServiceLive rejects new sessions for disabled custom instances", () => + Effect.gen(function* () { + const instanceId = ProviderInstanceId.make("codex_personal"); + const driverKind = ProviderDriverKind.make("codex"); + const codex = makeFakeCodexAdapter(); + const unsupported = () => + new ProviderUnsupportedError({ + provider: ProviderDriverKind.make("codex"), + }); + const registry: ProviderAdapterRegistryShape = { + getByInstance: (requestedInstanceId) => + requestedInstanceId === instanceId + ? Effect.succeed(codex.adapter) + : Effect.fail(unsupported()), + getInstanceInfo: (requestedInstanceId) => + requestedInstanceId === instanceId + ? 
Effect.succeed({ + instanceId, + driverKind, + displayName: "Codex Personal", + enabled: false, + continuationIdentity: { + driverKind, + continuationKey: "codex:/Users/example/.codex", + }, + }) + : Effect.fail(unsupported()), + listInstances: () => Effect.succeed([instanceId]), + listProviders: () => Effect.succeed([CODEX_DRIVER] as const), + streamChanges: Stream.empty, + subscribeChanges: Effect.flatMap(PubSub.unbounded(), (pubsub) => + PubSub.subscribe(pubsub), + ), + }; + const providerAdapterLayer = Layer.succeed(ProviderAdapterRegistry, registry); + const runtimeRepositoryLayer = ProviderSessionRuntimeRepositoryLive.pipe( + Layer.provide(SqlitePersistenceMemory), + ); + const directoryLayer = ProviderSessionDirectoryLive.pipe(Layer.provide(runtimeRepositoryLayer)); + const providerLayer = makeProviderServiceLive().pipe( + Layer.provide(providerAdapterLayer), + Layer.provide(directoryLayer), + Layer.provide(defaultServerSettingsLayer), + Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + ); + + const failure = yield* Effect.flip( + Effect.gen(function* () { + const provider = yield* ProviderService; + return yield* provider.startSession(asThreadId("thread-disabled-instance"), { + provider: ProviderDriverKind.make("codex"), + providerInstanceId: instanceId, + threadId: asThreadId("thread-disabled-instance"), + runtimeMode: "full-access", + }); + }).pipe(Effect.provide(providerLayer)), + ); + + assert.instanceOf(failure, ProviderValidationError); + assert.include(failure.issue, "Provider instance 'codex_personal' is disabled"); + assert.equal(codex.startSession.mock.calls.length, 0); + }).pipe(Effect.provide(NodeServices.layer)), +); + const routing = makeProviderServiceLayer(); it.effect("ProviderServiceLive writes canonical events to the emitting thread segment", () => @@ -342,13 +545,9 @@ it.effect("ProviderServiceLive writes canonical events to the emitting thread se const codex = 
makeFakeCodexAdapter(); const canonicalEvents: ProviderRuntimeEvent[] = []; const canonicalThreadIds: Array<ThreadId> = []; - const registry: typeof ProviderAdapterRegistry.Service = { - getByProvider: (provider) => - provider === "codex" - ? Effect.succeed(codex.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["codex"]), - }; + const registry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("codex")]: codex.adapter, + }); const runtimeRepositoryLayer = ProviderSessionRuntimeRepositoryLive.pipe( Layer.provide(SqlitePersistenceMemory), ); @@ -368,6 +567,7 @@ it.effect("ProviderServiceLive writes canonical events to the emitting thread se Layer.provide(directoryLayer), Layer.provide(defaultServerSettingsLayer), Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), ); yield* Effect.gen(function* () { @@ -375,7 +575,7 @@ it.effect("ProviderServiceLive writes canonical events to the emitting thread se yield* sleep(10); codex.emit({ eventId: asEventId("evt-canonical-thread-segment"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: asThreadId("thread-canonical-thread-segment"), createdAt: new Date().toISOString(), type: "turn.completed", @@ -398,13 +598,9 @@ it.effect("ProviderServiceLive keeps persisted resumable sessions on startup", ( const dbPath = path.join(tempDir, "orchestration.sqlite"); const codex = makeFakeCodexAdapter(); - const registry: typeof ProviderAdapterRegistry.Service = { - getByProvider: (provider) => - provider === "codex" - ? 
Effect.succeed(codex.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["codex"]), - }; + const registry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("codex")]: codex.adapter, + }); const persistenceLayer = makeSqlitePersistenceLive(dbPath); const runtimeRepositoryLayer = ProviderSessionRuntimeRepositoryLive.pipe( @@ -415,7 +611,8 @@ it.effect("ProviderServiceLive keeps persisted resumable sessions on startup", ( yield* Effect.gen(function* () { const directory = yield* ProviderSessionDirectory; yield* directory.upsert({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: ThreadId.make("thread-stale"), }); }).pipe(Effect.provide(directoryLayer)); @@ -425,6 +622,7 @@ it.effect("ProviderServiceLive keeps persisted resumable sessions on startup", ( Layer.provide(directoryLayer), Layer.provide(defaultServerSettingsLayer), Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), ); yield* Effect.gen(function* () { @@ -439,7 +637,9 @@ it.effect("ProviderServiceLive keeps persisted resumable sessions on startup", ( const runtime = yield* Effect.gen(function* () { const repository = yield* ProviderSessionRuntimeRepository; - return yield* repository.getByThreadId({ threadId: asThreadId("thread-stale") }); + return yield* repository.getByThreadId({ + threadId: asThreadId("thread-stale"), + }); }).pipe(Effect.provide(runtimeRepositoryLayer)); assert.equal(Option.isSome(runtime), true); @@ -469,13 +669,9 @@ it.effect( ); const firstCodex = makeFakeCodexAdapter(); - const firstRegistry: typeof ProviderAdapterRegistry.Service = { - getByProvider: (provider) => - provider === "codex" - ? 
Effect.succeed(firstCodex.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["codex"]), - }; + const firstRegistry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("codex")]: firstCodex.adapter, + }); const firstDirectoryLayer = ProviderSessionDirectoryLive.pipe( Layer.provide(runtimeRepositoryLayer), @@ -485,6 +681,7 @@ it.effect( Layer.provide(firstDirectoryLayer), Layer.provide(defaultServerSettingsLayer), Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), ); const updatedResumeCursor = { threadId: asThreadId("thread-1"), @@ -497,7 +694,8 @@ it.effect( const provider = yield* ProviderService; const threadId = asThreadId("thread-1"); const session = yield* provider.startSession(threadId, { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, cwd: "/tmp/project", runtimeMode: "full-access", threadId, @@ -513,7 +711,9 @@ it.effect( const persistedAfterStopAll = yield* Effect.gen(function* () { const repository = yield* ProviderSessionRuntimeRepository; - return yield* repository.getByThreadId({ threadId: startedSession.threadId }); + return yield* repository.getByThreadId({ + threadId: startedSession.threadId, + }); }).pipe(Effect.provide(runtimeRepositoryLayer)); assert.equal(Option.isSome(persistedAfterStopAll), true); if (Option.isSome(persistedAfterStopAll)) { @@ -522,13 +722,9 @@ it.effect( } const secondCodex = makeFakeCodexAdapter(); - const secondRegistry: typeof ProviderAdapterRegistry.Service = { - getByProvider: (provider) => - provider === "codex" - ? 
Effect.succeed(secondCodex.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["codex"]), - }; + const secondRegistry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("codex")]: secondCodex.adapter, + }); const secondDirectoryLayer = ProviderSessionDirectoryLive.pipe( Layer.provide(runtimeRepositoryLayer), ); @@ -537,6 +733,7 @@ it.effect( Layer.provide(secondDirectoryLayer), Layer.provide(defaultServerSettingsLayer), Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), ); secondCodex.startSession.mockClear(); @@ -580,7 +777,8 @@ routing.layer("ProviderServiceLive routing", (it) => { const provider = yield* ProviderService; const session = yield* provider.startSession(asThreadId("thread-1"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-1"), cwd: "/tmp/project", runtimeMode: "full-access", @@ -660,35 +858,13 @@ routing.layer("ProviderServiceLive routing", (it) => { }), ); - it.effect("routes explicit claudeAgent provider session starts to the claude adapter", () => - Effect.gen(function* () { - const provider = yield* ProviderService; - - const session = yield* provider.startSession(asThreadId("thread-claude"), { - provider: "claudeAgent", - threadId: asThreadId("thread-claude"), - cwd: "/tmp/project-claude", - runtimeMode: "full-access", - }); - - assert.equal(session.provider, "claudeAgent"); - assert.equal(routing.claude.startSession.mock.calls.length, 1); - const startInput = routing.claude.startSession.mock.calls[0]?.[0]; - assert.equal(typeof startInput === "object" && startInput !== null, true); - if (startInput && typeof startInput === "object") { - const startPayload = startInput as { provider?: string; cwd?: string }; - assert.equal(startPayload.provider, "claudeAgent"); - assert.equal(startPayload.cwd, "/tmp/project-claude"); - } - 
}), - ); - it.effect("recovers stale persisted sessions for rollback by resuming thread identity", () => Effect.gen(function* () { const provider = yield* ProviderService; const initial = yield* provider.startSession(asThreadId("thread-1"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-1"), cwd: "/tmp/project", runtimeMode: "full-access", @@ -729,7 +905,8 @@ routing.layer("ProviderServiceLive routing", (it) => { const runtimeRepository = yield* ProviderSessionRuntimeRepository; const initial = yield* provider.startSession(asThreadId("thread-reap-preserve"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-reap-preserve"), cwd: "/tmp/project-reap-preserve", runtimeMode: "full-access", @@ -774,13 +951,74 @@ routing.layer("ProviderServiceLive routing", (it) => { }), ); + it.effect("routes explicit claudeAgent provider session starts to the claude adapter", () => + Effect.gen(function* () { + const provider = yield* ProviderService; + + const session = yield* provider.startSession(asThreadId("thread-claude"), { + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, + threadId: asThreadId("thread-claude"), + cwd: "/tmp/project-claude", + runtimeMode: "full-access", + }); + + assert.equal(session.provider, "claudeAgent"); + assert.equal(routing.claude.startSession.mock.calls.length, 1); + const startInput = routing.claude.startSession.mock.calls[0]?.[0]; + assert.equal(typeof startInput === "object" && startInput !== null, true); + if (startInput && typeof startInput === "object") { + const startPayload = startInput as { + provider?: string; + providerInstanceId?: ProviderInstanceId; + cwd?: string; + }; + assert.equal(startPayload.provider, "claudeAgent"); + assert.equal(startPayload.providerInstanceId, claudeAgentInstanceId); + 
assert.equal(startPayload.cwd, "/tmp/project-claude"); + } + }), + ); + + it.effect("dies when an active session conflicts with its persisted binding", () => + Effect.gen(function* () { + const provider = yield* ProviderService; + const directory = yield* ProviderSessionDirectory; + const threadId = asThreadId("thread-binding-mismatch"); + + yield* provider.startSession(threadId, { + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, + threadId, + cwd: "/tmp/project-binding-mismatch", + runtimeMode: "full-access", + }); + yield* directory.upsert({ + threadId, + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, + runtimeMode: "full-access", + }); + + const exit = yield* Effect.exit(provider.listSessions()); + assert.equal(Exit.hasDies(exit), true); + yield* directory.upsert({ + threadId, + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, + runtimeMode: "full-access", + }); + }), + ); + it.effect("stops stale sessions in other providers after a successful replacement start", () => Effect.gen(function* () { const provider = yield* ProviderService; const threadId = asThreadId("thread-provider-replacement"); const codexSession = yield* provider.startSession(threadId, { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId, cwd: "/tmp/project-provider-replacement", runtimeMode: "full-access", @@ -790,7 +1028,8 @@ routing.layer("ProviderServiceLive routing", (it) => { routing.claude.stopSession.mockClear(); const claudeSession = yield* provider.startSession(threadId, { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, threadId, cwd: "/tmp/project-provider-replacement", runtimeMode: "full-access", @@ -816,7 +1055,8 @@ routing.layer("ProviderServiceLive routing", (it) => { const provider = yield* ProviderService; const initial = 
yield* provider.startSession(asThreadId("thread-1"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-1"), cwd: "/tmp/project-send-turn", runtimeMode: "full-access", @@ -856,16 +1096,15 @@ routing.layer("ProviderServiceLive routing", (it) => { const provider = yield* ProviderService; const initial = yield* provider.startSession(asThreadId("thread-claude-send-turn"), { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, threadId: asThreadId("thread-claude-send-turn"), cwd: "/tmp/project-claude-send-turn", - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - effort: "max", - }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [{ id: "effort", value: "max" }], + ), runtimeMode: "full-access", }); @@ -892,13 +1131,12 @@ routing.layer("ProviderServiceLive routing", (it) => { }; assert.equal(startPayload.provider, "claudeAgent"); assert.equal(startPayload.cwd, "/tmp/project-claude-send-turn"); - assert.deepEqual(startPayload.modelSelection, { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - effort: "max", - }, - }); + assert.deepEqual( + startPayload.modelSelection, + createModelSelection(ProviderInstanceId.make("claudeAgent"), "claude-opus-4-6", [ + { id: "effort", value: "max" }, + ]), + ); assert.deepEqual(startPayload.resumeCursor, initial.resumeCursor); assert.equal(startPayload.threadId, initial.threadId); } @@ -911,12 +1149,14 @@ routing.layer("ProviderServiceLive routing", (it) => { const provider = yield* ProviderService; yield* provider.startSession(asThreadId("thread-1"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-1"), runtimeMode: "full-access", }); yield* 
provider.startSession(asThreadId("thread-2"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-2"), runtimeMode: "full-access", }); @@ -934,10 +1174,11 @@ routing.layer("ProviderServiceLive routing", (it) => { const provider = yield* ProviderService; const runtimeRepository = yield* ProviderSessionRuntimeRepository; - const session = yield* provider.startSession(asThreadId("thread-1"), { - provider: "codex", - threadId: asThreadId("thread-1"), - cwd: "/tmp/project-send-turn", + const threadId = asThreadId("thread-runtime-status"); + const session = yield* provider.startSession(threadId, { + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, + threadId, runtimeMode: "full-access", }); yield* provider.sendTurn({ @@ -963,7 +1204,7 @@ routing.layer("ProviderServiceLive routing", (it) => { lastError: string | null; lastRuntimeEvent: string | null; }; - assert.equal(runtimePayload.cwd, "/tmp/project-send-turn"); + assert.equal(runtimePayload.cwd, session.cwd); assert.equal(runtimePayload.model, null); assert.equal(runtimePayload.activeTurnId, `turn-${String(session.threadId)}`); assert.equal(runtimePayload.lastError, null); @@ -982,14 +1223,10 @@ routing.layer("ProviderServiceLive routing", (it) => { Layer.provide(persistenceLayer), ); - const firstClaude = makeFakeCodexAdapter("claudeAgent"); - const firstRegistry: typeof ProviderAdapterRegistry.Service = { - getByProvider: (provider) => - provider === "claudeAgent" - ? 
Effect.succeed(firstClaude.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["claudeAgent"]), - }; + const firstClaude = makeFakeCodexAdapter(CLAUDE_AGENT_DRIVER); + const firstRegistry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("claudeAgent")]: firstClaude.adapter, + }); const firstDirectoryLayer = ProviderSessionDirectoryLive.pipe( Layer.provide(runtimeRepositoryLayer), ); @@ -998,12 +1235,14 @@ routing.layer("ProviderServiceLive routing", (it) => { Layer.provide(firstDirectoryLayer), Layer.provide(defaultServerSettingsLayer), Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), ); const initial = yield* Effect.gen(function* () { const provider = yield* ProviderService; return yield* provider.startSession(asThreadId("thread-claude-start"), { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, threadId: asThreadId("thread-claude-start"), cwd: "/tmp/project-claude-start", runtimeMode: "full-access", @@ -1015,14 +1254,10 @@ routing.layer("ProviderServiceLive routing", (it) => { yield* provider.listSessions(); }).pipe(Effect.provide(firstProviderLayer)); - const secondClaude = makeFakeCodexAdapter("claudeAgent"); - const secondRegistry: typeof ProviderAdapterRegistry.Service = { - getByProvider: (provider) => - provider === "claudeAgent" - ? 
Effect.succeed(secondClaude.adapter) - : Effect.fail(new ProviderUnsupportedError({ provider })), - listProviders: () => Effect.succeed(["claudeAgent"]), - }; + const secondClaude = makeFakeCodexAdapter(CLAUDE_AGENT_DRIVER); + const secondRegistry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("claudeAgent")]: secondClaude.adapter, + }); const secondDirectoryLayer = ProviderSessionDirectoryLive.pipe( Layer.provide(runtimeRepositoryLayer), ); @@ -1031,6 +1266,7 @@ routing.layer("ProviderServiceLive routing", (it) => { Layer.provide(secondDirectoryLayer), Layer.provide(defaultServerSettingsLayer), Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), ); secondClaude.startSession.mockClear(); @@ -1038,7 +1274,8 @@ routing.layer("ProviderServiceLive routing", (it) => { yield* Effect.gen(function* () { const provider = yield* ProviderService; yield* provider.startSession(initial.threadId, { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, threadId: initial.threadId, cwd: "/tmp/project-claude-start", runtimeMode: "full-access", @@ -1064,6 +1301,90 @@ routing.layer("ProviderServiceLive routing", (it) => { fs.rmSync(tempDir, { recursive: true, force: true }); }).pipe(Effect.provide(NodeServices.layer)), ); + + it.effect( + "reuses persisted cwd when startSession resumes a claude session without cwd input", + () => + Effect.gen(function* () { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "t3-provider-service-cwd-")); + const dbPath = path.join(tempDir, "orchestration.sqlite"); + const persistenceLayer = makeSqlitePersistenceLive(dbPath); + const runtimeRepositoryLayer = ProviderSessionRuntimeRepositoryLive.pipe( + Layer.provide(persistenceLayer), + ); + + const firstClaude = makeFakeCodexAdapter(CLAUDE_AGENT_DRIVER); + const firstRegistry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("claudeAgent")]: 
firstClaude.adapter, + }); + const firstDirectoryLayer = ProviderSessionDirectoryLive.pipe( + Layer.provide(runtimeRepositoryLayer), + ); + const firstProviderLayer = makeProviderServiceLive().pipe( + Layer.provide(Layer.succeed(ProviderAdapterRegistry, firstRegistry)), + Layer.provide(firstDirectoryLayer), + Layer.provide(defaultServerSettingsLayer), + Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + ); + + const initial = yield* Effect.gen(function* () { + const provider = yield* ProviderService; + return yield* provider.startSession(asThreadId("thread-claude-cwd"), { + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, + threadId: asThreadId("thread-claude-cwd"), + cwd: "/tmp/project-claude-cwd", + runtimeMode: "full-access", + }); + }).pipe(Effect.provide(firstProviderLayer)); + + const secondClaude = makeFakeCodexAdapter(CLAUDE_AGENT_DRIVER); + const secondRegistry = makeAdapterRegistryMock({ + [ProviderDriverKind.make("claudeAgent")]: secondClaude.adapter, + }); + const secondDirectoryLayer = ProviderSessionDirectoryLive.pipe( + Layer.provide(runtimeRepositoryLayer), + ); + const secondProviderLayer = makeProviderServiceLive().pipe( + Layer.provide(Layer.succeed(ProviderAdapterRegistry, secondRegistry)), + Layer.provide(secondDirectoryLayer), + Layer.provide(defaultServerSettingsLayer), + Layer.provide(AnalyticsService.layerTest), + Layer.provide(Layer.succeed(ProviderEventLoggers, NoOpProviderEventLoggers)), + ); + + secondClaude.startSession.mockClear(); + + yield* Effect.gen(function* () { + const provider = yield* ProviderService; + yield* provider.startSession(initial.threadId, { + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, + threadId: initial.threadId, + runtimeMode: "full-access", + }); + }).pipe(Effect.provide(secondProviderLayer)); + + 
assert.equal(secondClaude.startSession.mock.calls.length, 1); + const resumedStartInput = secondClaude.startSession.mock.calls[0]?.[0]; + assert.equal(typeof resumedStartInput === "object" && resumedStartInput !== null, true); + if (resumedStartInput && typeof resumedStartInput === "object") { + const startPayload = resumedStartInput as { + provider?: string; + cwd?: string; + resumeCursor?: unknown; + threadId?: string; + }; + assert.equal(startPayload.provider, "claudeAgent"); + assert.equal(startPayload.cwd, "/tmp/project-claude-cwd"); + assert.deepEqual(startPayload.resumeCursor, initial.resumeCursor); + assert.equal(startPayload.threadId, initial.threadId); + } + + fs.rmSync(tempDir, { recursive: true, force: true }); + }).pipe(Effect.provide(NodeServices.layer)), + ); }); const fanout = makeProviderServiceLayer(); @@ -1072,7 +1393,8 @@ fanout.layer("ProviderServiceLive fanout", (it) => { Effect.gen(function* () { const provider = yield* ProviderService; const session = yield* provider.startSession(asThreadId("thread-1"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-1"), runtimeMode: "full-access", }); @@ -1086,7 +1408,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { const completedEvent: LegacyProviderRuntimeEvent = { type: "turn.completed", eventId: asEventId("evt-1"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: session.threadId, turnId: asTurnId("turn-1"), @@ -1103,6 +1425,13 @@ fanout.layer("ProviderServiceLive fanout", (it) => { events.some((entry) => entry.type === "turn.completed"), true, ); + assert.equal( + events.some( + (entry) => + entry.type === "turn.completed" && entry.providerInstanceId === codexInstanceId, + ), + true, + ); }), ); @@ -1110,7 +1439,8 @@ fanout.layer("ProviderServiceLive fanout", (it) => { Effect.gen(function* () { const provider = yield* ProviderService; const 
session = yield* provider.startSession(asThreadId("thread-seq"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-seq"), runtimeMode: "full-access", }); @@ -1125,7 +1455,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { fanout.codex.emit({ type: "tool.started", eventId: asEventId("evt-seq-1"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: session.threadId, turnId: asTurnId("turn-1"), @@ -1135,7 +1465,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { fanout.codex.emit({ type: "tool.completed", eventId: asEventId("evt-seq-2"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: session.threadId, turnId: asTurnId("turn-1"), @@ -1145,7 +1475,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { fanout.codex.emit({ type: "turn.completed", eventId: asEventId("evt-seq-3"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: session.threadId, turnId: asTurnId("turn-1"), @@ -1165,7 +1495,8 @@ fanout.layer("ProviderServiceLive fanout", (it) => { Effect.gen(function* () { const provider = yield* ProviderService; const session = yield* provider.startSession(asThreadId("thread-1"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-1"), runtimeMode: "full-access", }); @@ -1190,7 +1521,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { { type: "tool.completed", eventId: asEventId("evt-ordered-1"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: session.threadId, turnId: asTurnId("turn-1"), @@ -1201,7 +1532,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { { type: "message.delta", eventId: asEventId("evt-ordered-2"), - 
provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: session.threadId, turnId: asTurnId("turn-1"), @@ -1210,7 +1541,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { { type: "turn.completed", eventId: asEventId("evt-ordered-3"), - provider: "codex", + provider: ProviderDriverKind.make("codex"), createdAt: new Date().toISOString(), threadId: session.threadId, turnId: asTurnId("turn-1"), @@ -1237,7 +1568,8 @@ fanout.layer("ProviderServiceLive fanout", (it) => { const provider = yield* ProviderService; const session = yield* provider.startSession(asThreadId("thread-metrics"), { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, threadId: asThreadId("thread-metrics"), cwd: "/tmp/project", runtimeMode: "full-access", @@ -1266,7 +1598,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { assert.equal( hasMetricSnapshot(snapshots, "t3_provider_turns_total", { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), operation: "interrupt", outcome: "success", }), @@ -1274,7 +1606,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { ); assert.equal( hasMetricSnapshot(snapshots, "t3_provider_turns_total", { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), operation: "approval-response", outcome: "success", }), @@ -1282,7 +1614,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { ); assert.equal( hasMetricSnapshot(snapshots, "t3_provider_turns_total", { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), operation: "user-input-response", outcome: "success", }), @@ -1290,7 +1622,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { ); assert.equal( hasMetricSnapshot(snapshots, "t3_provider_turns_total", { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), operation: "rollback", outcome: "success", }), @@ -1298,7 
+1630,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { ); assert.equal( hasMetricSnapshot(snapshots, "t3_provider_sessions_total", { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), operation: "stop", outcome: "success", }), @@ -1314,7 +1646,8 @@ fanout.layer("ProviderServiceLive fanout", (it) => { const provider = yield* ProviderService; const session = yield* provider.startSession(asThreadId("thread-send-metrics"), { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), + providerInstanceId: claudeAgentInstanceId, threadId: asThreadId("thread-send-metrics"), cwd: "/tmp/project-send-metrics", runtimeMode: "full-access", @@ -1330,7 +1663,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { assert.equal( hasMetricSnapshot(snapshots, "t3_provider_turns_total", { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), operation: "send", outcome: "success", }), @@ -1338,7 +1671,7 @@ fanout.layer("ProviderServiceLive fanout", (it) => { ); assert.equal( hasMetricSnapshot(snapshots, "t3_provider_turn_duration", { - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), operation: "send", }), true, @@ -1349,6 +1682,50 @@ fanout.layer("ProviderServiceLive fanout", (it) => { const validation = makeProviderServiceLayer(); validation.layer("ProviderServiceLive validation", (it) => { + it.effect("rejects session starts without an explicit provider instance id", () => + Effect.gen(function* () { + const provider = yield* ProviderService; + + validation.codex.startSession.mockClear(); + const failure = yield* Effect.flip( + provider.startSession(asThreadId("thread-missing-instance-id"), { + provider: ProviderDriverKind.make("codex"), + threadId: asThreadId("thread-missing-instance-id"), + runtimeMode: "full-access", + }), + ); + + assert.instanceOf(failure, ProviderValidationError); + assert.include(failure.issue, "Provider instance id is required for provider 
'codex'."); + assert.equal(validation.codex.startSession.mock.calls.length, 0); + }), + ); + + it.effect("rejects mismatched provider kind and provider instance id", () => + Effect.gen(function* () { + const provider = yield* ProviderService; + + validation.codex.startSession.mockClear(); + validation.claude.startSession.mockClear(); + const failure = yield* Effect.flip( + provider.startSession(asThreadId("thread-instance-mismatch"), { + provider: ProviderDriverKind.make("codex"), + providerInstanceId: claudeAgentInstanceId, + threadId: asThreadId("thread-instance-mismatch"), + runtimeMode: "full-access", + }), + ); + + assert.instanceOf(failure, ProviderValidationError); + assert.include( + failure.issue, + "Provider instance 'claudeAgent' belongs to driver 'claudeAgent', not 'codex'.", + ); + assert.equal(validation.codex.startSession.mock.calls.length, 0); + assert.equal(validation.claude.startSession.mock.calls.length, 0); + }), + ); + it.effect("returns ProviderValidationError for invalid input payloads", () => Effect.gen(function* () { const provider = yield* ProviderService; @@ -1383,7 +1760,7 @@ validation.layer("ProviderServiceLive validation", (it) => { Effect.sync(() => { const now = new Date().toISOString(); return { - provider: "codex", + provider: ProviderDriverKind.make("codex"), status: "ready", threadId: input.threadId, runtimeMode: input.runtimeMode, @@ -1395,7 +1772,8 @@ validation.layer("ProviderServiceLive validation", (it) => { ); const session = yield* provider.startSession(asThreadId("thread-missing"), { - provider: "codex", + provider: ProviderDriverKind.make("codex"), + providerInstanceId: codexInstanceId, threadId: asThreadId("thread-missing"), cwd: "/tmp/project", runtimeMode: "full-access", diff --git a/apps/server/src/provider/Layers/ProviderService.ts b/apps/server/src/provider/Layers/ProviderService.ts index 94630d3bca9..05b4e72ec18 100644 --- a/apps/server/src/provider/Layers/ProviderService.ts +++ 
b/apps/server/src/provider/Layers/ProviderService.ts @@ -19,10 +19,12 @@ import { ProviderSendTurnInput, ProviderSessionStartInput, ProviderStopSessionInput, + type ProviderInstanceId, + type ProviderDriverKind, type ProviderRuntimeEvent, type ProviderSession, } from "@t3tools/contracts"; -import { Effect, Layer, Option, PubSub, Schema, SchemaIssue, Stream } from "effect"; +import { Cause, Effect, Layer, Option, PubSub, Ref, Schema, SchemaIssue, Stream } from "effect"; import { increment, @@ -34,19 +36,24 @@ import { providerTurnMetricAttributes, withMetrics, } from "../../observability/Metrics.ts"; -import { ProviderValidationError } from "../Errors.ts"; +import { type ProviderAdapterError, ProviderValidationError } from "../Errors.ts"; +import type { ProviderAdapterShape } from "../Services/ProviderAdapter.ts"; import { ProviderAdapterRegistry } from "../Services/ProviderAdapterRegistry.ts"; import { ProviderService, type ProviderServiceShape } from "../Services/ProviderService.ts"; import { ProviderSessionDirectory, type ProviderRuntimeBinding, } from "../Services/ProviderSessionDirectory.ts"; -import { type EventNdjsonLogger, makeEventNdjsonLogger } from "./EventNdjsonLogger.ts"; +import { type EventNdjsonLogger } from "./EventNdjsonLogger.ts"; +import { ProviderEventLoggers } from "./ProviderEventLoggers.ts"; import { AnalyticsService } from "../../telemetry/Services/AnalyticsService.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +/** + * Hook for tests that want to override the canonical event logger pulled + * from `ProviderEventLoggers`. Production wiring leaves this undefined and + * reads the logger off the tag. + */ export interface ProviderServiceLiveOptions { - readonly canonicalEventLogPath?: string; readonly canonicalEventLogger?: EventNdjsonLogger; } @@ -141,18 +148,53 @@ function readPersistedCwd( return trimmed.length > 0 ? 
trimmed : undefined; } +const dieOnMissingBindingInstanceId = ( + operation: string, + payload: { + readonly providerInstanceId?: ProviderInstanceId | undefined; + readonly provider?: ProviderDriverKind | undefined; + }, +): ProviderInstanceId => { + if (payload.providerInstanceId !== undefined) { + return payload.providerInstanceId; + } + throw new Error( + payload.provider + ? `${operation}: provider instance id is required for provider '${payload.provider}'.` + : `${operation}: provider instance id is required.`, + ); +}; + +const correlateRuntimeEventWithInstance = ( + source: { + readonly instanceId: ProviderInstanceId; + readonly provider: ProviderDriverKind; + }, + event: ProviderRuntimeEvent, +): ProviderRuntimeEvent => { + if (event.provider !== source.provider) { + throw new Error( + `ProviderService.streamEvents: provider instance '${source.instanceId}' is backed by driver '${source.provider}' but emitted driver '${event.provider}'.`, + ); + } + if (event.providerInstanceId !== undefined && event.providerInstanceId !== source.instanceId) { + throw new Error( + `ProviderService.streamEvents: provider instance '${source.instanceId}' emitted event for instance '${event.providerInstanceId}'.`, + ); + } + return { ...event, providerInstanceId: source.instanceId }; +}; + const makeProviderService = Effect.fn("makeProviderService")(function* ( options?: ProviderServiceLiveOptions, ) { const analytics = yield* Effect.service(AnalyticsService); - const serverSettings = yield* ServerSettingsService; - const canonicalEventLogger = - options?.canonicalEventLogger ?? - (options?.canonicalEventLogPath !== undefined - ? 
yield* makeEventNdjsonLogger(options.canonicalEventLogPath, { - stream: "canonical", - }) - : undefined); + const eventLoggers = yield* ProviderEventLoggers; + // Options-provided logger wins (test overrides); otherwise we take whatever + // the `ProviderEventLoggers` tag exposes — `undefined` means "no canonical + // log writer is attached", which downstream code already handles as a + // no-op. + const canonicalEventLogger = options?.canonicalEventLogger ?? eventLoggers.canonical; const registry = yield* ProviderAdapterRegistry; const directory = yield* ProviderSessionDirectory; @@ -169,6 +211,24 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( Effect.asVoid, ); + const requireBindingInstanceId = ( + operation: string, + payload: { + readonly providerInstanceId?: ProviderInstanceId | undefined; + readonly provider?: ProviderDriverKind | undefined; + }, + ): Effect.Effect => + payload.providerInstanceId !== undefined + ? Effect.succeed(payload.providerInstanceId) + : Effect.fail( + toValidationError( + operation, + payload.provider + ? `Provider instance id is required for provider '${payload.provider}'.` + : "Provider instance id is required.", + ), + ); + const upsertSessionBinding = ( session: ProviderSession, threadId: ThreadId, @@ -178,38 +238,106 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( readonly lastRuntimeEventAt?: string; }, ) => - directory.upsert({ - threadId, - provider: session.provider, - runtimeMode: session.runtimeMode, - status: toRuntimeStatus(session), - ...(session.resumeCursor !== undefined ? 
{ resumeCursor: session.resumeCursor } : {}), - runtimePayload: toRuntimePayloadFromSession(session, extra), + Effect.gen(function* () { + const providerInstanceId = yield* requireBindingInstanceId( + "ProviderService.upsertSessionBinding", + session, + ); + yield* directory.upsert({ + threadId, + provider: session.provider, + providerInstanceId, + runtimeMode: session.runtimeMode, + status: toRuntimeStatus(session), + ...(session.resumeCursor !== undefined ? { resumeCursor: session.resumeCursor } : {}), + runtimePayload: toRuntimePayloadFromSession(session, extra), + }); }); - const providers = yield* registry.listProviders(); - const adapters = yield* Effect.forEach(providers, (provider) => registry.getByProvider(provider)); - const processRuntimeEvent = (event: ProviderRuntimeEvent): Effect.Effect => - increment(providerRuntimeEventsTotal, { - provider: event.provider, - eventType: event.type, - }).pipe(Effect.andThen(publishRuntimeEvent(event))); + const processRuntimeEvent = ( + source: { + readonly instanceId: ProviderInstanceId; + readonly provider: ProviderDriverKind; + }, + event: ProviderRuntimeEvent, + ): Effect.Effect => + Effect.sync(() => correlateRuntimeEventWithInstance(source, event)).pipe( + Effect.flatMap((canonicalEvent) => + increment(providerRuntimeEventsTotal, { + provider: canonicalEvent.provider, + eventType: canonicalEvent.type, + }).pipe(Effect.andThen(publishRuntimeEvent(canonicalEvent))), + ), + ); + + // `subscribedAdapters` is our source-of-truth for "which instance adapters + // are currently wired into the runtime event bus". It both tracks the set + // of live subscriptions (so `reconcileInstanceSubscriptions` can diff and + // fork only the *new* or *rebuilt* ones) and serves as the dynamic adapter + // list consumed by `stopStaleSessionsForThread`, `listSessions`, and + // `runStopAll` — replacing the pre-Slice-D startup snapshot so hot-added + // instances become visible to those call sites as soon as settings edits + // land. 
+ const subscribedAdapters = yield* Ref.make( + new Map>(), + ); - yield* Effect.forEach(adapters, (adapter) => - Stream.runForEach(adapter.streamEvents, processRuntimeEvent).pipe(Effect.forkScoped), - ).pipe(Effect.asVoid); + const getAdapterEntries = Ref.get(subscribedAdapters).pipe( + Effect.map((map) => Array.from(map.entries())), + ); + + // Rebuild the map of id → adapter from the registry and fork a new event + // subscription for every instance that is either brand new or whose adapter + // identity changed (indicating the underlying `ProviderInstance` was torn + // down and rebuilt by `ProviderInstanceRegistry.reconcile`). Orphaned + // fibers for removed/replaced instances exit on their own because their + // adapter's `streamEvents` source terminates when the old scope closes. + const reconcileInstanceSubscriptions = Effect.gen(function* () { + const previous = yield* Ref.get(subscribedAdapters); + const currentIds = yield* registry.listInstances(); + const next = new Map>(); + for (const id of currentIds) { + const adapterOption = yield* registry + .getByInstance(id) + .pipe(Effect.tapError(Effect.logWarning), Effect.option); + if (Option.isNone(adapterOption)) continue; + const adapter = adapterOption.value; + next.set(id, adapter); + if (previous.get(id) !== adapter) { + yield* Stream.runForEach(adapter.streamEvents, (event) => + processRuntimeEvent( + { + instanceId: id, + provider: adapter.provider, + }, + event, + ), + ).pipe(Effect.forkScoped); + } + } + yield* Ref.set(subscribedAdapters, next); + }); + + const instanceChanges = yield* registry.subscribeChanges; + yield* reconcileInstanceSubscriptions; + yield* Stream.runForEach( + Stream.fromSubscription(instanceChanges), + () => reconcileInstanceSubscriptions, + ).pipe(Effect.forkScoped); const recoverSessionForThread = Effect.fn("recoverSessionForThread")(function* (input: { readonly binding: ProviderRuntimeBinding; readonly operation: string; }) { + const bindingInstanceId = yield* 
requireBindingInstanceId(input.operation, input.binding); yield* Effect.annotateCurrentSpan({ "provider.operation": "recover-session", "provider.kind": input.binding.provider, + "provider.instance_id": bindingInstanceId, "provider.thread_id": input.binding.threadId, }); return yield* Effect.gen(function* () { - const adapter = yield* registry.getByProvider(input.binding.provider); + const adapter = yield* registry.getByInstance(bindingInstanceId); const hasResumeCursor = input.binding.resumeCursor !== null && input.binding.resumeCursor !== undefined; const hasActiveSession = yield* adapter.hasSession(input.binding.threadId); @@ -219,7 +347,10 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( (session) => session.threadId === input.binding.threadId, ); if (existing) { - yield* upsertSessionBinding(existing, input.binding.threadId); + yield* upsertSessionBinding( + { ...existing, providerInstanceId: bindingInstanceId }, + input.binding.threadId, + ); yield* analytics.record("provider.session.recovered", { provider: existing.provider, strategy: "adopt-existing", @@ -242,6 +373,7 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( const resumed = yield* adapter.startSession({ threadId: input.binding.threadId, provider: input.binding.provider, + providerInstanceId: bindingInstanceId, ...(persistedCwd ? { cwd: persistedCwd } : {}), ...(persistedModelSelection ? { modelSelection: persistedModelSelection } : {}), ...(hasResumeCursor ? 
{ resumeCursor: input.binding.resumeCursor } : {}), @@ -254,7 +386,10 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( ); } - yield* upsertSessionBinding(resumed, input.binding.threadId); + yield* upsertSessionBinding( + { ...resumed, providerInstanceId: bindingInstanceId }, + input.binding.threadId, + ); yield* analytics.record("provider.session.recovered", { provider: resumed.provider, strategy: "resume-thread", @@ -284,29 +419,49 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( `Cannot route thread '${input.threadId}' because no persisted provider binding exists.`, ); } - const adapter = yield* registry.getByProvider(binding.provider); + const instanceId = yield* requireBindingInstanceId(input.operation, binding); + const adapter = yield* registry.getByInstance(instanceId); const hasRequestedSession = yield* adapter.hasSession(input.threadId); if (hasRequestedSession) { - return { adapter, threadId: input.threadId, isActive: true } as const; + return { + adapter, + instanceId, + threadId: input.threadId, + isActive: true, + } as const; } if (!input.allowRecovery) { - return { adapter, threadId: input.threadId, isActive: false } as const; + return { + adapter, + instanceId, + threadId: input.threadId, + isActive: false, + } as const; } - const recovered = yield* recoverSessionForThread({ binding, operation: input.operation }); - return { adapter: recovered.adapter, threadId: input.threadId, isActive: true } as const; + const recovered = yield* recoverSessionForThread({ + binding, + operation: input.operation, + }); + return { + adapter: recovered.adapter, + instanceId, + threadId: input.threadId, + isActive: true, + } as const; }); const stopStaleSessionsForThread = Effect.fn("stopStaleSessionsForThread")(function* (input: { readonly threadId: ThreadId; - readonly currentProvider: ProviderSession["provider"]; + readonly currentInstanceId: ProviderInstanceId; }) { + const currentAdapters = yield* 
getAdapterEntries; yield* Effect.forEach( - adapters, - (adapter) => - adapter.provider === input.currentProvider + currentAdapters, + ([instanceId, adapter]) => + instanceId === input.currentInstanceId ? Effect.void : Effect.gen(function* () { const hasSession = yield* adapter.hasSession(input.threadId); @@ -341,63 +496,72 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( payload: rawInput, }); - const input = { - ...parsed, - threadId, - provider: parsed.provider ?? "codex", - }; + const resolvedInstanceId = yield* requireBindingInstanceId( + "ProviderService.startSession", + parsed, + ); + let metricProvider = parsed.provider ?? String(resolvedInstanceId); yield* Effect.annotateCurrentSpan({ "provider.operation": "start-session", - "provider.kind": input.provider, + "provider.instance_id": resolvedInstanceId, "provider.thread_id": threadId, - "provider.runtime_mode": input.runtimeMode, + "provider.runtime_mode": parsed.runtimeMode, }); return yield* Effect.gen(function* () { - const settings = yield* serverSettings.getSettings.pipe( - Effect.mapError((error) => - toValidationError( - "ProviderService.startSession", - `Failed to load provider settings: ${error.message}`, - error, - ), - ), - ); - if (!settings.providers[input.provider].enabled) { + const instanceInfo = yield* registry.getInstanceInfo(resolvedInstanceId); + const resolvedProvider = instanceInfo.driverKind; + metricProvider = resolvedProvider; + if (parsed.provider !== undefined && parsed.provider !== resolvedProvider) { return yield* toValidationError( "ProviderService.startSession", - `Provider '${input.provider}' is disabled in T3 Code settings.`, + `Provider instance '${resolvedInstanceId}' belongs to driver '${resolvedProvider}', not '${parsed.provider}'.`, + ); + } + const input = { + ...parsed, + threadId, + provider: resolvedProvider, + }; + if (!instanceInfo.enabled) { + return yield* toValidationError( + "ProviderService.startSession", + `Provider instance 
'${resolvedInstanceId}' is disabled in T3 Code settings.`, ); } const persistedBinding = Option.getOrUndefined(yield* directory.getBinding(threadId)); const effectiveResumeCursor = input.resumeCursor ?? - (persistedBinding?.provider === input.provider + (persistedBinding?.providerInstanceId === resolvedInstanceId ? persistedBinding.resumeCursor : undefined); const effectiveCwd = input.cwd ?? - (persistedBinding?.provider === input.provider + (persistedBinding?.providerInstanceId === resolvedInstanceId ? readPersistedCwd(persistedBinding.runtimePayload) : undefined); yield* Effect.annotateCurrentSpan({ + "provider.kind": resolvedProvider, "provider.resume_cursor.source": input.resumeCursor !== undefined ? "request" - : effectiveResumeCursor !== undefined && persistedBinding?.provider === input.provider + : effectiveResumeCursor !== undefined && + persistedBinding?.providerInstanceId === resolvedInstanceId ? "persisted" : "none", "provider.resume_cursor.present": effectiveResumeCursor !== undefined, "provider.cwd.source": input.cwd !== undefined ? "request" - : effectiveCwd !== undefined && persistedBinding?.provider === input.provider + : effectiveCwd !== undefined && + persistedBinding?.providerInstanceId === resolvedInstanceId ? "persisted" : "none", "provider.cwd.effective": effectiveCwd ?? "", }); - const adapter = yield* registry.getByProvider(input.provider); + const adapter = yield* registry.getByInstance(resolvedInstanceId); const session = yield* adapter.startSession({ ...input, + providerInstanceId: resolvedInstanceId, ...(effectiveCwd !== undefined ? { cwd: effectiveCwd } : {}), ...(effectiveResumeCursor !== undefined ? 
{ resumeCursor: effectiveResumeCursor } : {}), }); @@ -408,31 +572,36 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( `Adapter/provider mismatch: requested '${adapter.provider}', received '${session.provider}'.`, ); } + const sessionWithInstance = { + ...session, + providerInstanceId: resolvedInstanceId, + }; yield* stopStaleSessionsForThread({ threadId, - currentProvider: adapter.provider, + currentInstanceId: resolvedInstanceId, }); - yield* upsertSessionBinding(session, threadId, { + yield* upsertSessionBinding(sessionWithInstance, threadId, { modelSelection: input.modelSelection, }); yield* analytics.record("provider.session.started", { - provider: session.provider, + provider: sessionWithInstance.provider, runtimeMode: input.runtimeMode, - hasResumeCursor: session.resumeCursor !== undefined, + hasResumeCursor: sessionWithInstance.resumeCursor !== undefined, hasCwd: typeof effectiveCwd === "string" && effectiveCwd.trim().length > 0, hasModel: typeof input.modelSelection?.model === "string" && input.modelSelection.model.trim().length > 0, }); - return session; + return sessionWithInstance; }).pipe( withMetrics({ counter: providerSessionsTotal, - attributes: providerMetricAttributes(input.provider, { - operation: "start", - }), + attributes: () => + providerMetricAttributes(metricProvider, { + operation: "start", + }), }), ); }, @@ -479,6 +648,7 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( yield* directory.upsert({ threadId: input.threadId, provider: routed.adapter.provider, + providerInstanceId: routed.instanceId, status: "running", ...(turn.resumeCursor !== undefined ? 
{ resumeCursor: turn.resumeCursor } : {}), runtimePayload: { @@ -647,6 +817,7 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( yield* directory.upsert({ threadId: input.threadId, provider: routed.adapter.provider, + providerInstanceId: routed.instanceId, status: "stopped", runtimePayload: { activeTurnId: null, @@ -669,8 +840,16 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( const listSessions: ProviderServiceShape["listSessions"] = Effect.fn("listSessions")( function* () { - const sessionsByProvider = yield* Effect.forEach(adapters, (adapter) => - adapter.listSessions(), + const currentAdapters = yield* getAdapterEntries; + const sessionsByProvider = yield* Effect.forEach(currentAdapters, ([instanceId, adapter]) => + adapter.listSessions().pipe( + Effect.map((sessions) => + sessions.map((session) => ({ + ...session, + providerInstanceId: instanceId, + })), + ), + ), ); const activeSessions = sessionsByProvider.flatMap((sessions) => sessions); const persistedBindings = yield* directory.listThreadIds().pipe( @@ -694,29 +873,54 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( } } - return activeSessions.map((session) => { + const sessions: ProviderSession[] = []; + for (const session of activeSessions) { const binding = bindingsByThreadId.get(session.threadId); if (!binding) { - return session; + sessions.push(session); + continue; } const overrides: { resumeCursor?: ProviderSession["resumeCursor"]; runtimeMode?: ProviderSession["runtimeMode"]; + providerInstanceId?: ProviderSession["providerInstanceId"]; } = {}; + overrides.providerInstanceId = dieOnMissingBindingInstanceId( + "ProviderService.listSessions", + binding, + ); + if (binding.provider !== session.provider) { + return yield* Effect.die( + new Error( + `ProviderService.listSessions: thread '${session.threadId}' is active on provider '${session.provider}' but persisted binding names provider '${binding.provider}'.`, + ), + ); 
+ } + if (overrides.providerInstanceId !== session.providerInstanceId) { + return yield* Effect.die( + new Error( + `ProviderService.listSessions: thread '${session.threadId}' is active on provider instance '${session.providerInstanceId}' but persisted binding names '${overrides.providerInstanceId}'.`, + ), + ); + } if (session.resumeCursor === undefined && binding.resumeCursor !== undefined) { overrides.resumeCursor = binding.resumeCursor; } if (binding.runtimeMode !== undefined) { overrides.runtimeMode = binding.runtimeMode; } - return Object.assign({}, session, overrides); - }); + sessions.push(Object.assign({}, session, overrides)); + } + return sessions; }, ); - const getCapabilities: ProviderServiceShape["getCapabilities"] = (provider) => - registry.getByProvider(provider).pipe(Effect.map((adapter) => adapter.capabilities)); + const getCapabilities: ProviderServiceShape["getCapabilities"] = (instanceId) => + registry.getByInstance(instanceId).pipe(Effect.map((adapter) => adapter.capabilities)); + + const getInstanceInfo: ProviderServiceShape["getInstanceInfo"] = (instanceId) => + registry.getInstanceInfo(instanceId); const rollbackConversation: ProviderServiceShape["rollbackConversation"] = Effect.fn( "rollbackConversation", @@ -761,8 +965,16 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( const runStopAll = Effect.fn("runStopAll")(function* () { const threadIds = yield* directory.listThreadIds(); - const activeSessions = yield* Effect.forEach(adapters, (adapter) => - adapter.listSessions(), + const currentAdapters = yield* getAdapterEntries; + const activeSessions = yield* Effect.forEach(currentAdapters, ([instanceId, adapter]) => + adapter.listSessions().pipe( + Effect.map((sessions) => + sessions.map((session) => ({ + ...session, + providerInstanceId: instanceId, + })), + ), + ), ).pipe(Effect.map((sessionsByAdapter) => sessionsByAdapter.flatMap((sessions) => sessions))); yield* Effect.forEach(activeSessions, (session) => 
upsertSessionBinding(session, session.threadId, { @@ -770,23 +982,22 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( lastRuntimeEventAt: new Date().toISOString(), }), ).pipe(Effect.asVoid); - yield* Effect.forEach(adapters, (adapter) => adapter.stopAll()).pipe(Effect.asVoid); - yield* Effect.forEach(threadIds, (threadId) => - directory.getProvider(threadId).pipe( - Effect.flatMap((provider) => - directory.upsert({ - threadId, - provider, - status: "stopped", - runtimePayload: { - activeTurnId: null, - lastRuntimeEvent: "provider.stopAll", - lastRuntimeEventAt: new Date().toISOString(), - }, - }), - ), - ), - ).pipe(Effect.asVoid); + yield* Effect.forEach(currentAdapters, ([, adapter]) => adapter.stopAll()).pipe(Effect.asVoid); + const bindings = yield* directory.listBindings().pipe(Effect.orElseSucceed(() => [])); + yield* Effect.forEach(bindings, (binding) => { + const providerInstanceId = dieOnMissingBindingInstanceId("ProviderService.stopAll", binding); + return directory.upsert({ + threadId: binding.threadId, + provider: binding.provider, + providerInstanceId, + status: "stopped", + runtimePayload: { + activeTurnId: null, + lastRuntimeEvent: "provider.stopAll", + lastRuntimeEventAt: new Date().toISOString(), + }, + }); + }).pipe(Effect.asVoid); yield* analytics.record("provider.sessions.stopped_all", { sessionCount: threadIds.length, }); @@ -794,8 +1005,10 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( }); yield* Effect.addFinalizer(() => - Effect.catch(runStopAll(), (cause) => - Effect.logWarning("failed to stop provider service", { cause }), + runStopAll().pipe( + Effect.catchCause((cause) => + Effect.logWarning("failed to stop provider service", { cause: Cause.pretty(cause) }), + ), ), ); @@ -808,6 +1021,7 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( stopSession, listSessions, getCapabilities, + getInstanceInfo, rollbackConversation, // Each access creates a fresh PubSub 
subscription so that multiple // consumers (ProviderRuntimeIngestion, CheckpointReactor, etc.) each diff --git a/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts b/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts index d19eab25eb6..2ff26320e57 100644 --- a/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts +++ b/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts @@ -3,7 +3,7 @@ import os from "node:os"; import path from "node:path"; import * as NodeServices from "@effect/platform-node/NodeServices"; -import { ThreadId } from "@t3tools/contracts"; +import { ProviderDriverKind, ThreadId } from "@t3tools/contracts"; import { it, assert } from "@effect/vitest"; import { assertSome } from "@effect/vitest/utils"; import { Effect, Layer, Option } from "effect"; @@ -38,7 +38,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL const initialThreadId = ThreadId.make("thread-1"); yield* directory.upsert({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: initialThreadId, }); @@ -47,7 +47,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL const resolvedBinding = yield* directory.getBinding(initialThreadId); assertSome(resolvedBinding, { threadId: initialThreadId, - provider: "codex", + provider: ProviderDriverKind.make("codex"), }); if (Option.isSome(resolvedBinding)) { assert.equal(resolvedBinding.value.threadId, initialThreadId); @@ -56,7 +56,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL const nextThreadId = ThreadId.make("thread-2"); yield* directory.upsert({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId: nextThreadId, }); const updatedBinding = yield* directory.getBinding(nextThreadId); @@ -85,7 +85,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL const threadId = ThreadId.make("thread-runtime"); yield* 
directory.upsert({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId, status: "starting", resumeCursor: { @@ -98,7 +98,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL }); yield* directory.upsert({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId, status: "running", runtimePayload: { @@ -133,6 +133,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL yield* runtimeRepository.upsert({ threadId: newerThreadId, providerName: "codex", + providerInstanceId: null, adapterKey: "codex", runtimeMode: "full-access", status: "running", @@ -148,6 +149,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL yield* runtimeRepository.upsert({ threadId: olderThreadId, providerName: "claudeAgent", + providerInstanceId: null, adapterKey: "claudeAgent", runtimeMode: "approval-required", status: "starting", @@ -165,7 +167,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL assert.deepEqual(bindings, [ { threadId: olderThreadId, - provider: "claudeAgent", + provider: ProviderDriverKind.make("claudeAgent"), adapterKey: "claudeAgent", runtimeMode: "approval-required", status: "starting", @@ -179,7 +181,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL }, { threadId: newerThreadId, - provider: "codex", + provider: ProviderDriverKind.make("codex"), adapterKey: "codex", runtimeMode: "full-access", status: "running", @@ -203,6 +205,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL yield* runtimeRepository.upsert({ threadId, providerName: "claudeAgent", + providerInstanceId: null, adapterKey: "claudeAgent", runtimeMode: "full-access", status: "running", @@ -212,7 +215,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL }); yield* directory.upsert({ - provider: "codex", + provider: 
ProviderDriverKind.make("codex"), threadId, }); @@ -235,7 +238,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL yield* Effect.gen(function* () { const directory = yield* ProviderSessionDirectory; yield* directory.upsert({ - provider: "codex", + provider: ProviderDriverKind.make("codex"), threadId, }); }).pipe(Effect.provide(directoryLayer)); @@ -249,7 +252,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL const resolvedBinding = yield* directory.getBinding(threadId); assertSome(resolvedBinding, { threadId, - provider: "codex", + provider: ProviderDriverKind.make("codex"), }); if (Option.isSome(resolvedBinding)) { assert.equal(resolvedBinding.value.threadId, threadId); @@ -272,7 +275,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL const threadId = ThreadId.make("thread-cursor"); yield* directory.upsert({ - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), threadId, }); @@ -281,7 +284,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL const resolvedBinding = yield* directory.getBinding(threadId); assertSome(resolvedBinding, { threadId, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), }); if (Option.isSome(resolvedBinding)) { assert.equal(resolvedBinding.value.threadId, threadId); @@ -323,7 +326,7 @@ it.layer(makeDirectoryLayer(SqlitePersistenceMemory))("ProviderSessionDirectoryL const binding = yield* directory.getBinding(threadId); assertSome(binding, { threadId, - provider: "geminiCli", + provider: ProviderDriverKind.make("geminiCli"), }); })); }); diff --git a/apps/server/src/provider/Layers/ProviderSessionDirectory.ts b/apps/server/src/provider/Layers/ProviderSessionDirectory.ts index 50a3ffe3fe2..e1479082f2f 100644 --- a/apps/server/src/provider/Layers/ProviderSessionDirectory.ts +++ b/apps/server/src/provider/Layers/ProviderSessionDirectory.ts @@ -1,11 +1,9 @@ -import { type 
ProviderKind, type ThreadId } from "@t3tools/contracts"; -import { Cache, Duration, Effect, Layer, Option } from "effect"; -import * as Semaphore from "effect/Semaphore"; +import { defaultInstanceIdForDriver, ProviderDriverKind, type ThreadId } from "@t3tools/contracts"; +import { Effect, Layer, Option, Schema } from "effect"; import type { ProviderSessionRuntime } from "../../persistence/Services/ProviderSessionRuntime.ts"; import { ProviderSessionRuntimeRepository } from "../../persistence/Services/ProviderSessionRuntime.ts"; import { ProviderSessionDirectoryPersistenceError, ProviderValidationError } from "../Errors.ts"; -import { normalizePersistedProviderKindName } from "../providerKind.ts"; import { ProviderSessionDirectory, type ProviderRuntimeBinding, @@ -22,19 +20,19 @@ function toPersistenceError(operation: string) { }); } -function decodeProviderKind( +function decodeProviderDriverKind( providerName: string, operation: string, -): Effect.Effect { - const normalizedProvider = normalizePersistedProviderKindName(providerName); - if (normalizedProvider !== null) { - return Effect.succeed(normalizedProvider); - } - return Effect.fail( - new ProviderSessionDirectoryPersistenceError({ - operation, - detail: `Unknown persisted provider '${providerName}'.`, - }), +): Effect.Effect { + return Schema.decodeUnknownEffect(ProviderDriverKind)(providerName).pipe( + Effect.mapError( + (cause) => + new ProviderSessionDirectoryPersistenceError({ + operation, + detail: `Unknown persisted provider '${providerName}'.`, + cause, + }), + ), ); } @@ -59,12 +57,17 @@ function toRuntimeBinding( runtime: ProviderSessionRuntime, operation: string, ): Effect.Effect { - return decodeProviderKind(runtime.providerName, operation).pipe( + return decodeProviderDriverKind(runtime.providerName, operation).pipe( Effect.map( (provider) => ({ threadId: runtime.threadId, provider, + // Migration boundary only: rows written before the instance split + // have a null provider_instance_id. 
Promote them as they leave + // persistence so hot routing code never has to infer an instance + // from a driver kind. + providerInstanceId: runtime.providerInstanceId ?? defaultInstanceIdForDriver(provider), adapterKey: runtime.adapterKey, runtimeMode: runtime.runtimeMode, status: runtime.status, @@ -79,12 +82,6 @@ function toRuntimeBinding( const makeProviderSessionDirectory = Effect.gen(function* () { const repository = yield* ProviderSessionRuntimeRepository; - const upsertLocks = yield* Cache.make({ - capacity: 10_000, - timeToLive: Duration.minutes(60), - lookup: () => Semaphore.make(1), - }); - const getBinding = (threadId: ThreadId) => repository.getByThreadId({ threadId }).pipe( Effect.mapError(toPersistenceError("ProviderSessionDirectory.getBinding:getByThreadId")), @@ -92,71 +89,59 @@ const makeProviderSessionDirectory = Effect.gen(function* () { Option.match(runtime, { onNone: () => Effect.succeed(Option.none()), onSome: (value) => - decodeProviderKind(value.providerName, "ProviderSessionDirectory.getBinding").pipe( - Effect.map((provider) => - Option.some({ - threadId: value.threadId, - provider, - adapterKey: value.adapterKey, - runtimeMode: value.runtimeMode, - status: value.status, - resumeCursor: value.resumeCursor, - runtimePayload: value.runtimePayload, - }), - ), - // Gracefully treat unknown persisted providers as "no binding" - Effect.orElseSucceed(() => Option.none()), + toRuntimeBinding(value, "ProviderSessionDirectory.getBinding").pipe( + Effect.map((binding) => Option.some(binding)), ), }), ), ); const upsert: ProviderSessionDirectoryShape["upsert"] = Effect.fn(function* (binding) { - const threadId = binding.threadId; - if (!threadId) { + const existing = yield* repository + .getByThreadId({ threadId: binding.threadId }) + .pipe(Effect.mapError(toPersistenceError("ProviderSessionDirectory.upsert:getByThreadId"))); + + const existingRuntime = Option.getOrUndefined(existing); + const resolvedThreadId = binding.threadId ?? 
existingRuntime?.threadId; + if (!resolvedThreadId) { return yield* new ProviderValidationError({ operation: "ProviderSessionDirectory.upsert", issue: "threadId must be a non-empty string.", }); } - const lock = yield* Cache.get(upsertLocks, threadId); - yield* Semaphore.withPermit(lock)( - Effect.gen(function* () { - const existing = yield* repository - .getByThreadId({ threadId }) - .pipe( - Effect.mapError(toPersistenceError("ProviderSessionDirectory.upsert:getByThreadId")), - ); - - const existingRuntime = Option.getOrUndefined(existing); - const now = new Date().toISOString(); - const providerChanged = - existingRuntime !== undefined && existingRuntime.providerName !== binding.provider; - yield* repository - .upsert({ - threadId, - providerName: binding.provider, - adapterKey: - binding.adapterKey ?? - (providerChanged - ? binding.provider - : (existingRuntime?.adapterKey ?? binding.provider)), - runtimeMode: binding.runtimeMode ?? existingRuntime?.runtimeMode ?? "full-access", - status: binding.status ?? existingRuntime?.status ?? "running", - lastSeenAt: now, - resumeCursor: - binding.resumeCursor !== undefined - ? binding.resumeCursor - : (existingRuntime?.resumeCursor ?? null), - runtimePayload: mergeRuntimePayload( - existingRuntime?.runtimePayload ?? null, - binding.runtimePayload, - ), - }) - .pipe(Effect.mapError(toPersistenceError("ProviderSessionDirectory.upsert:upsert"))); - }), - ); + const now = new Date().toISOString(); + const providerChanged = + existingRuntime !== undefined && existingRuntime.providerName !== binding.provider; + const providerInstanceId = + binding.providerInstanceId ?? (!providerChanged ? 
existingRuntime?.providerInstanceId : null); + if (providerInstanceId === null || providerInstanceId === undefined) { + return yield* new ProviderValidationError({ + operation: "ProviderSessionDirectory.upsert", + issue: "providerInstanceId is required for provider session runtime bindings.", + }); + } + yield* repository + .upsert({ + threadId: resolvedThreadId, + providerName: binding.provider, + providerInstanceId, + adapterKey: + binding.adapterKey ?? + (providerChanged ? binding.provider : (existingRuntime?.adapterKey ?? binding.provider)), + runtimeMode: binding.runtimeMode ?? existingRuntime?.runtimeMode ?? "full-access", + status: binding.status ?? existingRuntime?.status ?? "running", + lastSeenAt: now, + resumeCursor: + binding.resumeCursor !== undefined + ? binding.resumeCursor + : (existingRuntime?.resumeCursor ?? null), + runtimePayload: mergeRuntimePayload( + existingRuntime?.runtimePayload ?? null, + binding.runtimePayload, + ), + }) + .pipe(Effect.mapError(toPersistenceError("ProviderSessionDirectory.upsert:upsert"))); }); const getProvider: ProviderSessionDirectoryShape["getProvider"] = (threadId) => diff --git a/apps/server/src/provider/Layers/ProviderSessionReaper.test.ts b/apps/server/src/provider/Layers/ProviderSessionReaper.test.ts index abde9b5446e..91e1a9aef97 100644 --- a/apps/server/src/provider/Layers/ProviderSessionReaper.test.ts +++ b/apps/server/src/provider/Layers/ProviderSessionReaper.test.ts @@ -1,5 +1,11 @@ import * as NodeServices from "@effect/platform-node/NodeServices"; -import { ProjectId, ThreadId, TurnId } from "@t3tools/contracts"; +import { + ProjectId, + ThreadId, + TurnId, + ProviderDriverKind, + ProviderInstanceId, +} from "@t3tools/contracts"; import { Effect, Exit, Layer, ManagedRuntime, Option, Scope, Stream } from "effect"; import { afterEach, describe, expect, it, vi } from "vitest"; @@ -17,7 +23,7 @@ import { ProviderSessionDirectoryLive } from "./ProviderSessionDirectory.ts"; import { 
makeProviderSessionReaperLive } from "./ProviderSessionReaper.ts"; const defaultModelSelection = { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", } as const; @@ -139,17 +145,20 @@ describe("ProviderSessionReaper", () => { respondToUserInput: () => unsupported(), stopSession, listSessions: () => Effect.succeed([]), - getCapabilities: () => - Effect.succeed({ - sessionModelSwitch: "in-session" as const, - transport: "app-server-json-rpc" as const, - modelDiscovery: "native" as const, - supportsModelDiscovery: true, - supportsResume: true, - supportsRollback: false, - supportsAttachments: false, - persistentRuntime: true, - }), + getCapabilities: () => Effect.succeed({ sessionModelSwitch: "in-session" }), + getInstanceInfo: (instanceId) => { + const driverKind = ProviderDriverKind.make(String(instanceId)); + return Effect.succeed({ + instanceId, + driverKind, + displayName: undefined, + enabled: true, + continuationIdentity: { + driverKind, + continuationKey: `${driverKind}:instance:${instanceId}`, + }, + }); + }, rollbackConversation: () => unsupported(), streamEvents: Stream.empty, }; @@ -207,6 +216,7 @@ describe("ProviderSessionReaper", () => { repository.upsert({ threadId, providerName: "claudeAgent", + providerInstanceId: null, adapterKey: "claudeAgent", runtimeMode: "full-access", status: "running", @@ -254,6 +264,7 @@ describe("ProviderSessionReaper", () => { repository.upsert({ threadId, providerName: "claudeAgent", + providerInstanceId: null, adapterKey: "claudeAgent", runtimeMode: "full-access", status: "running", @@ -300,6 +311,7 @@ describe("ProviderSessionReaper", () => { repository.upsert({ threadId, providerName: "claudeAgent", + providerInstanceId: null, adapterKey: "claudeAgent", runtimeMode: "full-access", status: "running", @@ -346,6 +358,7 @@ describe("ProviderSessionReaper", () => { repository.upsert({ threadId, providerName: "claudeAgent", + providerInstanceId: null, adapterKey: "claudeAgent", 
runtimeMode: "full-access", status: "stopped", @@ -414,6 +427,7 @@ describe("ProviderSessionReaper", () => { repository.upsert({ threadId: failedThreadId, providerName: "claudeAgent", + providerInstanceId: null, adapterKey: "claudeAgent", runtimeMode: "full-access", status: "running", @@ -428,6 +442,7 @@ describe("ProviderSessionReaper", () => { repository.upsert({ threadId: reapedThreadId, providerName: "codex", + providerInstanceId: null, adapterKey: "codex", runtimeMode: "full-access", status: "running", @@ -493,6 +508,7 @@ describe("ProviderSessionReaper", () => { repository.upsert({ threadId: defectThreadId, providerName: "claudeAgent", + providerInstanceId: null, adapterKey: "claudeAgent", runtimeMode: "full-access", status: "running", @@ -507,6 +523,7 @@ describe("ProviderSessionReaper", () => { repository.upsert({ threadId: reapedThreadId, providerName: "codex", + providerInstanceId: null, adapterKey: "codex", runtimeMode: "full-access", status: "running", diff --git a/apps/server/src/provider/Layers/copilotTurnTracking.test.ts b/apps/server/src/provider/Layers/copilotTurnTracking.test.ts index 5e42103ac4d..945ed32ae61 100644 --- a/apps/server/src/provider/Layers/copilotTurnTracking.test.ts +++ b/apps/server/src/provider/Layers/copilotTurnTracking.test.ts @@ -5,24 +5,19 @@ import { assistantUsageFields, beginCopilotTurn, clearTurnTracking, + createCopilotTurnTracker, isCopilotTurnTerminalEvent, + makeCopilotTurnTrackingState, markTurnAwaitingCompletion, recordTurnUsage, type CopilotTurnTrackingState, } from "./copilotTurnTracking.ts"; function makeState(): CopilotTurnTrackingState { - return { - currentTurnId: undefined, - currentProviderTurnId: undefined, - pendingCompletionTurnId: undefined, - pendingCompletionProviderTurnId: undefined, - pendingTurnIds: [], - pendingTurnUsage: undefined, - }; + return makeCopilotTurnTrackingState(); } -describe("copilotTurnTracking", () => { +describe("copilotTurnTracking (state-passing API)", () => { it("keeps turn 
tracking alive until session.idle", () => { expect(isCopilotTurnTerminalEvent({ type: "assistant.usage" } as never)).toBe(false); expect(isCopilotTurnTerminalEvent({ type: "session.idle" } as never)).toBe(true); @@ -57,3 +52,52 @@ describe("copilotTurnTracking", () => { expect(state.pendingCompletionTurnId).toBeUndefined(); }); }); + +describe("copilotTurnTracking (per-instance closure factory)", () => { + it("isolates state between two trackers (no shared mutable state)", () => { + const tracker1 = createCopilotTurnTracker(); + const tracker2 = createCopilotTurnTracker(); + + tracker1.enqueuePendingTurnId(TurnId.make("turn-A")); + tracker1.beginTurn(TurnId.make("provider-A")); + tracker1.recordUsage({ model: "gpt-A", cost: 0.1, totalTokens: 1 } as never); + + expect(tracker1.currentTurnId()).toBe("turn-A"); + expect(tracker2.currentTurnId()).toBeUndefined(); + expect(tracker2.pendingTurnUsage()).toBeUndefined(); + }); + + it("threads the closure through completion refs and usage fields", () => { + const tracker = createCopilotTurnTracker(); + tracker.enqueuePendingTurnId(TurnId.make("turn-1")); + tracker.beginTurn(TurnId.make("provider-1")); + tracker.recordUsage({ model: "gpt-4.1", cost: 0.5, totalTokens: 200 } as never); + tracker.markAwaitingCompletion(); + + expect(tracker.completionRefs()).toEqual({ + turnId: TurnId.make("turn-1"), + providerTurnId: TurnId.make("provider-1"), + }); + expect(tracker.usageFields()).toEqual({ + usage: { model: "gpt-4.1", cost: 0.5, totalTokens: 200 }, + modelUsage: { model: "gpt-4.1" }, + totalCostUsd: 0.5, + }); + + tracker.clear(); + expect(tracker.currentTurnId()).toBeUndefined(); + expect(tracker.pendingTurnUsage()).toBeUndefined(); + }); + + it("removePendingTurnId deletes the matching id only", () => { + const tracker = createCopilotTurnTracker(); + const turnA = TurnId.make("turn-A"); + const turnB = TurnId.make("turn-B"); + + tracker.enqueuePendingTurnId(turnA); + tracker.enqueuePendingTurnId(turnB); + 
tracker.removePendingTurnId(turnA); + + expect(tracker.state.pendingTurnIds).toEqual([turnB]); + }); +}); diff --git a/apps/server/src/provider/Layers/copilotTurnTracking.ts b/apps/server/src/provider/Layers/copilotTurnTracking.ts index 1b831e5148b..e64dd83526f 100644 --- a/apps/server/src/provider/Layers/copilotTurnTracking.ts +++ b/apps/server/src/provider/Layers/copilotTurnTracking.ts @@ -1,3 +1,23 @@ +/** + * copilotTurnTracking — per-instance tracking of in-flight Copilot turns. + * + * Two surfaces are exported: + * + * 1. `createCopilotTurnTracker()` — a factory that returns operations + * bound to a freshly allocated `CopilotTurnTrackingState` closure. Use + * this from `makeCopilotAdapter`, where each `ProviderInstance` needs + * its own isolated turn-tracking state. Two driver instances of the + * same kind MUST NOT share this state. + * + * 2. The legacy state-passing helpers (`beginCopilotTurn`, + * `markTurnAwaitingCompletion`, etc.) — kept for unit tests that drive + * the state machine directly without an adapter. Production code paths + * should prefer the closure factory. + * + * The state object is intentionally a plain mutable record. Concurrent + * mutation is impossible because the SDK delivers events serially on a + * single callback thread per session. + */ import { TurnId } from "@t3tools/contracts"; import type { SessionEvent } from "@github/copilot-sdk"; @@ -12,6 +32,17 @@ export interface CopilotTurnTrackingState { pendingTurnUsage: CopilotAssistantUsage | undefined; } +export function makeCopilotTurnTrackingState(): CopilotTurnTrackingState { + return { + currentTurnId: undefined, + currentProviderTurnId: undefined, + pendingCompletionTurnId: undefined, + pendingCompletionProviderTurnId: undefined, + pendingTurnIds: [], + pendingTurnUsage: undefined, + }; +} + export function completionTurnRefs(state: CopilotTurnTrackingState) { return { turnId: state.pendingCompletionTurnId ?? 
state.currentTurnId, @@ -67,3 +98,63 @@ export function assistantUsageFields(usage: CopilotAssistantUsage | undefined): export function isCopilotTurnTerminalEvent(event: SessionEvent): boolean { return event.type === "abort" || event.type === "session.idle"; } + +/** + * Per-instance Copilot turn tracker. The returned object owns its own + * `CopilotTurnTrackingState` closure — two drivers of the same kind get + * two independent trackers and cannot observe one another's turns. + * + * The shape mirrors the legacy state-passing helpers above: each method is + * the same operation pre-bound to the closure. + */ +export interface CopilotTurnTracker { + readonly state: CopilotTurnTrackingState; + readonly currentTurnId: () => TurnId | undefined; + readonly currentProviderTurnId: () => TurnId | undefined; + readonly pendingTurnUsage: () => CopilotAssistantUsage | undefined; + readonly enqueuePendingTurnId: (turnId: TurnId) => void; + readonly removePendingTurnId: (turnId: TurnId) => void; + readonly setCurrentTurnId: (turnId: TurnId | undefined) => void; + readonly setCurrentProviderTurnId: (turnId: TurnId | undefined) => void; + readonly completionRefs: () => { + readonly turnId: TurnId | undefined; + readonly providerTurnId: TurnId | undefined; + }; + readonly beginTurn: (providerTurnId: TurnId) => void; + readonly markAwaitingCompletion: () => void; + readonly recordUsage: (usage: CopilotAssistantUsage) => void; + readonly clear: () => void; + readonly usageFields: () => { + usage?: CopilotAssistantUsage; + modelUsage?: { model: string }; + totalCostUsd?: number; + }; +} + +export function createCopilotTurnTracker(): CopilotTurnTracker { + const state = makeCopilotTurnTrackingState(); + return { + state, + currentTurnId: () => state.currentTurnId, + currentProviderTurnId: () => state.currentProviderTurnId, + pendingTurnUsage: () => state.pendingTurnUsage, + enqueuePendingTurnId: (turnId) => { + state.pendingTurnIds.push(turnId); + }, + removePendingTurnId: (turnId) 
=> { + state.pendingTurnIds = state.pendingTurnIds.filter((candidate) => candidate !== turnId); + }, + setCurrentTurnId: (turnId) => { + state.currentTurnId = turnId; + }, + setCurrentProviderTurnId: (turnId) => { + state.currentProviderTurnId = turnId; + }, + completionRefs: () => completionTurnRefs(state), + beginTurn: (providerTurnId) => beginCopilotTurn(state, providerTurnId), + markAwaitingCompletion: () => markTurnAwaitingCompletion(state), + recordUsage: (usage) => recordTurnUsage(state, usage), + clear: () => clearTurnTracking(state), + usageFields: () => assistantUsageFields(state.pendingTurnUsage), + }; +} diff --git a/apps/server/src/provider/Layers/scopedSafeTeardown.test.ts b/apps/server/src/provider/Layers/scopedSafeTeardown.test.ts new file mode 100644 index 00000000000..ebbc379b7db --- /dev/null +++ b/apps/server/src/provider/Layers/scopedSafeTeardown.test.ts @@ -0,0 +1,94 @@ +import { it } from "@effect/vitest"; +import { Cause, Effect, Exit } from "effect"; +import { describe, expect } from "vitest"; + +import { scopedSafeTeardown } from "./scopedSafeTeardown.ts"; + +describe("scopedSafeTeardown", () => { + it.effect("returns the body's value when teardown is clean", () => + Effect.gen(function* () { + const finalizers: string[] = []; + const wrapped = Effect.gen(function* () { + yield* Effect.addFinalizer(() => + Effect.sync(() => { + finalizers.push("clean"); + }), + ); + return "body-ok"; + }).pipe(scopedSafeTeardown("test")); + + const value = yield* wrapped; + expect(value).toBe("body-ok"); + expect(finalizers).toEqual(["clean"]); + }), + ); + + it.effect("preserves body success when a finalizer dies", () => + // The production failure mode: `Layer.build(...)` registers a finalizer + // that kills a subprocess; if the kill fails, the defect would otherwise + // override a successful probe body. 
+ Effect.gen(function* () { + const finalizers: string[] = []; + const wrapped = Effect.gen(function* () { + yield* Effect.addFinalizer(() => + Effect.sync(() => { + finalizers.push("ran-before-die"); + }), + ); + yield* Effect.addFinalizer(() => + Effect.die(new Error("simulated subprocess kill failure")), + ); + return "body-ok"; + }).pipe(scopedSafeTeardown("test")); + + const value = yield* wrapped; + expect(value).toBe("body-ok"); + // The clean finalizer still ran; teardown defect was logged + swallowed. + expect(finalizers).toEqual(["ran-before-die"]); + }), + ); + + it.effect("preserves typed body failures even when teardown is clean", () => + Effect.gen(function* () { + class BodyError { + readonly _tag = "BodyError" as const; + } + const wrapped = Effect.gen(function* () { + yield* Effect.addFinalizer(() => Effect.void); + return yield* Effect.fail(new BodyError()); + }).pipe(scopedSafeTeardown("test")); + + const exit = yield* Effect.exit(wrapped); + expect(Exit.isFailure(exit)).toBe(true); + if (Exit.isFailure(exit)) { + // Body's typed failure should surface, not a defect. + const squashed = Cause.squash(exit.cause); + expect(squashed).toBeInstanceOf(BodyError); + } + }), + ); + + it.effect("prefers the body's typed failure over a teardown defect", () => + // Even when both the body fails AND teardown defects, the body's typed + // failure is what callers see. This matters because `Effect.result` / + // `.pipe(Effect.exit)` in callers expects a typed Failure, not a Die. 
+ Effect.gen(function* () { + class BodyError { + readonly _tag = "BodyError" as const; + } + const wrapped = Effect.gen(function* () { + yield* Effect.addFinalizer(() => + Effect.die(new Error("simulated subprocess kill failure")), + ); + return yield* Effect.fail(new BodyError()); + }).pipe(scopedSafeTeardown("test")); + + const exit = yield* Effect.exit(wrapped); + expect(Exit.isFailure(exit)).toBe(true); + if (Exit.isFailure(exit)) { + const squashed = Cause.squash(exit.cause); + expect(squashed).toBeInstanceOf(BodyError); + } + }), + ); +}); diff --git a/apps/server/src/provider/Layers/scopedSafeTeardown.ts b/apps/server/src/provider/Layers/scopedSafeTeardown.ts new file mode 100644 index 00000000000..688374590e7 --- /dev/null +++ b/apps/server/src/provider/Layers/scopedSafeTeardown.ts @@ -0,0 +1,61 @@ +/** + * scopedSafeTeardown — run a scope-requiring effect so that finalizer + * failures during scope close cannot override the body's Exit. + * + * Motivation + * ---------- + * The obvious pattern is `body.pipe(Effect.scoped)`: provide a fresh + * Scope, run the body, close the scope with the body's Exit. If a + * finalizer (e.g. `ChildProcess.kill` from an effect-codex-app-server + * spawn) dies during that close, the combined Exit becomes the + * finalizer's defect — even when the body already succeeded. + * + * Concretely this bit us in the Codex provider probe: a successful + * `initialize` → `account/read` → `skills/list` → `model/list` + * round-trip produced a `CodexAppServerProviderSnapshot`, but the + * `Layer.build(CodexClient.layerCommand(...))` finalizer then failed to + * kill the `codex app-server` subprocess with a `PlatformError`. The + * defect bubbled past `Effect.result` in `checkCodexProviderStatus`, + * died `refreshOneSource`, and `providersRef` never saw the snapshot. + * + * Strategy + * -------- + * 1. Make a fresh scope manually. + * 2. Run the body against that scope, capturing its Exit via + * `Effect.exit`. + * 3. 
Close the scope, catching any cause (typed failure *or* defect) + * with a log. + * 4. Replay the captured Exit so typed body failures still surface and + * successes still return their value. + * + * The helper deliberately logs teardown causes at `Warning` level — + * silently swallowing them is dangerous because they usually indicate a + * real bug in a downstream Layer's finalizer. + * + * @module provider/Layers/scopedSafeTeardown + */ +import { Effect, Exit, Scope } from "effect"; + +/** + * Run `effect` with a freshly made `Scope.Scope`, guaranteeing that + * teardown failures cannot override the body's Exit. + * + * Shape matches `Effect.scoped`: takes an effect whose env includes + * `Scope.Scope`, returns one whose env excludes it. + * + * @param label Short label for the warning log emitted when teardown + * fails. Use something like `"codex-probe"`. + */ +export const scopedSafeTeardown = + (label: string) => + (effect: Effect.Effect): Effect.Effect> => + Effect.gen(function* () { + const scope = yield* Scope.make(); + const bodyExit = yield* effect.pipe(Effect.provideService(Scope.Scope, scope), Effect.exit); + yield* Scope.close(scope, Exit.void).pipe( + Effect.catchCause((cause) => + Effect.logWarning(`${label} teardown errored; preserving body result`, cause), + ), + ); + return yield* bodyExit; + }) as Effect.Effect>; diff --git a/apps/server/src/provider/ProviderDriver.ts b/apps/server/src/provider/ProviderDriver.ts new file mode 100644 index 00000000000..e7d3e40cd8c --- /dev/null +++ b/apps/server/src/provider/ProviderDriver.ts @@ -0,0 +1,167 @@ +/** + * ProviderDriver / ProviderInstance — driver SPI as plain values. + * + * `ProviderDriver` is a record, not a Context.Service. The thing it produces + * (`ProviderInstance`) is also a record — three captured closures + * (`snapshot`, `adapter`, `textGeneration`), an id, and a driver kind. 
There + * are intentionally no per-driver Context tags because tags are + * singleton-per-runtime and we need many instances of the same driver. + * + * The only Effect service involved is `ProviderInstanceRegistry`, which + * owns the live `Map` and is itself a + * singleton. + * + * Driver factories are functions of `(typed config, env)` where: + * - `typed config` is decoded once by the registry via `configSchema`, + * so drivers never deal with raw `unknown`. + * - `env` flows through Effect's R channel. Each driver declares the + * subset of infrastructure services it needs (FileSystem, + * ChildProcessSpawner, …) on its `create` return type; the registry + * layer's R is the union of those, and the runtime layer satisfies it. + * + * @module provider/ProviderDriver + */ +import type { + ProviderDriverKind, + ProviderInstanceEnvironment, + ProviderInstanceId, +} from "@t3tools/contracts"; +import type { Effect, Schema, Scope } from "effect"; + +import type { TextGenerationShape } from "../textGeneration/TextGeneration.ts"; +import type { ProviderAdapterError, ProviderDriverError } from "./Errors.ts"; +import type { ProviderAdapterShape } from "./Services/ProviderAdapter.ts"; +import type { ServerProviderShape } from "./Services/ServerProvider.ts"; + +/** + * Static metadata advertised by a driver. Used for default presentation + * and (later) settings UI. Doesn't need to be Effect-typed because nothing + * about it is dynamic — drivers are registered at startup. + */ +export interface ProviderDriverMetadata { + /** Human-readable name for the driver itself (e.g. "Codex"). */ + readonly displayName: string; + /** + * Whether the driver may be instantiated more than once concurrently. + * Defaults to `true`. Set to `false` for drivers that wrap a global + * resource (e.g. a single desktop app socket) — the registry then + * rejects multi-instance configurations with a clear error. 
+ */ + readonly supportsMultipleInstances?: boolean; +} + +/** + * One materialized provider instance. Held by the registry, looked up by + * `instanceId`, torn down by closing the scope it was created in. + * + * The three "shape" fields are captured closures owned by this instance — + * stopping one instance cannot affect another, and starting a second + * instance of the same driver does not reach into the first instance's + * state. + */ +export interface ProviderInstance { + readonly instanceId: ProviderInstanceId; + readonly driverKind: ProviderDriverKind; + readonly continuationIdentity: ProviderContinuationIdentity; + readonly displayName: string | undefined; + readonly accentColor?: string | undefined; + readonly enabled: boolean; + readonly snapshot: ServerProviderShape; + readonly adapter: ProviderAdapterShape; + readonly textGeneration: TextGenerationShape; +} + +export interface ProviderContinuationIdentity { + readonly driverKind: ProviderDriverKind; + readonly continuationKey: string; +} + +export function defaultProviderContinuationIdentity(input: { + readonly driverKind: ProviderDriverKind; + readonly instanceId: ProviderInstanceId; +}): ProviderContinuationIdentity { + return { + driverKind: input.driverKind, + continuationKey: `${input.driverKind}:instance:${input.instanceId}`, + }; +} + +/** + * Inputs the registry passes to a driver's `create` function. + * + * `config` is the typed payload — already decoded by the registry through + * `driver.configSchema`. Drivers never decode their own raw envelope. + */ +export interface ProviderDriverCreateInput { + readonly instanceId: ProviderInstanceId; + readonly displayName: string | undefined; + readonly accentColor?: string | undefined; + readonly environment: ProviderInstanceEnvironment; + readonly enabled: boolean; + readonly config: Config; +} + +/** + * Driver SPI — registered as a plain value, not a Layer. + * + * `Config` is whatever the driver decoded from + * `ProviderInstanceConfig.config`. 
`R` is the union of infrastructure + * services the driver depends on; the registry layer aggregates `R` across + * all registered drivers and the runtime supplies them. + * + * `create` is responsible for *all* per-instance state — process handles, + * pubsub topics, refs, file watchers — and must release them when its + * scope closes. Two calls to `create` with different `instanceId` / + * `config` MUST yield instances with no shared mutable state. + */ +export interface ProviderDriver { + readonly driverKind: ProviderDriverKind; + readonly metadata: ProviderDriverMetadata; + /** + * Decoder for the opaque `ProviderInstanceConfig.config` envelope. The + * registry runs this exactly once per (re)load of an instance; a decode + * failure is surfaced as `ProviderDriverError` and downgraded to an + * unavailable shadow snapshot. + * + * The `Encoded` parameter is intentionally left as `unknown` (not + * `Config`) so schemas with `withDecodingDefault` / transformations — where + * the encoded shape differs from the decoded shape — satisfy the SPI + * without casts. The registry only ever decodes `unknown` envelopes here, + * so the precise encoded type is irrelevant at this boundary. + * + * Using `Codec` rather than `Schema` pins `DecodingServices = never` — if + * we used `Schema`, the erased `any` in `AnyProviderDriver` would + * widen `DecodingServices` to `unknown` and poison the R channel of every + * caller of `decodeUnknownEffect`. + */ + readonly configSchema: Schema.Codec; + /** + * Default config payload used when the legacy + * `ServerSettings.providers.` entry is empty or when the driver + * is auto-bootstrapped without user configuration. Returning a typed + * default keeps the migration path simple — no special-casing needed + * to construct a "blank" instance. + */ + readonly defaultConfig: () => Config; + /** + * Materialize one instance. 
The returned effect runs in a scope owned + * by the registry; closing that scope releases every resource the + * driver opened. Failures become unavailable shadow snapshots — the + * driver MUST NOT throw defects. + */ + readonly create: ( + input: ProviderDriverCreateInput, + ) => Effect.Effect; +} + +/** + * Heterogeneous-array convenience: the registry stores drivers as + * `ReadonlyArray>` where `R` is the union of all + * registered drivers' env requirements. + */ +// `any` here intentionally erases the per-driver Config; the registry +// already decoded it before invoking `create`, so downstream code never +// needs the original `Config` type. Using `unknown` instead would force +// `create` callers into casts since `unknown` is not assignable to a +// concrete `Config` from inside the driver body. +export type AnyProviderDriver = ProviderDriver; diff --git a/apps/server/src/provider/ProviderInstanceEnvironment.test.ts b/apps/server/src/provider/ProviderInstanceEnvironment.test.ts new file mode 100644 index 00000000000..f37b328b150 --- /dev/null +++ b/apps/server/src/provider/ProviderInstanceEnvironment.test.ts @@ -0,0 +1,21 @@ +import { describe, expect, it } from "vitest"; + +import { mergeProviderInstanceEnvironment } from "./ProviderInstanceEnvironment.ts"; + +describe("mergeProviderInstanceEnvironment", () => { + it("overrides inherited environment values and preserves empty strings", () => { + expect( + mergeProviderInstanceEnvironment( + [ + { name: "OPENROUTER_API_KEY", value: "sk-or-test", sensitive: true }, + { name: "ANTHROPIC_API_KEY", value: "", sensitive: false }, + ], + { ANTHROPIC_API_KEY: "inherited", PATH: "/bin" }, + ), + ).toMatchObject({ + OPENROUTER_API_KEY: "sk-or-test", + ANTHROPIC_API_KEY: "", + PATH: "/bin", + }); + }); +}); diff --git a/apps/server/src/provider/ProviderInstanceEnvironment.ts b/apps/server/src/provider/ProviderInstanceEnvironment.ts new file mode 100644 index 00000000000..e469253604e --- /dev/null +++ 
b/apps/server/src/provider/ProviderInstanceEnvironment.ts @@ -0,0 +1,16 @@ +import type { ProviderInstanceEnvironment } from "@t3tools/contracts"; + +export function mergeProviderInstanceEnvironment( + environment: ProviderInstanceEnvironment | undefined, + baseEnv: NodeJS.ProcessEnv = process.env, +): NodeJS.ProcessEnv { + if (!environment || environment.length === 0) { + return baseEnv; + } + + const next: NodeJS.ProcessEnv = { ...baseEnv }; + for (const variable of environment) { + next[variable.name] = variable.value; + } + return next; +} diff --git a/apps/server/src/provider/Services/AmpAdapter.ts b/apps/server/src/provider/Services/AmpAdapter.ts deleted file mode 100644 index 96763e7629b..00000000000 --- a/apps/server/src/provider/Services/AmpAdapter.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Context } from "effect"; - -import type { ProviderAdapterError } from "../Errors.ts"; -import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; - -export interface AmpAdapterShape extends Omit< - ProviderAdapterShape, - "provider" -> { - readonly provider: "amp"; -} - -export class AmpAdapter extends Context.Service()( - "t3/provider/Services/AmpAdapter", -) {} diff --git a/apps/server/src/provider/Services/ClaudeAdapter.ts b/apps/server/src/provider/Services/ClaudeAdapter.ts index e8c33bd8e40..ed9bd7081bc 100644 --- a/apps/server/src/provider/Services/ClaudeAdapter.ts +++ b/apps/server/src/provider/Services/ClaudeAdapter.ts @@ -1,30 +1,19 @@ /** - * ClaudeAdapter - Claude Agent implementation of the generic provider adapter contract. + * ClaudeAdapter — shape type for the Claude provider adapter. * - * This service owns Claude runtime/session semantics and emits canonical - * provider runtime events. It does not perform cross-provider routing, shared - * event fan-out, or checkpoint orchestration. - * - * Uses Effect `Context.Service` for dependency injection and returns the - * shared provider-adapter error channel with `provider: "claudeAgent"` context. 
+ * Historically this module exposed a `Context.Service` tag so consumers + * could inject the adapter through the Effect layer graph. The driver + * model ({@link ../Drivers/ClaudeDriver}) bundles one adapter per + * instance as a captured closure instead, so the tag is gone — we only + * retain the shape interface as a naming anchor for the driver bundle. * * @module ClaudeAdapter */ -import { Context } from "effect"; - import type { ProviderAdapterError } from "../Errors.ts"; import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; /** - * ClaudeAdapterShape - Service API for the Claude Agent provider adapter. - */ -export interface ClaudeAdapterShape extends ProviderAdapterShape { - readonly provider: "claudeAgent"; -} - -/** - * ClaudeAdapter - Service tag for Claude Agent provider adapter operations. + * ClaudeAdapterShape — per-instance Claude adapter contract. Carries + * a branded driver kind as the nominal discriminant. */ -export class ClaudeAdapter extends Context.Service()( - "t3/provider/Services/ClaudeAdapter", -) {} +export interface ClaudeAdapterShape extends ProviderAdapterShape {} diff --git a/apps/server/src/provider/Services/ClaudeProvider.ts b/apps/server/src/provider/Services/ClaudeProvider.ts deleted file mode 100644 index 7e21ac56d9e..00000000000 --- a/apps/server/src/provider/Services/ClaudeProvider.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Context } from "effect"; - -import type { ServerProviderShape } from "./ServerProvider.ts"; - -export interface ClaudeProviderShape extends ServerProviderShape {} - -export class ClaudeProvider extends Context.Service()( - "t3/provider/Services/ClaudeProvider", -) {} diff --git a/apps/server/src/provider/Services/CodexAdapter.ts b/apps/server/src/provider/Services/CodexAdapter.ts index e7a5508c9c7..33fe0fa12be 100644 --- a/apps/server/src/provider/Services/CodexAdapter.ts +++ b/apps/server/src/provider/Services/CodexAdapter.ts @@ -1,30 +1,19 @@ /** - * CodexAdapter - Codex implementation of 
the generic provider adapter contract. + * CodexAdapter — shape type for the Codex provider adapter. * - * This service owns Codex app-server process / JSON-RPC semantics and emits - * Codex provider events. It does not perform cross-provider routing, shared - * event fan-out, or checkpoint orchestration. - * - * Uses Effect `Context.Service` for dependency injection and returns the - * shared provider-adapter error channel with `provider: "codex"` context. + * Historically this module exposed a `Context.Service` tag so consumers + * could inject the adapter through the Effect layer graph. The driver + * model ({@link ../Drivers/CodexDriver}) bundles one adapter per + * instance as a captured closure instead, so the tag is gone — we only + * retain the shape interface as a naming anchor for the driver bundle. * * @module CodexAdapter */ -import { Context } from "effect"; - import type { ProviderAdapterError } from "../Errors.ts"; import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; /** - * CodexAdapterShape - Service API for the Codex provider adapter. - */ -export interface CodexAdapterShape extends ProviderAdapterShape { - readonly provider: "codex"; -} - -/** - * CodexAdapter - Service tag for Codex provider adapter operations. + * CodexAdapterShape — per-instance Codex adapter contract. Carries + * a branded driver kind as the nominal discriminant. 
*/ -export class CodexAdapter extends Context.Service()( - "t3/provider/Services/CodexAdapter", -) {} +export interface CodexAdapterShape extends ProviderAdapterShape {} diff --git a/apps/server/src/provider/Services/CodexProvider.ts b/apps/server/src/provider/Services/CodexProvider.ts deleted file mode 100644 index e116f1a761b..00000000000 --- a/apps/server/src/provider/Services/CodexProvider.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Context } from "effect"; - -import type { ServerProviderShape } from "./ServerProvider.ts"; - -export interface CodexProviderShape extends ServerProviderShape {} - -export class CodexProvider extends Context.Service()( - "t3/provider/Services/CodexProvider", -) {} diff --git a/apps/server/src/provider/Services/CopilotAdapter.ts b/apps/server/src/provider/Services/CopilotAdapter.ts index edacaef4ce6..e4e93cb1896 100644 --- a/apps/server/src/provider/Services/CopilotAdapter.ts +++ b/apps/server/src/provider/Services/CopilotAdapter.ts @@ -1,11 +1,28 @@ +/** + * CopilotAdapter — Service tag + shape type for the GitHub Copilot adapter. + * + * The driver model ({@link ../Drivers/CopilotDriver}) bundles one adapter + * per instance as a captured closure, so production code no longer reads + * adapters through this Service tag. The tag is retained for back-compat + * with the conformance suite and any legacy boot graph that still resolves + * a single Copilot adapter via the Effect Context. + * + * Wrap a per-instance adapter into this tag with the `makeCopilotAdapterLive` + * Layer in `Layers/CopilotAdapter.ts`. + * + * @module CopilotAdapter + */ import { Context } from "effect"; import type { ProviderAdapterError } from "../Errors.ts"; import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; -export interface CopilotAdapterShape extends ProviderAdapterShape { - readonly provider: "copilot"; -} +/** + * CopilotAdapterShape — per-instance Copilot adapter contract. 
Carries + * a branded driver kind (`ProviderDriverKind`) as the nominal discriminant + * inherited from `ProviderAdapterShape`. + */ +export interface CopilotAdapterShape extends ProviderAdapterShape {} export class CopilotAdapter extends Context.Service()( "t3/provider/Services/CopilotAdapter", diff --git a/apps/server/src/provider/Services/CursorAdapter.ts b/apps/server/src/provider/Services/CursorAdapter.ts index f1edb316198..83581f0a454 100644 --- a/apps/server/src/provider/Services/CursorAdapter.ts +++ b/apps/server/src/provider/Services/CursorAdapter.ts @@ -1,12 +1,19 @@ -import { Context } from "effect"; - +/** + * CursorAdapter — shape type for the Cursor provider adapter. + * + * Historically this module exposed a `Context.Service` tag so consumers + * could inject the adapter through the Effect layer graph. The driver + * model ({@link ../Drivers/CursorDriver}) bundles one adapter per + * instance as a captured closure instead, so the tag is gone — we only + * retain the shape interface as a naming anchor for the driver bundle. + * + * @module CursorAdapter + */ import type { ProviderAdapterError } from "../Errors.ts"; import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; -export interface CursorAdapterShape extends ProviderAdapterShape { - readonly provider: "cursor"; -} - -export class CursorAdapter extends Context.Service()( - "t3/provider/Services/CursorAdapter", -) {} +/** + * CursorAdapterShape — per-instance Cursor adapter contract. Carries + * a branded driver kind as the nominal discriminant. 
+ */ +export interface CursorAdapterShape extends ProviderAdapterShape {} diff --git a/apps/server/src/provider/Services/CursorProvider.ts b/apps/server/src/provider/Services/CursorProvider.ts deleted file mode 100644 index aa70994f5e9..00000000000 --- a/apps/server/src/provider/Services/CursorProvider.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Context } from "effect"; - -import type { ServerProviderShape } from "./ServerProvider.ts"; - -export interface CursorProviderShape extends ServerProviderShape {} - -export class CursorProvider extends Context.Service()( - "t3/provider/Services/CursorProvider", -) {} diff --git a/apps/server/src/provider/Services/GeminiCliAdapter.ts b/apps/server/src/provider/Services/GeminiCliAdapter.ts deleted file mode 100644 index f6b49b97098..00000000000 --- a/apps/server/src/provider/Services/GeminiCliAdapter.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Context } from "effect"; - -import type { ProviderAdapterError } from "../Errors.ts"; -import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; - -export interface GeminiCliAdapterShape extends Omit< - ProviderAdapterShape, - "provider" -> { - readonly provider: "geminiCli"; -} - -export class GeminiCliAdapter extends Context.Service()( - "t3/provider/Services/GeminiCliAdapter", -) {} diff --git a/apps/server/src/provider/Services/KiloAdapter.ts b/apps/server/src/provider/Services/KiloAdapter.ts deleted file mode 100644 index eba18b6e85a..00000000000 --- a/apps/server/src/provider/Services/KiloAdapter.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Context } from "effect"; - -import type { ProviderAdapterError } from "../Errors.ts"; -import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; - -export interface KiloAdapterShape extends Omit< - ProviderAdapterShape, - "provider" -> { - readonly provider: "kilo"; -} - -export class KiloAdapter extends Context.Service()( - "t3/provider/Services/KiloAdapter", -) {} diff --git a/apps/server/src/provider/Services/OpenCodeAdapter.ts 
b/apps/server/src/provider/Services/OpenCodeAdapter.ts index ad5660022bf..e3ad97904d1 100644 --- a/apps/server/src/provider/Services/OpenCodeAdapter.ts +++ b/apps/server/src/provider/Services/OpenCodeAdapter.ts @@ -1,12 +1,19 @@ -import { Context } from "effect"; - +/** + * OpenCodeAdapter — shape type for the OpenCode provider adapter. + * + * Historically this module exposed a `Context.Service` tag so consumers + * could inject the adapter through the Effect layer graph. The driver + * model ({@link ../Drivers/OpenCodeDriver}) bundles one adapter per + * instance as a captured closure instead, so the tag is gone — we only + * retain the shape interface as a naming anchor for the driver bundle. + * + * @module OpenCodeAdapter + */ import type { ProviderAdapterError } from "../Errors.ts"; import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; -export interface OpenCodeAdapterShape extends ProviderAdapterShape { - readonly provider: "opencode"; -} - -export class OpenCodeAdapter extends Context.Service()( - "t3/provider/Services/OpenCodeAdapter", -) {} +/** + * OpenCodeAdapterShape — per-instance OpenCode adapter contract. Carries + * a branded driver kind as the nominal discriminant. 
+ */ +export interface OpenCodeAdapterShape extends ProviderAdapterShape {} diff --git a/apps/server/src/provider/Services/OpenCodeProvider.ts b/apps/server/src/provider/Services/OpenCodeProvider.ts deleted file mode 100644 index a799830eec4..00000000000 --- a/apps/server/src/provider/Services/OpenCodeProvider.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Context } from "effect"; - -import type { ServerProviderShape } from "./ServerProvider.ts"; - -export interface OpenCodeProviderShape extends ServerProviderShape {} - -export class OpenCodeProvider extends Context.Service()( - "t3/provider/Services/OpenCodeProvider", -) {} diff --git a/apps/server/src/provider/Services/ProviderAdapter.ts b/apps/server/src/provider/Services/ProviderAdapter.ts index f153c2bf072..dd1be738721 100644 --- a/apps/server/src/provider/Services/ProviderAdapter.ts +++ b/apps/server/src/provider/Services/ProviderAdapter.ts @@ -10,7 +10,7 @@ import type { ApprovalRequestId, ProviderApprovalDecision, - ProviderKind, + ProviderDriverKind, ProviderUserInputAnswers, ProviderRuntimeEvent, ProviderSendTurnInput, @@ -23,205 +23,13 @@ import type { import type { Effect } from "effect"; import type { Stream } from "effect"; -export type ProviderSessionModelSwitchMode = "in-session" | "restart-session" | "unsupported"; -export type ProviderTransport = - | "app-server-json-rpc" - | "sdk-cli-server" - | "sdk-query" - | "acp-stdio" - | "http-sse" - | "cli-headless-json" - | "cli-persistent-json"; -export type ProviderModelDiscovery = - | "native" - | "acp-or-config" - | "config-or-static" - | "session-native" - | "unsupported"; -export type ProviderHarnessOperation = - | "startSession" - | "sendTurn" - | "interruptTurn" - | "respondToRequest" - | "respondToUserInput" - | "readThread" - | "rollbackThread" - | "stopSession" - | "streamEvents"; - -export const PROVIDER_HARNESS_OPERATIONS: ReadonlyArray = [ - "startSession", - "sendTurn", - "interruptTurn", - "respondToRequest", - "respondToUserInput", - 
"readThread", - "rollbackThread", - "stopSession", - "streamEvents", -] as const; +export type ProviderSessionModelSwitchMode = "in-session" | "unsupported"; export interface ProviderAdapterCapabilities { /** * Declares whether changing the model on an existing session is supported. */ readonly sessionModelSwitch: ProviderSessionModelSwitchMode; - /** - * Declares the provider transport family used by the adapter. - */ - readonly transport: ProviderTransport; - /** - * Describes how model discovery is sourced for this provider. - */ - readonly modelDiscovery: ProviderModelDiscovery; - /** - * Quick boolean check for whether model discovery is available at all. - */ - readonly supportsModelDiscovery: boolean; - /** - * Whether a stopped or missing runtime can be recovered from persisted resume - * state. - */ - readonly supportsResume: boolean; - /** - * Whether conversation rollback is supported by the underlying provider. - */ - readonly supportsRollback: boolean; - /** - * Whether the adapter accepts chat attachments. - */ - readonly supportsAttachments: boolean; - /** - * Whether the provider keeps a runtime/session alive across turns after - * `startSession`. 
- */ - readonly persistentRuntime: boolean; -} - -export const PROVIDER_CAPABILITIES_BY_PROVIDER: Readonly< - Record -> = { - codex: { - sessionModelSwitch: "in-session", - transport: "app-server-json-rpc", - modelDiscovery: "native", - supportsModelDiscovery: true, - supportsResume: true, - supportsRollback: true, - supportsAttachments: true, - persistentRuntime: true, - }, - copilot: { - sessionModelSwitch: "in-session", - transport: "sdk-cli-server", - modelDiscovery: "native", - supportsModelDiscovery: true, - supportsResume: true, - supportsRollback: false, - supportsAttachments: true, - persistentRuntime: true, - }, - claudeAgent: { - sessionModelSwitch: "in-session", - transport: "sdk-query", - modelDiscovery: "session-native", - supportsModelDiscovery: true, - supportsResume: true, - supportsRollback: false, - supportsAttachments: true, - persistentRuntime: true, - }, - cursor: { - sessionModelSwitch: "unsupported", - transport: "acp-stdio", - modelDiscovery: "acp-or-config", - supportsModelDiscovery: true, - supportsResume: true, - supportsRollback: false, - supportsAttachments: false, - persistentRuntime: true, - }, - opencode: { - sessionModelSwitch: "in-session", - transport: "http-sse", - modelDiscovery: "native", - supportsModelDiscovery: true, - supportsResume: true, - supportsRollback: true, - supportsAttachments: false, - persistentRuntime: true, - }, - geminiCli: { - sessionModelSwitch: "restart-session", - transport: "cli-headless-json", - modelDiscovery: "config-or-static", - supportsModelDiscovery: true, - supportsResume: true, - supportsRollback: false, - supportsAttachments: false, - persistentRuntime: false, - }, - amp: { - sessionModelSwitch: "restart-session", - transport: "cli-persistent-json", - modelDiscovery: "config-or-static", - supportsModelDiscovery: true, - supportsResume: false, - supportsRollback: false, - supportsAttachments: false, - persistentRuntime: true, - }, - kilo: { - sessionModelSwitch: "in-session", - transport: 
"http-sse", - modelDiscovery: "native", - supportsModelDiscovery: true, - supportsResume: true, - supportsRollback: true, - supportsAttachments: false, - persistentRuntime: true, - }, -} as const; - -export function getProviderCapabilities(provider: ProviderKind): ProviderAdapterCapabilities { - return PROVIDER_CAPABILITIES_BY_PROVIDER[provider]; -} - -export function validateProviderAdapterConformance( - adapter: ProviderAdapterShape, -): ReadonlyArray { - const issues: string[] = []; - const expected = getProviderCapabilities(adapter.provider); - - for (const operation of PROVIDER_HARNESS_OPERATIONS) { - if (operation === "streamEvents") { - if (adapter.streamEvents === undefined || adapter.streamEvents === null) { - issues.push(`missing operation '${operation}'`); - } - continue; - } - - if (typeof adapter[operation] !== "function") { - issues.push(`missing operation '${operation}'`); - } - } - - for (const [key, value] of Object.entries(expected) as Array< - [ - keyof ProviderAdapterCapabilities, - ProviderAdapterCapabilities[keyof ProviderAdapterCapabilities], - ] - >) { - if (adapter.capabilities[key] !== value) { - issues.push( - `capability mismatch for '${String(key)}': expected '${String(value)}', received '${String( - adapter.capabilities[key], - )}'`, - ); - } - } - - return issues; } export interface ProviderThreadTurnSnapshot { @@ -238,7 +46,7 @@ export interface ProviderAdapterShape { /** * Provider kind implemented by this adapter. 
*/ - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly capabilities: ProviderAdapterCapabilities; /** diff --git a/apps/server/src/provider/Services/ProviderAdapterRegistry.ts b/apps/server/src/provider/Services/ProviderAdapterRegistry.ts index b8e9d4b21c3..1161487d5a3 100644 --- a/apps/server/src/provider/Services/ProviderAdapterRegistry.ts +++ b/apps/server/src/provider/Services/ProviderAdapterRegistry.ts @@ -1,34 +1,91 @@ /** * ProviderAdapterRegistry - Lookup boundary for provider adapter implementations. * - * Maps a provider kind to the concrete adapter service (Codex, Claude, etc). - * It does not own session lifecycle or routing rules; `ProviderService` uses - * this registry together with `ProviderSessionDirectory`. + * Maps a `ProviderInstanceId` (the new per-instance routing key) or a + * `ProviderDriverKind` (legacy single-instance-per-driver key) to the concrete + * adapter service (Codex, Claude, etc). It does not own session lifecycle + * or routing rules; `ProviderService` uses this registry together with + * `ProviderSessionDirectory`. + * + * During the driver/instance migration this tag exposes both flavours: + * + * - `getByInstance` / `listInstances` — new per-instance routing. Callers + * that already know an `instanceId` (threads, sessions, events) + * should prefer these. + * - `listProviders` and the other kind-keyed methods — legacy routing that + * resolves the driver's default instance + * (`defaultInstanceIdForDriver(kind) === kind`), matching the pre-Slice-D + * behaviour. New code should not grow additional callers of the kind-keyed + * methods; they exist so the settings UI, WS refresh RPC, and a handful + * of legacy persisted rows can still be routed during the rollout. 
* * @module ProviderAdapterRegistry */ -import type { ProviderKind } from "@t3tools/contracts"; +import type { ProviderDriverKind, ProviderInstanceId } from "@t3tools/contracts"; import { Context } from "effect"; -import type { Effect } from "effect"; +import type { Effect, PubSub, Scope, Stream } from "effect"; import type { ProviderAdapterError, ProviderUnsupportedError } from "../Errors.ts"; import type { ProviderAdapterShape } from "./ProviderAdapter.ts"; +import type { ProviderContinuationIdentity } from "../ProviderDriver.ts"; + +export interface ProviderInstanceRoutingInfo { + readonly instanceId: ProviderInstanceId; + readonly driverKind: ProviderDriverKind; + readonly displayName: string | undefined; + readonly accentColor?: string | undefined; + readonly enabled: boolean; + readonly continuationIdentity: ProviderContinuationIdentity; +} /** - * ProviderAdapterRegistryShape - Service API for adapter lookup by provider kind. + * ProviderAdapterRegistryShape - Service API for adapter lookup. */ export interface ProviderAdapterRegistryShape { /** - * Resolve the adapter for a provider kind. + * Resolve the adapter for a specific instance id. Returns + * `ProviderUnsupportedError` if no such instance is currently registered + * (which covers "never configured" *and* "configured but the driver is + * unavailable in this build" — both surface the same failure to callers + * that expect a working adapter). */ - readonly getByProvider: ( - provider: ProviderKind, + readonly getByInstance: ( + instanceId: ProviderInstanceId, ) => Effect.Effect, ProviderUnsupportedError>; + readonly getInstanceInfo: ( + instanceId: ProviderInstanceId, + ) => Effect.Effect; + + /** + * List all live instance ids. Excludes unavailable/shadow instances — + * callers of this method want something they can pass to `getByInstance`. + */ + readonly listInstances: () => Effect.Effect>; + /** - * List provider kinds currently registered. 
+ * Legacy: list provider kinds whose default instance is currently + * registered. + * + * @deprecated Prefer `listInstances`. Retained for migration-era call + * sites that iterate providers to build UI/metrics. */ - readonly listProviders: () => Effect.Effect>; + readonly listProviders: () => Effect.Effect>; + + /** + * Change notification stream mirroring `ProviderInstanceRegistry.streamChanges`. + * Emits one `void` tick whenever the set of live instances changes + * (instance added, removed, or rebuilt after a settings edit). Consumers + * that fan out `adapter.streamEvents` per instance — e.g. `ProviderService`'s + * runtime event bus — re-pull `listInstances` on each tick and fork new + * subscriptions for instances they haven't seen yet. + */ + readonly streamChanges: Stream.Stream; + + /** + * Acquire a change subscription synchronously in the caller's current fiber. + * Consumers that must avoid missing a publish between initial reconciliation + * and watcher startup should use this, then fork `Stream.fromSubscription`. + */ + readonly subscribeChanges: Effect.Effect, never, Scope.Scope>; } /** @@ -38,5 +95,3 @@ export class ProviderAdapterRegistry extends Context.Service< ProviderAdapterRegistry, ProviderAdapterRegistryShape >()("t3/provider/Services/ProviderAdapterRegistry") {} - -// Dummy comment for workflow testing. diff --git a/apps/server/src/provider/Services/ProviderInstanceRegistry.ts b/apps/server/src/provider/Services/ProviderInstanceRegistry.ts new file mode 100644 index 00000000000..f642475a243 --- /dev/null +++ b/apps/server/src/provider/Services/ProviderInstanceRegistry.ts @@ -0,0 +1,84 @@ +/** + * ProviderInstanceRegistry — the single Effect service in the new model. + * + * Owns a `Map` produced by running + * registered driver factories against `ServerSettings.providerInstances`. 
+ * The registry watches settings; when an instance's config changes (or + * the entry disappears), the registry tears down the affected instance's + * scope and rebuilds — that's the entire hot-reload story. + * + * What rest-of-server reads from here: + * - `getInstance(instanceId)` — for routing turn/session calls. + * - `listInstances` — for snapshot aggregation in `ProviderRegistry`. + * - `listUnavailable` — `ServerProvider` shadows for instances whose + * driver is not registered in this build (rollback / fork tolerance). + * - `streamChanges` — coalesced "registry mutated" pings so consumers + * can re-pull lists or re-broadcast. + * + * @module provider/Services/ProviderInstanceRegistry + */ +import type { ProviderInstanceId, ServerProvider } from "@t3tools/contracts"; +import { Context } from "effect"; +import type { Effect, PubSub, Scope, Stream } from "effect"; + +import type { ProviderInstance } from "../ProviderDriver.ts"; + +export interface ProviderInstanceRegistryShape { + /** + * Look up one instance by id. Returns `undefined` (not Option) when the + * id is unknown — callers branch on falsy and emit + * `ProviderInstanceNotFoundError`. + */ + readonly getInstance: ( + instanceId: ProviderInstanceId, + ) => Effect.Effect; + /** + * Every available (driver-registered, successfully created) instance, + * in stable settings-author order. + */ + readonly listInstances: Effect.Effect>; + /** + * Wire-shape shadow snapshots for instances whose driver is unknown to + * this build (or whose config failed to decode). Suitable for merging + * directly into `ProviderRegistry` output. + */ + readonly listUnavailable: Effect.Effect>; + /** + * Push notification stream emitted whenever the registry's contents + * change — instance added, removed, or rebuilt. The payload is `void` + * because consumers always want to re-pull `listInstances` / + * `listUnavailable` together. 
+ * + * NOTE: because `Stream.fromPubSub` defers `PubSub.subscribe` until the + * stream starts running, forking a consumer via + * `Stream.runForEach(...).pipe(Effect.forkScoped)` races the next + * publish — the forked fiber may not have subscribed yet when the + * publish lands. Hot-reload consumers that must not miss a publish + * should use `subscribeChanges` below instead, which acquires the + * subscription synchronously in the caller's fiber before the consumer + * loop is forked. + */ + readonly streamChanges: Stream.Stream; + /** + * Acquire a subscription to the registry's change channel synchronously + * in the caller's fiber. Returns a `PubSub.Subscription` whose + * lifetime is scoped to the provided `Scope` (the subscription is + * released when the scope closes). Consumers typically `yield*` this + * in the same fiber that forks their consumer loop, then drain with + * `PubSub.take(subscription)` inside `Effect.forever`. Because the + * subscription is registered with the PubSub before this `yield*` + * returns, no subsequent publish can land in a gap. + * + * This exists because the `ProviderInstanceRegistry` publishes on a + * PubSub and `Stream.fromPubSub` defers subscription until the stream + * starts executing — a consumer that `forkScoped`s the stream + * consumption can miss a publish that lands in the narrow window + * between "fiber scheduled" and "fiber starts running". 
+ */ + readonly subscribeChanges: Effect.Effect, never, Scope.Scope>; +} + +export class ProviderInstanceRegistry extends Context.Service< + ProviderInstanceRegistry, + ProviderInstanceRegistryShape +>()("t3/provider/Services/ProviderInstanceRegistry") {} diff --git a/apps/server/src/provider/Services/ProviderInstanceRegistryMutator.ts b/apps/server/src/provider/Services/ProviderInstanceRegistryMutator.ts new file mode 100644 index 00000000000..ff861f961c7 --- /dev/null +++ b/apps/server/src/provider/Services/ProviderInstanceRegistryMutator.ts @@ -0,0 +1,52 @@ +/** + * ProviderInstanceRegistryMutator — internal handle used by the hydration + * layer to reconcile the live registry with a fresh + * `ProviderInstanceConfigMap`. + * + * Kept separate from the public `ProviderInstanceRegistry` service tag so + * downstream consumers (drivers, reactors, `ProviderService`) can only read + * from the registry. Only the hydration layer — which watches + * `ServerSettingsService.streamChanges` and applies diffs — imports this + * tag. + * + * The mutator exposes a single entry point, `reconcile(configMap)`, which: + * + * 1. Diffs the incoming map against the live one keyed by instance id. + * 2. Closes the per-instance `Scope` of every removed or replaced entry + * (tearing down adapter processes, refresh fibres, temp files) BEFORE + * creating the replacement — `reconcile` guarantees "at most one live + * instance per id" at all times. + * 3. Opens a fresh child `Scope` for every added or replaced entry, runs + * the driver's `create`, and stores the resulting `ProviderInstance` + * plus its scope. + * 4. Publishes one `void` tick on the registry's `streamChanges` PubSub at + * the end of the batch — consumers re-pull `listInstances` / + * `listUnavailable`. + * + * `reconcile` is idempotent: calling it with an unchanged config map is a + * no-op (no scope churn, no pubsub emission). 
+ * + * @module provider/Services/ProviderInstanceRegistryMutator + */ +import type { ProviderInstanceConfigMap } from "@t3tools/contracts"; +import { Context } from "effect"; +import type { Effect } from "effect"; + +export interface ProviderInstanceRegistryMutatorShape { + /** + * Bring the live registry in line with the supplied config map. See + * module docs for the add / remove / replace semantics. + * + * The effect never fails: individual driver `create` failures are + * captured as "unavailable" shadow snapshots inside the registry, the + * same way boot-time failures are handled by + * `makeProviderInstanceRegistry`. This keeps settings-watcher loops from + * erroring out on a single bad entry. + */ + readonly reconcile: (configMap: ProviderInstanceConfigMap) => Effect.Effect; +} + +export class ProviderInstanceRegistryMutator extends Context.Service< + ProviderInstanceRegistryMutator, + ProviderInstanceRegistryMutatorShape +>()("t3/provider/Services/ProviderInstanceRegistryMutator") {} diff --git a/apps/server/src/provider/Services/ProviderRegistry.ts b/apps/server/src/provider/Services/ProviderRegistry.ts index 2e04fa253b0..13a87bd873b 100644 --- a/apps/server/src/provider/Services/ProviderRegistry.ts +++ b/apps/server/src/provider/Services/ProviderRegistry.ts @@ -6,23 +6,42 @@ * * @module ProviderRegistry */ -import type { ProviderKind, ServerProvider } from "@t3tools/contracts"; +import type { ProviderInstanceId, ProviderDriverKind, ServerProvider } from "@t3tools/contracts"; import { Context } from "effect"; import type { Effect, Stream } from "effect"; export interface ProviderRegistryShape { /** - * Read the latest provider snapshots. + * Read the latest provider snapshots for every configured instance. + * Multiple snapshots may share the same `provider` kind (multiple + * instances of the same driver) and disambiguate via `instanceId`. */ readonly getProviders: Effect.Effect>; /** - * Refresh all providers, or a single provider when specified. 
+ * Refresh all providers, or the default instance of the specified + * kind when supplied. + * + * Retained for back-compat with legacy call sites (WS refresh RPC, + * orchestration metrics). New code should prefer `refreshInstance`. + * + * @deprecated prefer `refreshInstance` for new call sites. */ - readonly refresh: (provider?: ProviderKind) => Effect.Effect>; + readonly refresh: (provider?: ProviderDriverKind) => Effect.Effect>; /** - * Stream of provider snapshot updates. + * Refresh the specific configured instance. Returns the updated snapshot + * list. When the instance id is unknown the call resolves with the + * currently cached list (no error) — matching the legacy `refresh` shim + * behaviour so transport layers don't have to special-case unknowns. + */ + readonly refreshInstance: ( + instanceId: ProviderInstanceId, + ) => Effect.Effect>; + + /** + * Stream of provider snapshot updates — one emission per aggregated + * change. The array contains the full current state. */ readonly streamChanges: Stream.Stream>; } diff --git a/apps/server/src/provider/Services/ProviderService.ts b/apps/server/src/provider/Services/ProviderService.ts index 1e461fcd1c6..17a64689b49 100644 --- a/apps/server/src/provider/Services/ProviderService.ts +++ b/apps/server/src/provider/Services/ProviderService.ts @@ -13,7 +13,7 @@ */ import type { ProviderInterruptTurnInput, - ProviderKind, + ProviderInstanceId, ProviderRespondToRequestInput, ProviderRespondToUserInputInput, ProviderRuntimeEvent, @@ -29,6 +29,7 @@ import type { Effect, Stream } from "effect"; import type { ProviderServiceError } from "../Errors.ts"; import type { ProviderAdapterCapabilities } from "./ProviderAdapter.ts"; +import type { ProviderInstanceRoutingInfo } from "./ProviderAdapterRegistry.ts"; /** * ProviderServiceShape - Service API for provider session and turn orchestration. 
@@ -85,12 +86,16 @@ export interface ProviderServiceShape { readonly listSessions: () => Effect.Effect>; /** - * Read static capabilities for a provider adapter. + * Read capabilities for the adapter bound to a configured provider instance. */ readonly getCapabilities: ( - provider: ProviderKind, + instanceId: ProviderInstanceId, ) => Effect.Effect; + readonly getInstanceInfo: ( + instanceId: ProviderInstanceId, + ) => Effect.Effect; + /** * Roll back provider conversation state by a number of turns. */ diff --git a/apps/server/src/provider/Services/ProviderSessionDirectory.ts b/apps/server/src/provider/Services/ProviderSessionDirectory.ts index bee7a1b3736..99ffb800f90 100644 --- a/apps/server/src/provider/Services/ProviderSessionDirectory.ts +++ b/apps/server/src/provider/Services/ProviderSessionDirectory.ts @@ -1,5 +1,6 @@ import type { - ProviderKind, + ProviderInstanceId, + ProviderDriverKind, ProviderSessionRuntimeStatus, RuntimeMode, ThreadId, @@ -14,7 +15,13 @@ import type { export interface ProviderRuntimeBinding { readonly threadId: ThreadId; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; + /** + * Routing key for the configured provider instance that owns this + * session. The persistence layer promotes legacy null rows before + * exposing bindings; runtime callers must not infer this from `provider`. 
+ */ + readonly providerInstanceId?: ProviderInstanceId; readonly adapterKey?: string; readonly status?: ProviderSessionRuntimeStatus; readonly resumeCursor?: unknown | null; @@ -39,7 +46,7 @@ export interface ProviderSessionDirectoryShape { readonly getProvider: ( threadId: ThreadId, - ) => Effect.Effect; + ) => Effect.Effect; readonly getBinding: ( threadId: ThreadId, diff --git a/apps/server/src/provider/acp/AcpAdapterSupport.test.ts b/apps/server/src/provider/acp/AcpAdapterSupport.test.ts index 7457713e0af..a7fcdc4c827 100644 --- a/apps/server/src/provider/acp/AcpAdapterSupport.test.ts +++ b/apps/server/src/provider/acp/AcpAdapterSupport.test.ts @@ -1,5 +1,6 @@ import { describe, expect, it } from "vitest"; import * as EffectAcpErrors from "effect-acp/errors"; +import { ProviderDriverKind } from "@t3tools/contracts"; import { acpPermissionOutcome, mapAcpToAdapterError } from "./AcpAdapterSupport.ts"; @@ -12,7 +13,7 @@ describe("AcpAdapterSupport", () => { it("maps ACP request errors to provider adapter request errors", () => { const error = mapAcpToAdapterError( - "cursor", + ProviderDriverKind.make("cursor"), "thread-1" as never, "session/prompt", new EffectAcpErrors.AcpRequestError({ diff --git a/apps/server/src/provider/acp/AcpAdapterSupport.ts b/apps/server/src/provider/acp/AcpAdapterSupport.ts index 914bb7e8c31..499ebd2e707 100644 --- a/apps/server/src/provider/acp/AcpAdapterSupport.ts +++ b/apps/server/src/provider/acp/AcpAdapterSupport.ts @@ -1,6 +1,6 @@ import { type ProviderApprovalDecision, - type ProviderKind, + type ProviderDriverKind, type ThreadId, } from "@t3tools/contracts"; import { Schema } from "effect"; @@ -13,7 +13,7 @@ import { } from "../Errors.ts"; export function mapAcpToAdapterError( - provider: ProviderKind, + provider: ProviderDriverKind, threadId: ThreadId, method: string, error: EffectAcpErrors.AcpError, diff --git a/apps/server/src/provider/acp/AcpCoreRuntimeEvents.test.ts 
b/apps/server/src/provider/acp/AcpCoreRuntimeEvents.test.ts index 79b51f585b1..713d0668928 100644 --- a/apps/server/src/provider/acp/AcpCoreRuntimeEvents.test.ts +++ b/apps/server/src/provider/acp/AcpCoreRuntimeEvents.test.ts @@ -1,4 +1,4 @@ -import { RuntimeRequestId, TurnId } from "@t3tools/contracts"; +import { ProviderDriverKind, RuntimeRequestId, TurnId } from "@t3tools/contracts"; import { describe, expect, it } from "vitest"; import { @@ -30,7 +30,7 @@ describe("AcpCoreRuntimeEvents", () => { expect( makeAcpRequestOpenedEvent({ stamp, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), threadId: "thread-1" as never, turnId, requestId: RuntimeRequestId.make("request-1"), @@ -52,7 +52,7 @@ describe("AcpCoreRuntimeEvents", () => { expect( makeAcpRequestResolvedEvent({ stamp, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), threadId: "thread-1" as never, turnId, requestId: RuntimeRequestId.make("request-1"), @@ -75,7 +75,7 @@ describe("AcpCoreRuntimeEvents", () => { expect( makeAcpPlanUpdatedEvent({ stamp, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), threadId: "thread-1" as never, turnId, payload: { @@ -95,7 +95,7 @@ describe("AcpCoreRuntimeEvents", () => { expect( makeAcpToolCallEvent({ stamp, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), threadId: "thread-1" as never, turnId, toolCall: { @@ -119,7 +119,7 @@ describe("AcpCoreRuntimeEvents", () => { expect( makeAcpContentDeltaEvent({ stamp, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), threadId: "thread-1" as never, turnId, itemId: "assistant:session-1:segment:0", @@ -137,7 +137,7 @@ describe("AcpCoreRuntimeEvents", () => { expect( makeAcpAssistantItemEvent({ stamp, - provider: "cursor", + provider: ProviderDriverKind.make("cursor"), threadId: "thread-1" as never, turnId, itemId: "assistant:session-1:segment:0", diff --git a/apps/server/src/provider/acp/AcpCoreRuntimeEvents.ts 
b/apps/server/src/provider/acp/AcpCoreRuntimeEvents.ts index 0c0f06cc622..c93e61dc37b 100644 --- a/apps/server/src/provider/acp/AcpCoreRuntimeEvents.ts +++ b/apps/server/src/provider/acp/AcpCoreRuntimeEvents.ts @@ -4,7 +4,7 @@ import { type CanonicalRequestType, type EventId, type ProviderApprovalDecision, - type ProviderKind, + type ProviderDriverKind, type ProviderRuntimeEvent, type RuntimeRequestId, type ThreadId, @@ -78,7 +78,7 @@ function runtimeItemStatusFromAcpToolStatus( export function makeAcpRequestOpenedEvent(input: { readonly stamp: AcpEventStamp; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly threadId: ThreadId; readonly turnId: TurnId | undefined; readonly requestId: RuntimeRequestId; @@ -111,7 +111,7 @@ export function makeAcpRequestOpenedEvent(input: { export function makeAcpRequestResolvedEvent(input: { readonly stamp: AcpEventStamp; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly threadId: ThreadId; readonly turnId: TurnId | undefined; readonly requestId: RuntimeRequestId; @@ -134,7 +134,7 @@ export function makeAcpRequestResolvedEvent(input: { export function makeAcpPlanUpdatedEvent(input: { readonly stamp: AcpEventStamp; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly threadId: ThreadId; readonly turnId: TurnId | undefined; readonly payload: AcpPlanUpdate; @@ -159,7 +159,7 @@ export function makeAcpPlanUpdatedEvent(input: { export function makeAcpToolCallEvent(input: { readonly stamp: AcpEventStamp; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly threadId: ThreadId; readonly turnId: TurnId | undefined; readonly toolCall: AcpToolCallState; @@ -193,7 +193,7 @@ export function makeAcpToolCallEvent(input: { export function makeAcpAssistantItemEvent(input: { readonly stamp: AcpEventStamp; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly threadId: ThreadId; readonly 
turnId: TurnId | undefined; readonly itemId: string; @@ -215,7 +215,7 @@ export function makeAcpAssistantItemEvent(input: { export function makeAcpContentDeltaEvent(input: { readonly stamp: AcpEventStamp; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly threadId: ThreadId; readonly turnId: TurnId | undefined; readonly itemId?: string; diff --git a/apps/server/src/provider/acp/AcpNativeLogging.ts b/apps/server/src/provider/acp/AcpNativeLogging.ts index 2fb3f4e8335..3d44aac0a47 100644 --- a/apps/server/src/provider/acp/AcpNativeLogging.ts +++ b/apps/server/src/provider/acp/AcpNativeLogging.ts @@ -1,4 +1,4 @@ -import type { ProviderKind, ThreadId } from "@t3tools/contracts"; +import type { ProviderDriverKind, ThreadId } from "@t3tools/contracts"; import { Cause, Effect } from "effect"; import type * as EffectAcpProtocol from "effect-acp/protocol"; @@ -7,7 +7,7 @@ import type { AcpSessionRequestLogEvent, AcpSessionRuntimeOptions } from "./AcpS function writeNativeAcpLog(input: { readonly nativeEventLogger: EventNdjsonLogger | undefined; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly threadId: ThreadId; readonly kind: "request" | "protocol"; readonly payload: unknown; @@ -44,7 +44,7 @@ function formatRequestLogPayload(event: AcpSessionRequestLogEvent) { export function makeAcpNativeLoggers(input: { readonly nativeEventLogger: EventNdjsonLogger | undefined; - readonly provider: ProviderKind; + readonly provider: ProviderDriverKind; readonly threadId: ThreadId; }): Pick { return { diff --git a/apps/server/src/provider/acp/AcpSessionRuntime.ts b/apps/server/src/provider/acp/AcpSessionRuntime.ts index d32e70cb4ce..b4cf6656086 100644 --- a/apps/server/src/provider/acp/AcpSessionRuntime.ts +++ b/apps/server/src/provider/acp/AcpSessionRuntime.ts @@ -21,7 +21,7 @@ export interface AcpSpawnInput { readonly command: string; readonly args: ReadonlyArray; readonly cwd?: string; - readonly env?: Readonly>; + 
readonly env?: NodeJS.ProcessEnv; } export interface AcpSessionRuntimeOptions { diff --git a/apps/server/src/provider/acp/CursorAcpSupport.test.ts b/apps/server/src/provider/acp/CursorAcpSupport.test.ts index 94de569b2b2..30941acbfd5 100644 --- a/apps/server/src/provider/acp/CursorAcpSupport.test.ts +++ b/apps/server/src/provider/acp/CursorAcpSupport.test.ts @@ -99,11 +99,11 @@ describe("applyCursorAcpModelSelection", () => { applyCursorAcpModelSelection({ runtime, model: "gpt-5.4-medium-fast[reasoning=medium,context=272k]", - modelOptions: { - reasoning: "xhigh", - contextWindow: "1m", - fastMode: true, - }, + selections: [ + { id: "reasoning", value: "xhigh" }, + { id: "contextWindow", value: "1m" }, + { id: "fastMode", value: true }, + ], mapError: ({ step, configId, cause }) => new Error( step === "set-config-option" diff --git a/apps/server/src/provider/acp/CursorAcpSupport.ts b/apps/server/src/provider/acp/CursorAcpSupport.ts index 72b9af394b3..3e405dd7ff3 100644 --- a/apps/server/src/provider/acp/CursorAcpSupport.ts +++ b/apps/server/src/provider/acp/CursorAcpSupport.ts @@ -1,4 +1,4 @@ -import { type CursorModelOptions, type CursorSettings } from "@t3tools/contracts"; +import { type CursorSettings, type ProviderOptionSelection } from "@t3tools/contracts"; import { Effect, Layer, Scope } from "effect"; import { ChildProcessSpawner } from "effect/unstable/process"; import type * as EffectAcpErrors from "effect-acp/errors"; @@ -23,6 +23,7 @@ export interface CursorAcpRuntimeInput extends Omit< > { readonly childProcessSpawner: ChildProcessSpawner.ChildProcessSpawner["Service"]; readonly cursorSettings: CursorAcpRuntimeCursorSettings | null | undefined; + readonly environment?: NodeJS.ProcessEnv; } export interface CursorAcpModelSelectionErrorContext { @@ -34,6 +35,7 @@ export interface CursorAcpModelSelectionErrorContext { export function buildCursorAcpSpawnInput( cursorSettings: CursorAcpRuntimeCursorSettings | null | undefined, cwd: string, + environment?: 
NodeJS.ProcessEnv, ): AcpSpawnInput { return { command: cursorSettings?.binaryPath || "agent", @@ -42,6 +44,7 @@ export function buildCursorAcpSpawnInput( "acp", ], cwd, + ...(environment ? { env: environment } : {}), }; } @@ -52,7 +55,7 @@ export const makeCursorAcpRuntime = ( const acpContext = yield* Layer.build( AcpSessionRuntime.layer({ ...input, - spawn: buildCursorAcpSpawnInput(input.cursorSettings, input.cwd), + spawn: buildCursorAcpSpawnInput(input.cursorSettings, input.cwd, input.environment), authMethodId: "cursor_login", clientCapabilities: CURSOR_PARAMETERIZED_MODEL_PICKER_CAPABILITIES, }).pipe( @@ -76,7 +79,7 @@ interface CursorAcpModelSelectionRuntime { export function applyCursorAcpModelSelection(input: { readonly runtime: CursorAcpModelSelectionRuntime; readonly model: string | null | undefined; - readonly modelOptions: CursorModelOptions | null | undefined; + readonly selections: ReadonlyArray | null | undefined; readonly mapError: (context: CursorAcpModelSelectionErrorContext) => E; }): Effect.Effect { return Effect.gen(function* () { @@ -91,7 +94,7 @@ export function applyCursorAcpModelSelection(input: { const configUpdates = resolveCursorAcpConfigUpdates( yield* input.runtime.getConfigOptions, - input.modelOptions, + input.selections, ); for (const update of configUpdates) { yield* input.runtime.setConfigOption(update.configId, update.value).pipe( diff --git a/apps/server/src/provider/builtInDrivers.ts b/apps/server/src/provider/builtInDrivers.ts new file mode 100644 index 00000000000..09c165e8c61 --- /dev/null +++ b/apps/server/src/provider/builtInDrivers.ts @@ -0,0 +1,65 @@ +/** + * BUILT_IN_DRIVERS — the static set of `ProviderDriver`s this build ships + * with. + * + * Every driver that the server knows how to instantiate from settings is + * listed here. 
The `ProviderInstanceRegistry` iterates this array when + * resolving `providerInstances` entries; anything not in the array surfaces + * as an `"unavailable"` shadow snapshot at runtime (see + * `buildUnavailableProviderSnapshot`). + * + * Adding a new first-party driver means: + * 1. implement `ProviderDriver` in a sibling `Drivers/Driver.ts`, + * 2. add it to this array, + * 3. ensure the runtime layer satisfies its declared `R`. + * + * The aggregated `BuiltInDriversEnv` type is the union of every driver's + * env requirement — the registry layer's `R` is this type, and the runtime + * layer (ChildProcessSpawner, FileSystem, Path, ServerConfig, + * OpenCodeRuntime, …) must satisfy it. + * + * @module provider/builtInDrivers + */ +import { AmpDriver, type AmpDriverEnv } from "./Drivers/AmpDriver.ts"; +import { ClaudeDriver, type ClaudeDriverEnv } from "./Drivers/ClaudeDriver.ts"; +import { CodexDriver, type CodexDriverEnv } from "./Drivers/CodexDriver.ts"; +import { CopilotDriver, type CopilotDriverEnv } from "./Drivers/CopilotDriver.ts"; +import { CursorDriver, type CursorDriverEnv } from "./Drivers/CursorDriver.ts"; +import { GeminiCliDriver, type GeminiCliDriverEnv } from "./Drivers/GeminiCliDriver.ts"; +import { KiloDriver, type KiloDriverEnv } from "./Drivers/KiloDriver.ts"; +import { OpenCodeDriver, type OpenCodeDriverEnv } from "./Drivers/OpenCodeDriver.ts"; +import type { AnyProviderDriver } from "./ProviderDriver.ts"; + +/** + * Union of infrastructure services required to construct any built-in + * driver. The registry layer declares `R = BuiltInDriversEnv`; the runtime + * layer must provide every service in this union. + */ +export type BuiltInDriversEnv = + | ClaudeDriverEnv + | CodexDriverEnv + | CursorDriverEnv + | OpenCodeDriverEnv + | AmpDriverEnv + | CopilotDriverEnv + | GeminiCliDriverEnv + | KiloDriverEnv; + +/** + * Ordered list of built-in drivers. 
Order matters only for tie-breaking in + * UI presentation — the registry itself is keyed by `driverKind`, so + * iteration order has no functional effect on instance lookup. + * + * Upstream drivers come first (Codex, Claude, Cursor, OpenCode); fork + * extension drivers follow in alphabetical order. + */ +export const BUILT_IN_DRIVERS: ReadonlyArray> = [ + CodexDriver, + ClaudeDriver, + CursorDriver, + OpenCodeDriver, + AmpDriver, + CopilotDriver, + GeminiCliDriver, + KiloDriver, +]; diff --git a/apps/server/src/provider/builtInProviderCatalog.ts b/apps/server/src/provider/builtInProviderCatalog.ts new file mode 100644 index 00000000000..ee25b6d0184 --- /dev/null +++ b/apps/server/src/provider/builtInProviderCatalog.ts @@ -0,0 +1,17 @@ +import type { ProviderDriverKind, ProviderInstanceId, ServerProvider } from "@t3tools/contracts"; +import type { Stream } from "effect"; +import type { ServerProviderShape } from "./Services/ServerProvider.ts"; + +export type ProviderSnapshotSource = { + /** + * Routing key — uniquely identifies this instance in the aggregated + * snapshot list. Two different snapshot sources may share the same + * driver kind (multiple instances of the same driver). + */ + readonly instanceId: ProviderInstanceId; + /** Driver implementation kind. 
*/ + readonly driverKind: ProviderDriverKind; + readonly getSnapshot: ServerProviderShape["getSnapshot"]; + readonly refresh: ServerProviderShape["refresh"]; + readonly streamChanges: Stream.Stream; +}; diff --git a/apps/server/src/provider/claude-agent-sdk.d.ts b/apps/server/src/provider/claude-agent-sdk.d.ts deleted file mode 100644 index dd98e6c595f..00000000000 --- a/apps/server/src/provider/claude-agent-sdk.d.ts +++ /dev/null @@ -1,186 +0,0 @@ -declare module "@anthropic-ai/claude-agent-sdk" { - export type PermissionMode = "default" | "acceptEdits" | "bypassPermissions" | "plan" | "dontAsk"; - - export interface PermissionUpdate { - readonly [key: string]: unknown; - } - - export type PermissionResult = - | { - readonly behavior: "allow"; - readonly updatedInput?: unknown; - readonly message?: string; - } - | { - readonly behavior: "deny"; - readonly updatedInput?: unknown; - readonly message?: string; - }; - - export interface CanUseToolCallbackOptions { - readonly signal: AbortSignal; - readonly toolUseID?: string; - readonly suggestions?: ReadonlyArray; - readonly [key: string]: unknown; - } - - export type CanUseTool = ( - toolName: string, - toolInput: Record, - callbackOptions: CanUseToolCallbackOptions, - ) => Promise; - - export interface SDKUserMessage { - readonly [key: string]: unknown; - } - - export interface SDKResultMessage { - readonly subtype?: string; - readonly duration_ms?: number; - readonly durationMs?: number; - readonly is_error?: boolean; - readonly isError?: boolean; - readonly num_turns?: number; - readonly total_cost_usd?: number; - readonly stop_reason?: string | null; - readonly errors?: ReadonlyArray; - readonly usage?: { - readonly input_tokens?: number; - readonly output_tokens?: number; - readonly cache_creation_input_tokens?: number; - readonly cache_read_input_tokens?: number; - readonly server_tool_use?: { - readonly web_search_requests?: number; - }; - }; - readonly modelUsage?: { readonly [key: string]: unknown }; - 
readonly result?: string; - readonly session_id?: string; - readonly [key: string]: unknown; - } - - export interface SDKMessage { - readonly type?: string; - readonly subtype?: string; - readonly role?: string; - readonly message?: { - readonly id?: string; - readonly content?: ReadonlyArray; - readonly [key: string]: unknown; - }; - readonly content?: ReadonlyArray>; - readonly uuid?: string; - readonly session_id?: string; - readonly parent_tool_use_id?: string; - readonly tool_use_id?: string; - readonly tool_name?: string; - readonly input?: Record; - readonly result?: string; - readonly error?: string; - readonly errors?: ReadonlyArray; - readonly content_block?: Record; - readonly index?: number; - readonly preceding_tool_use_ids?: ReadonlyArray; - readonly is_error?: boolean; - readonly suggestions?: ReadonlyArray; - - // System message fields - readonly status?: string; - readonly hook_id?: string; - readonly hook_name?: string; - readonly hook_event?: string; - readonly output?: string; - readonly stdout?: string; - readonly stderr?: string; - readonly outcome?: "error" | "cancelled" | "success"; - readonly exit_code?: number; - - // Task fields - readonly task_id?: string; - readonly description?: string; - readonly task_type?: string; - readonly summary?: string; - readonly usage?: { readonly [key: string]: unknown }; - readonly last_tool_name?: string; - - // File persistence fields - readonly files?: ReadonlyArray<{ readonly filename: string; readonly file_id: string }>; - readonly failed?: ReadonlyArray<{ readonly filename: string; readonly error: string }>; - - // Tool progress fields - readonly elapsed_time_seconds?: number; - - // Auth status fields - readonly isAuthenticating?: boolean; - - // Stream event fields - readonly event?: Record; - - readonly [key: string]: unknown; - } - - export type ThinkingConfig = - | { readonly type: "adaptive" } - | { readonly type: "enabled"; readonly budgetTokens?: number } - | { readonly type: "disabled" }; - 
- export type EffortLevel = "low" | "medium" | "high" | "max"; - - export interface SpawnOptions { - readonly args: string[]; - readonly env?: Record; - readonly cwd?: string; - readonly [key: string]: unknown; - } - - export interface SpawnedProcess { - readonly stdin: NodeJS.WritableStream; - readonly stdout: NodeJS.ReadableStream; - killed: boolean; - exitCode: number | null; - kill(signal: NodeJS.Signals): boolean; - on(event: "exit" | "error", listener: (...args: unknown[]) => void): void; - once(event: "exit" | "error", listener: (...args: unknown[]) => void): void; - off(event: "exit" | "error", listener: (...args: unknown[]) => void): void; - } - - export type SettingSource = "user" | "project" | "local"; - - export interface Options { - readonly cwd?: string; - readonly model?: string; - readonly pathToClaudeCodeExecutable?: string; - readonly permissionMode?: PermissionMode; - readonly allowDangerouslySkipPermissions?: boolean; - /** @deprecated Use `thinking` instead. */ - readonly maxThinkingTokens?: number; - readonly thinking?: ThinkingConfig; - readonly effort?: EffortLevel; - readonly resume?: string; - readonly resumeSessionAt?: string; - readonly includePartialMessages?: boolean; - readonly persistSession?: boolean; - readonly sessionId?: string; - readonly settings?: Record; - readonly settingSources?: SettingSource[]; - readonly spawnClaudeCodeProcess?: (options: SpawnOptions) => SpawnedProcess; - readonly canUseTool?: CanUseTool; - readonly env?: Record; - readonly additionalDirectories?: ReadonlyArray; - readonly stderr?: (message: string) => void; - } - - export type Query = AsyncIterable & { - readonly interrupt?: () => Promise; - readonly setModel?: (model?: string) => Promise; - readonly setPermissionMode?: (mode: PermissionMode) => Promise; - readonly setMaxThinkingTokens?: (maxThinkingTokens: number | null) => Promise; - readonly close?: () => void; - readonly initializationResult?: () => Promise>; - }; - - export function query(input: { 
- readonly prompt: string | AsyncIterable; - readonly options?: Options; - }): Query; -} diff --git a/apps/server/src/provider/copilot-sdk.d.ts b/apps/server/src/provider/copilot-sdk.d.ts deleted file mode 100644 index eaf0def0904..00000000000 --- a/apps/server/src/provider/copilot-sdk.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -declare module "@github/copilot-sdk" { - export type CopilotClientOptions = any; - export type ModelInfo = any; - export type PermissionRequest = any; - export type PermissionRequestResult = any; - export type SessionEvent = any; - export type CopilotSession = any; - - export class CopilotClient { - constructor(options: CopilotClientOptions); - start(): Promise; - listModels(): Promise; - createSession(config: any): Promise; - resumeSession(sessionId: string, config: any): Promise; - stop(): Promise; - } -} diff --git a/apps/server/src/provider/makeManagedServerProvider.test.ts b/apps/server/src/provider/makeManagedServerProvider.test.ts index 31fe73a467e..ff664763804 100644 --- a/apps/server/src/provider/makeManagedServerProvider.test.ts +++ b/apps/server/src/provider/makeManagedServerProvider.test.ts @@ -1,15 +1,28 @@ import { describe, it, assert } from "@effect/vitest"; -import type { ServerProvider } from "@t3tools/contracts"; +import { ProviderDriverKind, ProviderInstanceId, type ServerProvider } from "@t3tools/contracts"; +import { createModelCapabilities } from "@t3tools/shared/model"; import { Deferred, Effect, Fiber, PubSub, Ref, Stream } from "effect"; import { makeManagedServerProvider } from "./makeManagedServerProvider.ts"; +const emptyCapabilities = createModelCapabilities({ optionDescriptors: [] }); +const fastModeCapabilities = createModelCapabilities({ + optionDescriptors: [ + { + id: "fastMode", + label: "Fast Mode", + type: "boolean", + }, + ], +}); + interface TestSettings { readonly enabled: boolean; } const initialSnapshot: ServerProvider = { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), + driver: 
ProviderDriverKind.make("codex"), enabled: true, installed: true, version: null, @@ -23,7 +36,8 @@ const initialSnapshot: ServerProvider = { }; const refreshedSnapshot: ServerProvider = { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), + driver: ProviderDriverKind.make("codex"), enabled: true, installed: true, version: "1.0.0", @@ -43,13 +57,7 @@ const enrichedSnapshot: ServerProvider = { slug: "composer-2", name: "Composer 2", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: fastModeCapabilities, }, ], }; @@ -68,13 +76,7 @@ const enrichedSnapshotSecond: ServerProvider = { slug: "gpt-5.4", name: "GPT-5.4", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: emptyCapabilities, }, ], }; diff --git a/apps/server/src/provider/opencodeRuntime.ts b/apps/server/src/provider/opencodeRuntime.ts index 41ec5102c3c..c3e973e7e32 100644 --- a/apps/server/src/provider/opencodeRuntime.ts +++ b/apps/server/src/provider/opencodeRuntime.ts @@ -109,6 +109,7 @@ export interface OpenCodeRuntimeShape { */ readonly startOpenCodeServerProcess: (input: { readonly binaryPath: string; + readonly environment?: NodeJS.ProcessEnv; readonly port?: number; readonly hostname?: string; readonly timeoutMs?: number; @@ -121,6 +122,7 @@ export interface OpenCodeRuntimeShape { readonly connectToOpenCodeServer: (input: { readonly binaryPath: string; readonly serverUrl?: string | null; + readonly environment?: NodeJS.ProcessEnv; readonly port?: number; readonly hostname?: string; readonly timeoutMs?: number; @@ -128,6 +130,7 @@ export interface OpenCodeRuntimeShape { readonly runOpenCodeCommand: (input: { readonly binaryPath: string; readonly args: ReadonlyArray; + readonly 
environment?: NodeJS.ProcessEnv; }) => Effect.Effect; readonly createOpenCodeSdkClient: (input: { readonly baseUrl: string; @@ -274,7 +277,7 @@ const makeOpenCodeRuntime = Effect.gen(function* () { const child = yield* spawner.spawn( ChildProcess.make(input.binaryPath, [...input.args], { shell: process.platform === "win32", - env: process.env, + env: input.environment ?? process.env, }), ); const [stdout, stderr, code] = yield* Effect.all( @@ -330,8 +333,10 @@ const makeOpenCodeRuntime = Effect.gen(function* () { const child = yield* spawner .spawn( ChildProcess.make(input.binaryPath, args, { + detached: process.platform !== "win32", + shell: process.platform === "win32", env: { - ...process.env, + ...(input.environment ?? process.env), OPENCODE_CONFIG_CONTENT: JSON.stringify({}), }, }), @@ -348,6 +353,25 @@ const makeOpenCodeRuntime = Effect.gen(function* () { ), ); + const killOpenCodeProcessGroup = (signal: NodeJS.Signals) => + process.platform === "win32" + ? child.kill({ killSignal: signal, forceKillAfter: "1 second" }).pipe(Effect.asVoid) + : Effect.sync(() => { + try { + process.kill(-Number(child.pid), signal); + } catch { + // The direct child may already have exited after starting the + // server; the process group kill is best-effort cleanup for + // any serve process left in that group. + } + }); + const terminateChild = killOpenCodeProcessGroup("SIGTERM").pipe( + Effect.andThen(Effect.sleep("1 second")), + Effect.andThen(killOpenCodeProcessGroup("SIGKILL")), + Effect.ignore, + ); + yield* Scope.addFinalizer(runtimeScope, terminateChild); + const stdoutRef = yield* Ref.make(""); const stderrRef = yield* Ref.make(""); const readyDeferred = yield* Deferred.make(); @@ -452,6 +476,7 @@ const makeOpenCodeRuntime = Effect.gen(function* () { return startOpenCodeServerProcess({ binaryPath: input.binaryPath, + ...(input.environment !== undefined ? { environment: input.environment } : {}), ...(input.port !== undefined ? 
{ port: input.port } : {}), ...(input.hostname !== undefined ? { hostname: input.hostname } : {}), ...(input.timeoutMs !== undefined ? { timeoutMs: input.timeoutMs } : {}), diff --git a/apps/server/src/provider/providerKind.ts b/apps/server/src/provider/providerKind.ts index 14f54075553..a748919b460 100644 --- a/apps/server/src/provider/providerKind.ts +++ b/apps/server/src/provider/providerKind.ts @@ -1,4 +1,11 @@ -import type { ProviderKind } from "@t3tools/contracts"; +/** + * Fork-local `ProviderKind` closed string union. + * + * Upstream's #2277 refactor removed `ProviderKind` from `@t3tools/contracts` + * in favor of the open branded `ProviderDriverKind`. The fork still ships + * eight built-in driver names and needs a closed union for legacy-name + * normalization in `OrchestrationEventStore`'s read path. + */ const PROVIDER_KINDS = [ "codex", @@ -9,7 +16,9 @@ const PROVIDER_KINDS = [ "geminiCli", "amp", "kilo", -] as const satisfies ReadonlyArray; +] as const; + +export type ProviderKind = (typeof PROVIDER_KINDS)[number]; const LEGACY_PROVIDER_KIND_ALIASES = { claudeCode: "claudeAgent", diff --git a/apps/server/src/provider/providerSnapshot.test.ts b/apps/server/src/provider/providerSnapshot.test.ts index 0a0d31ccb59..449dca8fc5a 100644 --- a/apps/server/src/provider/providerSnapshot.test.ts +++ b/apps/server/src/provider/providerSnapshot.test.ts @@ -1,23 +1,33 @@ import { describe, expect, it } from "vitest"; -import type { ModelCapabilities } from "@t3tools/contracts"; +import { ProviderDriverKind, type ModelCapabilities } from "@t3tools/contracts"; +import { createModelCapabilities } from "@t3tools/shared/model"; import { providerModelsFromSettings } from "./providerSnapshot.ts"; -const OPENCODE_CUSTOM_MODEL_CAPABILITIES: ModelCapabilities = { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - variantOptions: [{ value: "medium", label: "Medium", 
isDefault: true }], - agentOptions: [{ value: "build", label: "Build", isDefault: true }], -}; +const OPENCODE_CUSTOM_MODEL_CAPABILITIES: ModelCapabilities = createModelCapabilities({ + optionDescriptors: [ + { + id: "variant", + label: "Reasoning", + type: "select", + options: [{ id: "medium", label: "Medium", isDefault: true }], + currentValue: "medium", + }, + { + id: "agent", + label: "Agent", + type: "select", + options: [{ id: "build", label: "Build", isDefault: true }], + currentValue: "build", + }, + ], +}); describe("providerModelsFromSettings", () => { it("applies the provided capabilities to custom models", () => { const models = providerModelsFromSettings( [], - "opencode", + ProviderDriverKind.make("opencode"), ["openai/gpt-5"], OPENCODE_CUSTOM_MODEL_CAPABILITIES, ); diff --git a/apps/server/src/provider/providerSnapshot.ts b/apps/server/src/provider/providerSnapshot.ts index 82a3f418803..af0c91274c3 100644 --- a/apps/server/src/provider/providerSnapshot.ts +++ b/apps/server/src/provider/providerSnapshot.ts @@ -1,4 +1,5 @@ import type { + ProviderDriverKind, ModelCapabilities, ServerProvider, ServerProviderAuth, @@ -8,7 +9,9 @@ import type { ServerProviderState, } from "@t3tools/contracts"; import { Effect, Stream } from "effect"; +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; import { normalizeModelSlug } from "@t3tools/shared/model"; +import { isWindowsCommandNotFound } from "../processRunner.ts"; export const DEFAULT_TIMEOUT_MS = 4_000; // Auth status checks involve disk/network lookups and can be slow on first run (especially Windows) @@ -34,6 +37,8 @@ export interface ServerProviderPresentation { readonly showInteractionModeToggle?: boolean; } +export type ServerProviderDraft = Omit; + export function nonEmptyTrimmed(value: string | undefined): string | undefined { if (!value) return undefined; const trimmed = value.trim(); @@ -45,6 +50,26 @@ export function isCommandMissingCause(error: Error): boolean { return 
lower.includes("enoent") || lower.includes("notfound"); } +export const spawnAndCollect = (binaryPath: string, command: ChildProcess.Command) => + Effect.gen(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + const child = yield* spawner.spawn(command); + const [stdout, stderr, exitCode] = yield* Effect.all( + [ + collectStreamAsString(child.stdout), + collectStreamAsString(child.stderr), + child.exitCode.pipe(Effect.map(Number)), + ], + { concurrency: "unbounded" }, + ); + + const result: CommandResult = { stdout, stderr, code: exitCode }; + if (isWindowsCommandNotFound(exitCode, stderr)) { + return yield* Effect.fail(new Error(`spawn ${binaryPath} ENOENT`)); + } + return result; + }).pipe(Effect.scoped); + export function detailFromResult( result: CommandResult & { readonly timedOut?: boolean }, ): string | undefined { @@ -88,7 +113,7 @@ export function parseGenericCliVersion(output: string): string | null { export function providerModelsFromSettings( builtInModels: ReadonlyArray, - provider: ServerProvider["provider"], + provider: ProviderDriverKind, customModels: ReadonlyArray, customModelCapabilities: ModelCapabilities, ): ReadonlyArray { @@ -157,20 +182,18 @@ export function buildBooleanOptionDescriptor(input: { } export function buildServerProvider(input: { - provider: ServerProvider["provider"]; - presentation?: ServerProviderPresentation; + presentation: ServerProviderPresentation; enabled: boolean; checkedAt: string; models: ReadonlyArray; slashCommands?: ReadonlyArray; skills?: ReadonlyArray; probe: ProviderProbeResult; -}): ServerProvider { +}): ServerProviderDraft { return { - provider: input.provider, - ...(input.presentation?.displayName ? { displayName: input.presentation.displayName } : {}), - ...(input.presentation?.badgeLabel ? 
{ badgeLabel: input.presentation.badgeLabel } : {}), - ...(typeof input.presentation?.showInteractionModeToggle === "boolean" + displayName: input.presentation.displayName, + ...(input.presentation.badgeLabel ? { badgeLabel: input.presentation.badgeLabel } : {}), + ...(typeof input.presentation.showInteractionModeToggle === "boolean" ? { showInteractionModeToggle: input.presentation.showInteractionModeToggle } : {}), enabled: input.enabled, diff --git a/apps/server/src/provider/providerStatusCache.test.ts b/apps/server/src/provider/providerStatusCache.test.ts index b0cb5bc663c..8986ba48f29 100644 --- a/apps/server/src/provider/providerStatusCache.test.ts +++ b/apps/server/src/provider/providerStatusCache.test.ts @@ -1,20 +1,33 @@ import * as NodeServices from "@effect/platform-node/NodeServices"; -import type { ServerProvider } from "@t3tools/contracts"; +import { + defaultInstanceIdForDriver, + ProviderDriverKind, + ProviderInstanceId, + type ServerProvider, +} from "@t3tools/contracts"; +import { createModelCapabilities } from "@t3tools/shared/model"; import { assert, it } from "@effect/vitest"; import { Effect, FileSystem } from "effect"; import { hydrateCachedProvider, + isCachedProviderCorrelated, readProviderStatusCache, resolveProviderStatusCachePath, writeProviderStatusCache, } from "./providerStatusCache.ts"; +const emptyCapabilities = createModelCapabilities({ optionDescriptors: [] }); +const CODEX_DRIVER = ProviderDriverKind.make("codex"); +const CLAUDE_AGENT_DRIVER = ProviderDriverKind.make("claudeAgent"); +const OPENCODE_DRIVER = ProviderDriverKind.make("opencode"); + const makeProvider = ( - provider: ServerProvider["provider"], + provider: ProviderDriverKind, overrides?: Partial, ): ServerProvider => ({ - provider, + instanceId: defaultInstanceIdForDriver(provider), + driver: provider, enabled: true, installed: true, version: "1.0.0", @@ -32,26 +45,26 @@ it.layer(NodeServices.layer)("providerStatusCache", (it) => { Effect.gen(function* () { const fs 
= yield* FileSystem.FileSystem; const tempDir = yield* fs.makeTempDirectoryScoped({ prefix: "t3-provider-cache-" }); - const codexProvider = makeProvider("codex"); - const claudeProvider = makeProvider("claudeAgent", { + const codexProvider = makeProvider(CODEX_DRIVER); + const claudeProvider = makeProvider(CLAUDE_AGENT_DRIVER, { status: "warning", auth: { status: "unknown" }, }); - const openCodeProvider = makeProvider("opencode", { + const openCodeProvider = makeProvider(OPENCODE_DRIVER, { status: "warning", auth: { status: "unknown", type: "opencode" }, }); - const codexPath = resolveProviderStatusCachePath({ + const codexPath = yield* resolveProviderStatusCachePath({ cacheDir: tempDir, - provider: "codex", + instanceId: defaultInstanceIdForDriver(ProviderDriverKind.make("codex")), }); - const claudePath = resolveProviderStatusCachePath({ + const claudePath = yield* resolveProviderStatusCachePath({ cacheDir: tempDir, - provider: "claudeAgent", + instanceId: defaultInstanceIdForDriver(ProviderDriverKind.make("claudeAgent")), }); - const openCodePath = resolveProviderStatusCachePath({ + const openCodePath = yield* resolveProviderStatusCachePath({ cacheDir: tempDir, - provider: "opencode", + instanceId: defaultInstanceIdForDriver(ProviderDriverKind.make("opencode")), }); yield* writeProviderStatusCache({ @@ -74,20 +87,14 @@ it.layer(NodeServices.layer)("providerStatusCache", (it) => { ); it("hydrates cached provider status while preserving current settings-derived models", () => { - const cachedCodex = makeProvider("codex", { + const cachedCodex = makeProvider(CODEX_DRIVER, { checkedAt: "2026-04-10T12:00:00.000Z", models: [ { slug: "gpt-5-mini", name: "GPT-5 Mini", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: emptyCapabilities, }, ], message: "Cached message", @@ -100,19 +107,13 @@ 
it.layer(NodeServices.layer)("providerStatusCache", (it) => { }, ], }); - const fallbackCodex = makeProvider("codex", { + const fallbackCodex = makeProvider(CODEX_DRIVER, { models: [ { slug: "gpt-5.4", name: "GPT-5.4", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: emptyCapabilities, }, ], message: "Pending refresh", @@ -131,13 +132,7 @@ it.layer(NodeServices.layer)("providerStatusCache", (it) => { slug: "gpt-5-mini", name: "GPT-5 Mini", isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: emptyCapabilities, }, ], installed: cachedCodex.installed, @@ -153,11 +148,11 @@ it.layer(NodeServices.layer)("providerStatusCache", (it) => { }); it("ignores stale cached enabled state when the provider is now disabled", () => { - const cachedCodex = makeProvider("codex", { + const cachedCodex = makeProvider(CODEX_DRIVER, { checkedAt: "2026-04-10T12:00:00.000Z", message: "Cached ready status", }); - const disabledFallback = makeProvider("codex", { + const disabledFallback = makeProvider(CODEX_DRIVER, { enabled: false, installed: false, version: null, @@ -174,4 +169,68 @@ it.layer(NodeServices.layer)("providerStatusCache", (it) => { disabledFallback, ); }); + + it("rejects cached snapshots that are not correlated to the fallback instance", () => { + const fallbackCodex = makeProvider(CODEX_DRIVER, { + models: [ + { + slug: "gpt-5.4", + name: "GPT-5.4", + isCustom: false, + capabilities: emptyCapabilities, + }, + ], + }); + const legacyCachedCodex = { + provider: ProviderDriverKind.make("codex"), + enabled: true, + installed: true, + version: "1.0.0", + status: "ready", + auth: { status: "authenticated" }, + checkedAt: "2026-04-10T12:00:00.000Z", + models: [ + { + slug: 
"cached-legacy-model", + name: "Cached Legacy Model", + isCustom: false, + capabilities: emptyCapabilities, + }, + ], + slashCommands: [], + skills: [], + } as unknown as ServerProvider; + const mismatchedCachedCodex = makeProvider(CODEX_DRIVER, { + instanceId: ProviderInstanceId.make("codex_personal"), + }); + + assert.strictEqual( + isCachedProviderCorrelated({ + cachedProvider: legacyCachedCodex, + fallbackProvider: fallbackCodex, + }), + false, + ); + assert.deepStrictEqual( + hydrateCachedProvider({ + cachedProvider: legacyCachedCodex, + fallbackProvider: fallbackCodex, + }), + fallbackCodex, + ); + assert.strictEqual( + isCachedProviderCorrelated({ + cachedProvider: mismatchedCachedCodex, + fallbackProvider: fallbackCodex, + }), + false, + ); + assert.deepStrictEqual( + hydrateCachedProvider({ + cachedProvider: mismatchedCachedCodex, + fallbackProvider: fallbackCodex, + }), + fallbackCodex, + ); + }); }); diff --git a/apps/server/src/provider/providerStatusCache.ts b/apps/server/src/provider/providerStatusCache.ts index 6ba5a4d5bb2..d6c051fd560 100644 --- a/apps/server/src/provider/providerStatusCache.ts +++ b/apps/server/src/provider/providerStatusCache.ts @@ -1,25 +1,17 @@ -import * as nodePath from "node:path"; -import { type ServerProvider, ServerProvider as ServerProviderSchema } from "@t3tools/contracts"; -import { Cause, Effect, FileSystem, Schema } from "effect"; +import { + type ProviderDriverKind, + type ProviderInstanceId, + type ServerProvider, + ServerProvider as ServerProviderSchema, +} from "@t3tools/contracts"; +import { Cause, Effect, FileSystem, Path, Schema } from "effect"; import { writeFileStringAtomically } from "../atomicWrite.ts"; -export const PROVIDER_CACHE_IDS = [ - "codex", - "claudeAgent", - "opencode", - "cursor", -] as const satisfies ReadonlyArray; - const decodeProviderStatusCache = Schema.decodeUnknownEffect( Schema.fromJsonString(ServerProviderSchema), ); -const providerOrderRank = (provider: ServerProvider["provider"]): 
number => { - const rank = (PROVIDER_CACHE_IDS as ReadonlyArray).indexOf(provider); - return rank === -1 ? Number.MAX_SAFE_INTEGER : rank; -}; - const mergeProviderModels = ( fallbackModels: ReadonlyArray, cachedModels: ReadonlyArray, @@ -32,13 +24,27 @@ export const orderProviderSnapshots = ( providers: ReadonlyArray, ): ReadonlyArray => [...providers].toSorted( - (left, right) => providerOrderRank(left.provider) - providerOrderRank(right.provider), + (left, right) => + (left.displayName ?? "").localeCompare(right.displayName ?? "") || + left.driver.localeCompare(right.driver) || + left.instanceId.localeCompare(right.instanceId), ); +export const isCachedProviderCorrelated = (input: { + readonly cachedProvider: ServerProvider; + readonly fallbackProvider: ServerProvider; +}): boolean => + input.cachedProvider.instanceId === input.fallbackProvider.instanceId && + input.cachedProvider.driver === input.fallbackProvider.driver; + export const hydrateCachedProvider = (input: { readonly cachedProvider: ServerProvider; readonly fallbackProvider: ServerProvider; }): ServerProvider => { + if (!isCachedProviderCorrelated(input)) { + return input.fallbackProvider; + } + if ( !input.fallbackProvider.enabled || input.cachedProvider.enabled !== input.fallbackProvider.enabled @@ -64,10 +70,46 @@ export const hydrateCachedProvider = (input: { : hydratedProvider; }; -export const resolveProviderStatusCachePath = (input: { +/** + * Resolve the on-disk cache path for a provider instance snapshot. + * + * File naming: `/.json`. For the default instance of + * a built-in kind this equals the legacy `.json` path (because + * `defaultInstanceIdForDriver(kind).toString() === kind`), so existing + * cached snapshots remain readable without any rename step. + * + * Non-default instances (e.g. `codex_personal`) land in their own files and + * never collide with other instances. + * + * Cache contents must still carry matching `instanceId` + `driver` identity + * before hydration. 
The filename alone is not trusted as a routing key. + */ +export const resolveProviderStatusCachePath = Effect.fn("resolveProviderStatusCachePath")( + function* (input: { + readonly cacheDir: string; + readonly instanceId: ProviderInstanceId; + }): Effect.fn.Return { + const path = yield* Path.Path; + return path.join(input.cacheDir, `${input.instanceId}.json`); + }, +); + +/** + * Legacy kind-keyed path resolver retained for callers that still think in + * terms of `ProviderDriverKind`. Prefer `resolveProviderStatusCachePath` with an + * `instanceId`; new code should route through the instance registry. + * + * @deprecated use `resolveProviderStatusCachePath` with an instance id. + */ +export const resolveLegacyProviderStatusCachePath = Effect.fn( + "resolveLegacyProviderStatusCachePath", +)(function* (input: { readonly cacheDir: string; - readonly provider: ServerProvider["provider"]; -}) => nodePath.join(input.cacheDir, `${input.provider}.json`); + readonly provider: ProviderDriverKind; +}): Effect.fn.Return { + const path = yield* Path.Path; + return path.join(input.cacheDir, `${input.provider}.json`); +}); export const readProviderStatusCache = (filePath: string) => Effect.gen(function* () { diff --git a/apps/server/src/provider/testUtils/providerAdapterRegistryMock.ts b/apps/server/src/provider/testUtils/providerAdapterRegistryMock.ts new file mode 100644 index 00000000000..9a4f107db8b --- /dev/null +++ b/apps/server/src/provider/testUtils/providerAdapterRegistryMock.ts @@ -0,0 +1,95 @@ +/** + * Test helpers for constructing a `ProviderAdapterRegistryShape` mock from a + * kind-keyed adapter map. + * + * Tests historically assembled a `registry` object with only `getByProvider` + * + `listProviders` populated. Slice D grew the shape with `getByInstance` + * and `listInstances`; this helper fills both in from a single kind-keyed + * input so individual fixtures can stay concise. + * + * Non-default instance ids (e.g. 
`codex_personal`) are not addressable via + * the shim returned here — the legacy test fixtures only ever had + * single-instance-per-driver data anyway. + * + * @module provider/testUtils/providerAdapterRegistryMock + */ +import { + defaultInstanceIdForDriver, + ProviderDriverKind, + type ProviderInstanceId, +} from "@t3tools/contracts"; +import { Effect, PubSub, Record, Result, Stream } from "effect"; + +import { ProviderUnsupportedError, type ProviderAdapterError } from "../Errors.ts"; +import type { ProviderAdapterShape } from "../Services/ProviderAdapter.ts"; +import type { ProviderAdapterRegistryShape } from "../Services/ProviderAdapterRegistry.ts"; + +export type KindAdapterMap = Partial< + Record> +>; + +/** + * Build a `ProviderAdapterRegistryShape` from a kind-keyed adapter map. + * Every adapter present in the map is addressable via both the legacy + * `getByProvider(kind)` path and the new `getByInstance(id)` path (where + * `id = defaultInstanceIdForDriver(kind)`). + */ +export const makeAdapterRegistryMock = (adapters: KindAdapterMap): ProviderAdapterRegistryShape => { + const byInstanceId = new Map>(); + for (const [kind, adapter] of Object.entries(adapters)) { + if (!adapter) continue; + const driverKind = ProviderDriverKind.make(kind); + byInstanceId.set(defaultInstanceIdForDriver(driverKind), adapter); + } + + const getByInstance: ProviderAdapterRegistryShape["getByInstance"] = (instanceId) => { + const adapter = byInstanceId.get(instanceId); + return adapter + ? 
Effect.succeed(adapter) + : Effect.fail( + new ProviderUnsupportedError({ + provider: ProviderDriverKind.make(instanceId), + }), + ); + }; + + return { + getByInstance, + getInstanceInfo: (instanceId) => { + const adapter = byInstanceId.get(instanceId); + if (!adapter) { + return Effect.fail( + new ProviderUnsupportedError({ + provider: ProviderDriverKind.make(instanceId), + }), + ); + } + return Effect.succeed({ + instanceId, + driverKind: ProviderDriverKind.make(adapter.provider), + displayName: undefined, + enabled: true, + continuationIdentity: { + driverKind: ProviderDriverKind.make(adapter.provider), + continuationKey: `${adapter.provider}:instance:${instanceId}`, + }, + }); + }, + listInstances: () => Effect.succeed(Array.from(byInstanceId.keys())), + listProviders: () => + Effect.succeed( + Record.keys( + Record.filterMap(adapters, (adapter, kind) => + adapter !== undefined ? Result.succeed(kind) : Result.failVoid, + ), + ), + ), + // Static test fixtures don't reload; an empty stream is enough to + // satisfy the shape. Tests exercising hot-reload build their own + // stream via the real `ProviderInstanceRegistry`. + streamChanges: Stream.empty, + subscribeChanges: Effect.flatMap(PubSub.unbounded(), (pubsub) => + PubSub.subscribe(pubsub), + ), + }; +}; diff --git a/apps/server/src/provider/unavailableProviderSnapshot.ts b/apps/server/src/provider/unavailableProviderSnapshot.ts new file mode 100644 index 00000000000..97a532a9d98 --- /dev/null +++ b/apps/server/src/provider/unavailableProviderSnapshot.ts @@ -0,0 +1,72 @@ +/** + * Helpers for synthesizing "unavailable" `ServerProvider` snapshots. + * + * When `ServerSettings.providerInstances` (or persisted thread/session + * state) references a driver this build does not ship — typical after a + * downgrade from a fork or a feature-branch test session — the runtime + * needs to surface the entry to the UI without crashing. 
This module + * produces shadow snapshots that satisfy `ServerProvider`'s wire shape + * while signalling unavailability. + * + * @module unavailableProviderSnapshot + */ +import { + ProviderDriverKind, + type ProviderInstanceId, + type ServerProvider, +} from "@t3tools/contracts"; + +import { buildServerProvider } from "./providerSnapshot.ts"; + +export interface UnavailableProviderSnapshotInput { + readonly driverKind: ProviderDriverKind | string; + readonly instanceId: ProviderInstanceId; + readonly displayName?: string | undefined; + readonly accentColor?: string | undefined; + readonly reason: string; + /** + * Optional override for `checkedAt`. Defaulted to `new Date()` so callers + * (notably tests) don't have to pass it. + */ + readonly checkedAt?: string; +} + +/** + * Produce a `ServerProvider` snapshot representing a configured instance + * whose driver the running build does not implement. The result is safe + * to broadcast over the wire and is structured so the web UI can render + * a "missing driver" affordance without special-casing. + */ +export function buildUnavailableProviderSnapshot( + input: UnavailableProviderSnapshotInput, +): ServerProvider { + const checkedAt = input.checkedAt ?? new Date().toISOString(); + const displayName = input.displayName?.trim() || (input.driverKind as string); + + const base = buildServerProvider({ + presentation: { displayName }, + enabled: false, + checkedAt, + models: [], + skills: [], + probe: { + installed: false, + version: null, + status: "error", + auth: { status: "unknown" }, + message: input.reason, + }, + }); + + return { + ...base, + instanceId: input.instanceId, + ...(input.accentColor ? { accentColor: input.accentColor } : {}), + driver: + typeof input.driverKind === "string" + ? 
ProviderDriverKind.make(input.driverKind) + : input.driverKind, + availability: "unavailable", + unavailableReason: input.reason, + }; +} diff --git a/apps/server/src/server.test.ts b/apps/server/src/server.test.ts index 7c62395598b..b48273ff485 100644 --- a/apps/server/src/server.test.ts +++ b/apps/server/src/server.test.ts @@ -17,6 +17,8 @@ import { type OrchestrationEvent, ORCHESTRATION_WS_METHODS, ProjectId, + ProviderDriverKind, + ProviderInstanceId, ResolvedKeybindingRule, ThreadId, WS_METHODS, @@ -27,6 +29,7 @@ import { assert, it } from "@effect/vitest"; import { assertFailure, assertInclude, assertTrue } from "@effect/vitest/utils"; import { Deferred, + DateTime, Duration, Effect, FileSystem, @@ -36,6 +39,7 @@ import { Path, Stream, } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; import { FetchHttpClient, HttpBody, @@ -48,6 +52,8 @@ import { RpcClient, RpcSerialization } from "effect/unstable/rpc"; import * as Socket from "effect/unstable/socket/Socket"; import { vi } from "vitest"; +const TEST_EPOCH = DateTime.makeUnsafe("1970-01-01T00:00:00.000Z"); + import type { ServerConfigShape } from "./config.ts"; import { deriveServerPaths, ServerConfig } from "./config.ts"; import { makeRoutesLayer } from "./server.ts"; @@ -56,13 +62,7 @@ import { CheckpointDiffQuery, type CheckpointDiffQueryShape, } from "./checkpointing/Services/CheckpointDiffQuery.ts"; -import { GitCore, type GitCoreShape } from "./git/Services/GitCore.ts"; -import { GitManager, type GitManagerShape } from "./git/Services/GitManager.ts"; -import { GitStatusBroadcasterLive } from "./git/Layers/GitStatusBroadcaster.ts"; -import { - GitStatusBroadcaster, - type GitStatusBroadcasterShape, -} from "./git/Services/GitStatusBroadcaster.ts"; +import { GitManager, type GitManagerShape } from "./git/GitManager.ts"; import { Keybindings, type KeybindingsShape } from "./keybindings.ts"; import { Open, type OpenShape } from "./open.ts"; import { @@ -75,6 +75,7 @@ import { 
type ProjectionSnapshotQueryShape, } from "./orchestration/Services/ProjectionSnapshotQuery.ts"; import { SqlitePersistenceMemory } from "./persistence/Layers/Sqlite.ts"; +import { PersistenceSqlError } from "./persistence/Errors.ts"; import { ProviderRegistry, type ProviderRegistryShape, @@ -103,6 +104,13 @@ import { import { WorkspaceEntriesLive } from "./workspace/Layers/WorkspaceEntries.ts"; import { WorkspaceFileSystemLive } from "./workspace/Layers/WorkspaceFileSystem.ts"; import { WorkspacePathsLive } from "./workspace/Layers/WorkspacePaths.ts"; +import * as GitVcsDriver from "./vcs/GitVcsDriver.ts"; +import * as VcsDriver from "./vcs/VcsDriver.ts"; +import * as VcsStatusBroadcaster from "./vcs/VcsStatusBroadcaster.ts"; +import * as VcsDriverRegistry from "./vcs/VcsDriverRegistry.ts"; +import * as VcsProvisioningService from "./vcs/VcsProvisioningService.ts"; +import * as GitWorkflowService from "./git/GitWorkflowService.ts"; +import * as SourceControlRepositoryService from "./sourceControl/SourceControlRepositoryService.ts"; import { ServerSecretStoreLive } from "./auth/Layers/ServerSecretStore.ts"; import { ServerAuthLive } from "./auth/Layers/ServerAuth.ts"; @@ -110,7 +118,7 @@ const defaultProjectId = ProjectId.make("project-default"); const defaultThreadId = ThreadId.make("thread-default"); const defaultDesktopBootstrapToken = "test-desktop-bootstrap-token"; const defaultModelSelection = { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", } as const; const testEnvironmentDescriptor = { @@ -193,26 +201,14 @@ const makeDefaultOrchestrationThreadShell = ( }; }; -const workspaceAndProjectServicesLayer = Layer.mergeAll( - WorkspacePathsLive, - WorkspaceEntriesLive.pipe(Layer.provide(WorkspacePathsLive)), - WorkspaceFileSystemLive.pipe( - Layer.provide(WorkspacePathsLive), - Layer.provide(WorkspaceEntriesLive.pipe(Layer.provide(WorkspacePathsLive))), - ), - ProjectFaviconResolverLive, -); - const 
browserOtlpTracingLayer = Layer.mergeAll( FetchHttpClient.layer, OtlpSerialization.layerJson, Layer.succeed(HttpClient.TracerDisabledWhen, () => true), ); -const authTestLayer = ServerAuthLive.pipe( - Layer.provide(SqlitePersistenceMemory), - Layer.provide(ServerSecretStoreLive), -); +const makeAuthTestLayer = () => + ServerAuthLive.pipe(Layer.provide(SqlitePersistenceMemory), Layer.provide(ServerSecretStoreLive)); const makeBrowserOtlpPayload = (spanName: string) => Effect.gen(function* () { @@ -322,9 +318,12 @@ const buildAppUnderTest = (options?: { providerRegistry?: Partial; serverSettings?: Partial; open?: Partial; - gitCore?: Partial; + vcsDriver?: Partial; + vcsDriverRegistry?: Partial; + gitVcsDriver?: Partial; gitManager?: Partial; - gitStatusBroadcaster?: Partial; + sourceControlRepositoryService?: Partial; + vcsStatusBroadcaster?: Partial; projectSetupScriptRunner?: Partial; terminalManager?: Partial; orchestrationEngine?: Partial; @@ -367,25 +366,115 @@ const buildAppUnderTest = (options?: { desktopBootstrapToken: defaultDesktopBootstrapToken, autoBootstrapProjectFromCwd: false, logWebSocketEvents: false, + tailscaleServeEnabled: false, + tailscaleServePort: 443, ...options?.config, }; const layerConfig = Layer.succeed(ServerConfig, config); - const gitCoreLayer = Layer.mock(GitCore)({ + const defaultVcsDriver: VcsDriver.VcsDriverShape = { + capabilities: { + kind: "git", + supportsWorktrees: true, + supportsBookmarks: false, + supportsAtomicSnapshot: false, + supportsPushDefaultRemote: true, + ignoreClassifier: "native", + }, + execute: () => + Effect.succeed({ + exitCode: ChildProcessSpawner.ExitCode(0), + stdout: "", + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, + }), + detectRepository: () => Effect.succeed(null), isInsideWorkTree: () => Effect.succeed(false), listWorkspaceFiles: () => Effect.succeed({ paths: [], truncated: false, + freshness: { + source: "live-local", + observedAt: TEST_EPOCH, + expiresAt: Option.none(), + }, + 
}), + listRemotes: () => + Effect.succeed({ + remotes: [], + freshness: { + source: "live-local", + observedAt: TEST_EPOCH, + expiresAt: Option.none(), + }, }), filterIgnoredPaths: (_cwd, relativePaths) => Effect.succeed(relativePaths), - ...options?.layers?.gitCore, + initRepository: () => Effect.void, + ...options?.layers?.vcsDriver, + }; + const vcsDriverRegistryLayer = Layer.mock(VcsDriverRegistry.VcsDriverRegistry)({ + get: () => Effect.succeed(defaultVcsDriver), + detect: (input) => + defaultVcsDriver.detectRepository(input.cwd).pipe( + Effect.flatMap((repository) => + repository + ? Effect.succeed(repository) + : defaultVcsDriver.isInsideWorkTree(input.cwd).pipe( + Effect.map((isInsideWorkTree) => + isInsideWorkTree + ? { + kind: "git" as const, + rootPath: input.cwd, + metadataPath: null, + freshness: { + source: "live-local" as const, + observedAt: TEST_EPOCH, + expiresAt: Option.none(), + }, + } + : null, + ), + ), + ), + Effect.map((repository) => + repository + ? ({ + kind: repository.kind, + repository, + driver: defaultVcsDriver, + } satisfies VcsDriverRegistry.VcsDriverHandle) + : null, + ), + ), + resolve: (input) => + Effect.succeed({ + kind: + input.requestedKind === "auto" || !input.requestedKind ? "git" : input.requestedKind, + repository: { + kind: + input.requestedKind === "auto" || !input.requestedKind ? 
"git" : input.requestedKind, + rootPath: input.cwd, + metadataPath: null, + freshness: { + source: "live-local", + observedAt: TEST_EPOCH, + expiresAt: Option.none(), + }, + }, + driver: defaultVcsDriver, + }), + ...options?.layers?.vcsDriverRegistry, + }); + const gitVcsDriverLayer = Layer.mock(GitVcsDriver.GitVcsDriver)({ + ...options?.layers?.gitVcsDriver, }); const gitManagerLayer = Layer.mock(GitManager)({ ...options?.layers?.gitManager, }); const workspaceEntriesLayer = WorkspaceEntriesLive.pipe( Layer.provide(WorkspacePathsLive), - Layer.provideMerge(gitCoreLayer), + Layer.provideMerge(vcsDriverRegistryLayer), ); const workspaceAndProjectServicesLayer = Layer.mergeAll( WorkspacePathsLive, @@ -396,11 +485,19 @@ const buildAppUnderTest = (options?: { ), ProjectFaviconResolverLive, ); - const gitStatusBroadcasterLayer = options?.layers?.gitStatusBroadcaster - ? Layer.mock(GitStatusBroadcaster)({ - ...options.layers.gitStatusBroadcaster, + const gitWorkflowLayer = GitWorkflowService.layer.pipe( + Layer.provideMerge(vcsDriverRegistryLayer), + Layer.provideMerge(gitVcsDriverLayer), + Layer.provideMerge(gitManagerLayer), + ); + const vcsProvisioningLayer = VcsProvisioningService.layer.pipe( + Layer.provide(vcsDriverRegistryLayer), + ); + const vcsStatusBroadcasterLayer = options?.layers?.vcsStatusBroadcaster + ? 
Layer.mock(VcsStatusBroadcaster.VcsStatusBroadcaster)({ + ...options.layers.vcsStatusBroadcaster, }) - : GitStatusBroadcasterLive.pipe(Layer.provide(gitManagerLayer)); + : VcsStatusBroadcaster.layer.pipe(Layer.provide(gitWorkflowLayer)); const servedRoutesLayer = HttpRouter.serve(makeRoutesLayer, { disableListenLog: true, @@ -439,9 +536,16 @@ const buildAppUnderTest = (options?: { ...options?.layers?.open, }), ), - Layer.provide(gitCoreLayer), Layer.provide(gitManagerLayer), - Layer.provideMerge(gitStatusBroadcasterLayer), + Layer.provide(gitVcsDriverLayer), + Layer.provide(gitWorkflowLayer), + Layer.provide(vcsProvisioningLayer), + Layer.provide( + Layer.mock(SourceControlRepositoryService.SourceControlRepositoryService)({ + ...options?.layers?.sourceControlRepositoryService, + }), + ), + Layer.provideMerge(vcsStatusBroadcasterLayer), Layer.provide( Layer.mock(ProjectSetupScriptRunner)({ runForThread: () => Effect.succeed({ status: "no-script" as const }), @@ -539,7 +643,7 @@ const buildAppUnderTest = (options?: { ...options?.layers?.repositoryIdentityResolver, }), ), - Layer.provideMerge(authTestLayer), + Layer.provideMerge(makeAuthTestLayer()), Layer.provide(workspaceAndProjectServicesLayer), Layer.provideMerge(FetchHttpClient.layer), Layer.provide(layerConfig), @@ -1845,7 +1949,8 @@ it.layer(NodeServices.layer)("server router seam", (it) => { Effect.gen(function* () { const providers = [ { - provider: "codex" as const, + instanceId: ProviderInstanceId.make("codex"), + driver: ProviderDriverKind.make("codex"), enabled: true, installed: true, version: "1.0.0", @@ -1906,7 +2011,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { assert.deepEqual(second, { version: 1, type: "keybindingsUpdated", - payload: { issues: [] }, + payload: { keybindings: [], issues: [] }, }); }).pipe(Effect.provide(NodeHttpServer.layerTest)), ); @@ -1915,7 +2020,8 @@ it.layer(NodeServices.layer)("server router seam", (it) => { Effect.gen(function* () { const nextProviders = 
[ { - provider: "codex" as const, + instanceId: ProviderInstanceId.make("codex"), + driver: ProviderDriverKind.make("codex"), enabled: true, installed: true, version: "1.0.0", @@ -2064,12 +2170,17 @@ it.layer(NodeServices.layer)("server router seam", (it) => { yield* buildAppUnderTest({ layers: { - gitCore: { + vcsDriver: { isInsideWorkTree: () => Effect.succeed(true), listWorkspaceFiles: () => Effect.succeed({ paths: ["src/tracked.ts"], truncated: false, + freshness: { + source: "live-local", + observedAt: TEST_EPOCH, + expiresAt: Option.none(), + }, }), filterIgnoredPaths: (_cwd, relativePaths) => Effect.succeed( @@ -2164,7 +2275,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { workspaceRoot: missingWorkspaceRoot, createWorkspaceRootIfMissing: true, defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5-codex", }, createdAt: new Date().toISOString(), @@ -2269,9 +2380,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { localStatus: () => Effect.succeed({ isRepo: true, - hasOriginRemote: true, - isDefaultBranch: true, - branch: "main", + hasPrimaryRemote: true, + isDefaultRef: true, + refName: "main", hasWorkingTreeChanges: false, workingTree: { files: [], insertions: 0, deletions: 0 }, }), @@ -2285,9 +2396,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { status: () => Effect.succeed({ isRepo: true, - hasOriginRemote: true, - isDefaultBranch: true, - branch: "main", + hasPrimaryRemote: true, + isDefaultRef: true, + refName: "main", hasWorkingTreeChanges: false, workingTree: { files: [], insertions: 0, deletions: 0 }, hasUpstream: true, @@ -2368,16 +2479,16 @@ it.layer(NodeServices.layer)("server router seam", (it) => { worktreePath: null, }), }, - gitCore: { + gitVcsDriver: { pullCurrentBranch: () => Effect.succeed({ status: "pulled", - branch: "main", - upstreamBranch: "origin/main", + refName: "main", + upstreamRef: "origin/main", }), - listBranches: () => + 
listRefs: () => Effect.succeed({ - branches: [ + refs: [ { name: "main", current: true, @@ -2386,18 +2497,20 @@ it.layer(NodeServices.layer)("server router seam", (it) => { }, ], isRepo: true, - hasOriginRemote: true, + hasPrimaryRemote: true, nextCursor: null, totalCount: 1, }), createWorktree: () => Effect.succeed({ - worktree: { path: "/tmp/wt", branch: "feature/demo" }, + worktree: { path: "/tmp/wt", refName: "feature/demo" }, }), removeWorktree: () => Effect.void, - createBranch: (input) => Effect.succeed({ branch: input.branch }), - checkoutBranch: (input) => Effect.succeed({ branch: input.branch }), - initRepo: () => Effect.void, + createRef: (input) => Effect.succeed({ refName: input.refName }), + switchRef: (input) => Effect.succeed({ refName: input.refName }), + }, + vcsDriver: { + isInsideWorkTree: () => Effect.succeed(true), }, }, }); @@ -2405,13 +2518,13 @@ it.layer(NodeServices.layer)("server router seam", (it) => { const wsUrl = yield* getWsServerUrl("/ws"); const pull = yield* Effect.scoped( - withWsRpcClient(wsUrl, (client) => client[WS_METHODS.gitPull]({ cwd: "/tmp/repo" })), + withWsRpcClient(wsUrl, (client) => client[WS_METHODS.vcsPull]({ cwd: "/tmp/repo" })), ); assert.equal(pull.status, "pulled"); const refreshedStatus = yield* Effect.scoped( withWsRpcClient(wsUrl, (client) => - client[WS_METHODS.gitRefreshStatus]({ cwd: "/tmp/repo" }), + client[WS_METHODS.vcsRefreshStatus]({ cwd: "/tmp/repo" }), ), ); assert.equal(refreshedStatus.isRepo, true); @@ -2455,27 +2568,25 @@ it.layer(NodeServices.layer)("server router seam", (it) => { ); assert.equal(prepared.branch, "feature/demo"); - const branches = yield* Effect.scoped( - withWsRpcClient(wsUrl, (client) => - client[WS_METHODS.gitListBranches]({ cwd: "/tmp/repo" }), - ), + const refs = yield* Effect.scoped( + withWsRpcClient(wsUrl, (client) => client[WS_METHODS.vcsListRefs]({ cwd: "/tmp/repo" })), ); - assert.equal(branches.branches[0]?.name, "main"); + assert.equal(refs.refs[0]?.name, "main"); 
const worktree = yield* Effect.scoped( withWsRpcClient(wsUrl, (client) => - client[WS_METHODS.gitCreateWorktree]({ + client[WS_METHODS.vcsCreateWorktree]({ cwd: "/tmp/repo", - branch: "main", + refName: "main", path: null, }), ), ); - assert.equal(worktree.worktree.branch, "feature/demo"); + assert.equal(worktree.worktree.refName, "feature/demo"); yield* Effect.scoped( withWsRpcClient(wsUrl, (client) => - client[WS_METHODS.gitRemoveWorktree]({ + client[WS_METHODS.vcsRemoveWorktree]({ cwd: "/tmp/repo", path: "/tmp/wt", }), @@ -2484,25 +2595,25 @@ it.layer(NodeServices.layer)("server router seam", (it) => { yield* Effect.scoped( withWsRpcClient(wsUrl, (client) => - client[WS_METHODS.gitCreateBranch]({ + client[WS_METHODS.vcsCreateRef]({ cwd: "/tmp/repo", - branch: "feature/new", + refName: "feature/new", }), ), ); yield* Effect.scoped( withWsRpcClient(wsUrl, (client) => - client[WS_METHODS.gitCheckout]({ + client[WS_METHODS.vcsSwitchRef]({ cwd: "/tmp/repo", - branch: "main", + refName: "main", }), ), ); yield* Effect.scoped( withWsRpcClient(wsUrl, (client) => - client[WS_METHODS.gitInit]({ + client[WS_METHODS.vcsInit]({ cwd: "/tmp/repo", }), ), @@ -2522,7 +2633,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { let statusCalls = 0; yield* buildAppUnderTest({ layers: { - gitCore: { + gitVcsDriver: { pullCurrentBranch: () => Effect.fail(gitError), }, gitManager: { @@ -2541,9 +2652,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { localStatus: () => Effect.succeed({ isRepo: true, - hasOriginRemote: true, - isDefaultBranch: true, - branch: "main", + hasPrimaryRemote: true, + isDefaultRef: true, + refName: "main", hasWorkingTreeChanges: true, workingTree: { files: [], insertions: 0, deletions: 0 }, }), @@ -2562,9 +2673,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { statusCalls += 1; return { isRepo: true, - hasOriginRemote: true, - isDefaultBranch: true, - branch: "main", + hasPrimaryRemote: true, + isDefaultRef: true, + 
refName: "main", hasWorkingTreeChanges: true, workingTree: { files: [], insertions: 0, deletions: 0 }, hasUpstream: true, @@ -2579,7 +2690,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { const wsUrl = yield* getWsServerUrl("/ws"); const result = yield* Effect.scoped( - withWsRpcClient(wsUrl, (client) => client[WS_METHODS.gitPull]({ cwd: "/tmp/repo" })).pipe( + withWsRpcClient(wsUrl, (client) => client[WS_METHODS.vcsPull]({ cwd: "/tmp/repo" })).pipe( Effect.result, ), ); @@ -2618,9 +2729,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { localStatus: () => Effect.succeed({ isRepo: true, - hasOriginRemote: true, - isDefaultBranch: false, - branch: "feature/demo", + hasPrimaryRemote: true, + isDefaultRef: false, + refName: "feature/demo", hasWorkingTreeChanges: true, workingTree: { files: [], insertions: 0, deletions: 0 }, }), @@ -2639,9 +2750,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { statusCalls += 1; return { isRepo: true, - hasOriginRemote: true, - isDefaultBranch: false, - branch: "feature/demo", + hasPrimaryRemote: true, + isDefaultRef: false, + refName: "feature/demo", hasWorkingTreeChanges: true, workingTree: { files: [], insertions: 0, deletions: 0 }, hasUpstream: true, @@ -2676,12 +2787,12 @@ it.layer(NodeServices.layer)("server router seam", (it) => { Effect.gen(function* () { yield* buildAppUnderTest({ layers: { - gitCore: { + gitVcsDriver: { pullCurrentBranch: () => Effect.succeed({ status: "pulled" as const, - branch: "main", - upstreamBranch: "origin/main", + refName: "main", + upstreamRef: "origin/main", }), }, gitManager: { @@ -2690,9 +2801,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { localStatus: () => Effect.succeed({ isRepo: true, - hasOriginRemote: true, - isDefaultBranch: true, - branch: "main", + hasPrimaryRemote: true, + isDefaultRef: true, + refName: "main", hasWorkingTreeChanges: false, workingTree: { files: [], insertions: 0, deletions: 0 }, }), @@ -2712,7 +2823,7 
@@ it.layer(NodeServices.layer)("server router seam", (it) => { const wsUrl = yield* getWsServerUrl("/ws"); const startedAt = Date.now(); const result = yield* Effect.scoped( - withWsRpcClient(wsUrl, (client) => client[WS_METHODS.gitPull]({ cwd: "/tmp/repo" })), + withWsRpcClient(wsUrl, (client) => client[WS_METHODS.vcsPull]({ cwd: "/tmp/repo" })), ); const elapsedMs = Date.now() - startedAt; @@ -2727,15 +2838,18 @@ it.layer(NodeServices.layer)("server router seam", (it) => { Effect.gen(function* () { yield* buildAppUnderTest({ layers: { + vcsDriver: { + isInsideWorkTree: () => Effect.succeed(true), + }, gitManager: { invalidateLocalStatus: () => Effect.void, invalidateRemoteStatus: () => Effect.void, localStatus: () => Effect.succeed({ isRepo: true, - hasOriginRemote: true, - isDefaultBranch: false, - branch: "feature/demo", + hasPrimaryRemote: true, + isDefaultRef: false, + refName: "feature/demo", hasWorkingTreeChanges: false, workingTree: { files: [], insertions: 0, deletions: 0 }, }), @@ -2800,6 +2914,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { yield* buildAppUnderTest({ layers: { + vcsDriver: { + isInsideWorkTree: () => Effect.succeed(true), + }, gitManager: { invalidateLocalStatus: () => Effect.void, invalidateRemoteStatus: () => Effect.void, @@ -2809,9 +2926,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { Effect.andThen( Effect.succeed({ isRepo: true, - hasOriginRemote: true, - isDefaultBranch: false, - branch: "feature/demo", + hasPrimaryRemote: true, + isDefaultRef: false, + refName: "feature/demo", hasWorkingTreeChanges: false, workingTree: { files: [], insertions: 0, deletions: 0 }, }), @@ -2983,6 +3100,34 @@ it.layer(NodeServices.layer)("server router seam", (it) => { }).pipe(Effect.provide(NodeHttpServer.layerTest)), ); + it.effect("routes websocket rpc orchestration shell snapshot errors", () => + Effect.gen(function* () { + const projectionError = new PersistenceSqlError({ + operation: 
"ProjectionSnapshotQuery.getShellSnapshot:test", + detail: "failed to read projection shell snapshot", + }); + yield* buildAppUnderTest({ + layers: { + projectionSnapshotQuery: { + getShellSnapshot: () => Effect.fail(projectionError), + }, + }, + }); + + const wsUrl = yield* getWsServerUrl("/ws"); + const result = yield* Effect.scoped( + withWsRpcClient(wsUrl, (client) => + client[ORCHESTRATION_WS_METHODS.subscribeShell]({}).pipe(Stream.runCollect), + ).pipe(Effect.result), + ); + + assertTrue(result._tag === "Failure"); + assertTrue(result.failure._tag === "OrchestrationGetSnapshotError"); + assertTrue(result.failure.cause instanceof Error); + assert.include(result.failure.cause.message, projectionError.message); + }).pipe(Effect.provide(NodeHttpServer.layerTest)), + ); + it.effect("enriches replayed project events with repository identity metadata", () => Effect.gen(function* () { const repositoryIdentity = { @@ -3475,9 +3620,9 @@ it.layer(NodeServices.layer)("server router seam", (it) => { const refreshStatus = vi.fn((_: string) => Effect.succeed({ isRepo: true, - hasOriginRemote: true, - isDefaultBranch: false, - branch: "t3code/bootstrap-branch", + hasPrimaryRemote: true, + isDefaultRef: false, + refName: "t3code/bootstrap-refName", hasWorkingTreeChanges: false, workingTree: { files: [], @@ -3490,13 +3635,14 @@ it.layer(NodeServices.layer)("server router seam", (it) => { pr: null, }), ); - const createWorktree = vi.fn((_: Parameters[0]) => - Effect.succeed({ - worktree: { - branch: "t3code/bootstrap-branch", - path: "/tmp/bootstrap-worktree", - }, - }), + const createWorktree = vi.fn( + (_: Parameters[0]) => + Effect.succeed({ + worktree: { + refName: "t3code/bootstrap-refName", + path: "/tmp/bootstrap-worktree", + }, + }), ); const runForThread = vi.fn( (_: Parameters[0]) => @@ -3511,10 +3657,10 @@ it.layer(NodeServices.layer)("server router seam", (it) => { yield* buildAppUnderTest({ layers: { - gitCore: { + gitVcsDriver: { createWorktree, }, - 
gitStatusBroadcaster: { + vcsStatusBroadcaster: { refreshStatus, }, orchestrationEngine: { @@ -3562,7 +3708,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { prepareWorktree: { projectCwd: "/tmp/project", baseBranch: "main", - branch: "t3code/bootstrap-branch", + branch: "t3code/bootstrap-refName", }, runSetupScript: true, }, @@ -3584,8 +3730,8 @@ it.layer(NodeServices.layer)("server router seam", (it) => { ); assert.deepEqual(createWorktree.mock.calls[0]?.[0], { cwd: "/tmp/project", - branch: "main", - newBranch: "t3code/bootstrap-branch", + refName: "main", + newRefName: "t3code/bootstrap-refName", path: null, }); assert.deepEqual(runForThread.mock.calls[0]?.[0], { @@ -3615,13 +3761,14 @@ it.layer(NodeServices.layer)("server router seam", (it) => { it.effect("records setup-script failures without aborting bootstrap turn start", () => Effect.gen(function* () { const dispatchedCommands: Array = []; - const createWorktree = vi.fn((_: Parameters[0]) => - Effect.succeed({ - worktree: { - branch: "t3code/bootstrap-branch", - path: "/tmp/bootstrap-worktree", - }, - }), + const createWorktree = vi.fn( + (_: Parameters[0]) => + Effect.succeed({ + worktree: { + refName: "t3code/bootstrap-refName", + path: "/tmp/bootstrap-worktree", + }, + }), ); const runForThread = vi.fn( (_: Parameters[0]) => @@ -3630,7 +3777,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { yield* buildAppUnderTest({ layers: { - gitCore: { + gitVcsDriver: { createWorktree, }, orchestrationEngine: { @@ -3678,7 +3825,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { prepareWorktree: { projectCwd: "/tmp/project", baseBranch: "main", - branch: "t3code/bootstrap-branch", + branch: "t3code/bootstrap-refName", }, runSetupScript: true, }, @@ -3708,13 +3855,14 @@ it.layer(NodeServices.layer)("server router seam", (it) => { it.effect("does not misattribute setup activity dispatch failures as setup launch failures", () => Effect.gen(function* () { const 
dispatchedCommands: Array = []; - const createWorktree = vi.fn((_: Parameters[0]) => - Effect.succeed({ - worktree: { - branch: "t3code/bootstrap-branch", - path: "/tmp/bootstrap-worktree", - }, - }), + const createWorktree = vi.fn( + (_: Parameters[0]) => + Effect.succeed({ + worktree: { + refName: "t3code/bootstrap-refName", + path: "/tmp/bootstrap-worktree", + }, + }), ); const runForThread = vi.fn( (_: Parameters[0]) => @@ -3730,7 +3878,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { yield* buildAppUnderTest({ layers: { - gitCore: { + gitVcsDriver: { createWorktree, }, orchestrationEngine: { @@ -3794,7 +3942,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { prepareWorktree: { projectCwd: "/tmp/project", baseBranch: "main", - branch: "t3code/bootstrap-branch", + branch: "t3code/bootstrap-refName", }, runSetupScript: true, }, @@ -3826,13 +3974,14 @@ it.layer(NodeServices.layer)("server router seam", (it) => { it.effect("cleans up created bootstrap threads when worktree creation defects", () => Effect.gen(function* () { const dispatchedCommands: Array = []; - const createWorktree = vi.fn((_: Parameters[0]) => - Effect.die(new Error("worktree exploded")), + const createWorktree = vi.fn( + (_: Parameters[0]) => + Effect.die(new Error("worktree exploded")), ); yield* buildAppUnderTest({ layers: { - gitCore: { + gitVcsDriver: { createWorktree, }, orchestrationEngine: { @@ -3877,7 +4026,7 @@ it.layer(NodeServices.layer)("server router seam", (it) => { prepareWorktree: { projectCwd: "/tmp/project", baseBranch: "main", - branch: "t3code/bootstrap-branch", + branch: "t3code/bootstrap-refName", }, runSetupScript: false, }, diff --git a/apps/server/src/server.ts b/apps/server/src/server.ts index 9a8885ffc0f..939b2c8abf8 100644 --- a/apps/server/src/server.ts +++ b/apps/server/src/server.ts @@ -16,28 +16,23 @@ import { OpenLive } from "./open.ts"; import { layerConfig as SqlitePersistenceLayerLive } from "./persistence/Layers/Sqlite.ts"; 
import { ServerLifecycleEventsLive } from "./serverLifecycleEvents.ts"; import { AnalyticsServiceLayerLive } from "./telemetry/Layers/AnalyticsService.ts"; -import { makeEventNdjsonLogger } from "./provider/Layers/EventNdjsonLogger.ts"; import { ProviderSessionDirectoryLive } from "./provider/Layers/ProviderSessionDirectory.ts"; import { ProviderSessionRuntimeRepositoryLive } from "./persistence/Layers/ProviderSessionRuntime.ts"; -import { makeCodexAdapterLive } from "./provider/Layers/CodexAdapter.ts"; -import { makeClaudeAdapterLive } from "./provider/Layers/ClaudeAdapter.ts"; -import { makeCopilotAdapterLive } from "./provider/Layers/CopilotAdapter.ts"; -import { makeCursorAdapterLive } from "./provider/Layers/CursorAdapter.ts"; -import { makeGeminiCliAdapterLive } from "./provider/Layers/GeminiCliAdapter.ts"; -import { makeOpenCodeAdapterLive } from "./provider/Layers/OpenCodeAdapter.ts"; -import { makeAmpAdapterLive } from "./provider/Layers/AmpAdapter.ts"; -import { makeKiloAdapterLive } from "./provider/Layers/KiloAdapter.ts"; import { ProviderAdapterRegistryLive } from "./provider/Layers/ProviderAdapterRegistry.ts"; -import { makeProviderServiceLive } from "./provider/Layers/ProviderService.ts"; +import { ProviderEventLoggersLive } from "./provider/Layers/ProviderEventLoggers.ts"; +import { ProviderServiceLive } from "./provider/Layers/ProviderService.ts"; import { ProviderSessionReaperLive } from "./provider/Layers/ProviderSessionReaper.ts"; +import { OpenCodeRuntimeLive } from "./provider/opencodeRuntime.ts"; import { CheckpointDiffQueryLive } from "./checkpointing/Layers/CheckpointDiffQuery.ts"; import { CheckpointStoreLive } from "./checkpointing/Layers/CheckpointStore.ts"; -import { GitCoreLive } from "./git/Layers/GitCore.ts"; -import { GitHubCliLive } from "./git/Layers/GitHubCli.ts"; -import { GitStatusBroadcasterLive } from "./git/Layers/GitStatusBroadcaster.ts"; -import { RoutingTextGenerationLive } from "./git/Layers/RoutingTextGeneration.ts"; 
+import * as AzureDevOpsCli from "./sourceControl/AzureDevOpsCli.ts"; +import * as BitbucketApi from "./sourceControl/BitbucketApi.ts"; +import * as GitHubCli from "./sourceControl/GitHubCli.ts"; +import * as GitLabCli from "./sourceControl/GitLabCli.ts"; +import * as TextGeneration from "./textGeneration/TextGeneration.ts"; +import { ProviderInstanceRegistryHydrationLive } from "./provider/Layers/ProviderInstanceRegistryHydration.ts"; import { TerminalManagerLive } from "./terminal/Layers/Manager.ts"; -import { GitManagerLive } from "./git/Layers/GitManager.ts"; +import * as GitManager from "./git/GitManager.ts"; import { KeybindingsLive } from "./keybindings.ts"; import { ServerRuntimeStartup, ServerRuntimeStartupLive } from "./serverRuntimeStartup.ts"; import { OrchestrationReactorLive } from "./orchestration/Layers/OrchestrationReactor.ts"; @@ -53,6 +48,15 @@ import { RepositoryIdentityResolverLive } from "./project/Layers/RepositoryIdent import { WorkspaceEntriesLive } from "./workspace/Layers/WorkspaceEntries.ts"; import { WorkspaceFileSystemLive } from "./workspace/Layers/WorkspaceFileSystem.ts"; import { WorkspacePathsLive } from "./workspace/Layers/WorkspacePaths.ts"; +import * as GitVcsDriver from "./vcs/GitVcsDriver.ts"; +import * as VcsDriverRegistry from "./vcs/VcsDriverRegistry.ts"; +import * as VcsProjectConfig from "./vcs/VcsProjectConfig.ts"; +import * as VcsProcess from "./vcs/VcsProcess.ts"; +import * as VcsProvisioningService from "./vcs/VcsProvisioningService.ts"; +import * as VcsStatusBroadcaster from "./vcs/VcsStatusBroadcaster.ts"; +import * as GitWorkflowService from "./git/GitWorkflowService.ts"; +import * as SourceControlProviderRegistry from "./sourceControl/SourceControlProviderRegistry.ts"; +import * as SourceControlRepositoryService from "./sourceControl/SourceControlRepositoryService.ts"; import { ProjectSetupScriptRunnerLive } from "./project/Layers/ProjectSetupScriptRunner.ts"; import { ObservabilityLive } from 
"./observability/Layers/Observability.ts"; import { ServerEnvironmentLive } from "./environment/Layers/ServerEnvironment.ts"; @@ -81,6 +85,7 @@ import { orchestrationSnapshotRouteLayer, } from "./orchestration/http.ts"; import { NetService } from "@t3tools/shared/Net"; +import { disableTailscaleServe, ensureTailscaleServe } from "@t3tools/tailscale"; const PtyAdapterLive = Layer.unwrap( Effect.gen(function* () { @@ -139,80 +144,76 @@ const ReactorLayerLive = Layer.empty.pipe( Layer.provideMerge(RuntimeReceiptBusLive), ); -const CheckpointingLayerLive = Layer.empty.pipe( - Layer.provideMerge(CheckpointDiffQueryLive), - Layer.provideMerge(CheckpointStoreLive), -); - const ProviderSessionDirectoryLayerLive = ProviderSessionDirectoryLive.pipe( Layer.provide(ProviderSessionRuntimeRepositoryLive), ); -const ProviderLayerLive = Layer.unwrap( - Effect.gen(function* () { - const { providerEventLogPath } = yield* ServerConfig; - const nativeEventLogger = yield* makeEventNdjsonLogger(providerEventLogPath, { - stream: "native", - }); - const canonicalEventLogger = yield* makeEventNdjsonLogger(providerEventLogPath, { - stream: "canonical", - }); - const codexAdapterLayer = makeCodexAdapterLive( - nativeEventLogger ? { nativeEventLogger } : undefined, - ); - const claudeAdapterLayer = makeClaudeAdapterLive( - nativeEventLogger ? { nativeEventLogger } : undefined, - ); - const copilotAdapterLayer = makeCopilotAdapterLive( - nativeEventLogger ? { nativeEventLogger } : undefined, - ); - const cursorAdapterLayer = makeCursorAdapterLive( - nativeEventLogger ? 
{ nativeEventLogger } : undefined, - ); - const geminiCliAdapterLayer = makeGeminiCliAdapterLive(); - const openCodeAdapterLayer = makeOpenCodeAdapterLive(); - const ampAdapterLayer = makeAmpAdapterLive(); - const kiloAdapterLayer = makeKiloAdapterLive(); - const adapterRegistryLayer = ProviderAdapterRegistryLive.pipe( - Layer.provide(codexAdapterLayer), - Layer.provide(claudeAdapterLayer), - Layer.provide(copilotAdapterLayer), - Layer.provide(cursorAdapterLayer), - Layer.provide(geminiCliAdapterLayer), - Layer.provide(openCodeAdapterLayer), - Layer.provide(ampAdapterLayer), - Layer.provide(kiloAdapterLayer), - Layer.provideMerge(ProviderSessionDirectoryLayerLive), - ); - return makeProviderServiceLive( - canonicalEventLogger ? { canonicalEventLogger } : undefined, - ).pipe( - Layer.provide(adapterRegistryLayer), - Layer.provideMerge(ProviderSessionDirectoryLayerLive), - ); - }), +// `ProviderAdapterRegistryLive` is now a facade that resolves kind → adapter +// by looking up the default `ProviderInstance` per driver in the instance +// registry. Adapter construction itself moved inside each driver's +// `create()`; `ProviderEventLoggersLive` owns the shared native/canonical +// NDJSON writers and is provided at the outer runtime layer so both +// `ProviderService` and the per-instance drivers read the same logger pair. 
+const ProviderLayerLive = ProviderServiceLive.pipe( + Layer.provide(ProviderAdapterRegistryLive), + Layer.provideMerge(ProviderSessionDirectoryLayerLive), ); const PersistenceLayerLive = Layer.empty.pipe(Layer.provideMerge(SqlitePersistenceLayerLive)); -const GitManagerLayerLive = GitManagerLive.pipe( +const VcsDriverRegistryLayerLive = VcsDriverRegistry.layer.pipe( + Layer.provide(VcsProjectConfig.layer), +); + +const SourceControlProviderRegistryLayerLive = SourceControlProviderRegistry.layer.pipe( + Layer.provide( + Layer.mergeAll(AzureDevOpsCli.layer, BitbucketApi.layer, GitHubCli.layer, GitLabCli.layer), + ), + Layer.provideMerge(GitVcsDriver.layer), + Layer.provideMerge(VcsDriverRegistryLayerLive), +); + +const GitManagerLayerLive = GitManager.layer.pipe( Layer.provideMerge(ProjectSetupScriptRunnerLive), - Layer.provideMerge(GitCoreLive), - Layer.provideMerge(GitHubCliLive), - Layer.provideMerge(RoutingTextGenerationLive), + Layer.provideMerge(GitVcsDriver.layer), + Layer.provideMerge(SourceControlProviderRegistryLayerLive), + Layer.provideMerge(TextGeneration.layer), ); const GitLayerLive = Layer.empty.pipe( Layer.provideMerge(GitManagerLayerLive), - Layer.provideMerge(GitStatusBroadcasterLive.pipe(Layer.provide(GitManagerLayerLive))), - Layer.provideMerge(GitCoreLive), + Layer.provideMerge(GitVcsDriver.layer), +); + +const GitWorkflowLayerLive = GitWorkflowService.layer.pipe( + Layer.provideMerge(VcsDriverRegistryLayerLive), + Layer.provideMerge(GitLayerLive), +); + +const SourceControlRepositoryServiceLayerLive = SourceControlRepositoryService.layer.pipe( + Layer.provideMerge(GitVcsDriver.layer), + Layer.provideMerge(SourceControlProviderRegistryLayerLive), +); + +const VcsLayerLive = Layer.empty.pipe( + Layer.provideMerge(VcsProjectConfig.layer), + Layer.provideMerge(VcsDriverRegistryLayerLive), + Layer.provideMerge(VcsProvisioningService.layer.pipe(Layer.provide(VcsDriverRegistryLayerLive))), + Layer.provideMerge(GitWorkflowLayerLive), + 
Layer.provideMerge(SourceControlRepositoryServiceLayerLive), + Layer.provideMerge(VcsStatusBroadcaster.layer.pipe(Layer.provide(GitWorkflowLayerLive))), +); + +const CheckpointingLayerLive = Layer.empty.pipe( + Layer.provideMerge(CheckpointDiffQueryLive), + Layer.provideMerge(CheckpointStoreLive.pipe(Layer.provide(VcsDriverRegistryLayerLive))), ); const TerminalLayerLive = TerminalManagerLive.pipe(Layer.provide(PtyAdapterLive)); const WorkspaceEntriesLayerLive = WorkspaceEntriesLive.pipe( Layer.provide(WorkspacePathsLive), - Layer.provideMerge(GitCoreLive), + Layer.provideMerge(VcsDriverRegistryLayerLive), ); const WorkspaceFileSystemLayerLive = WorkspaceFileSystemLive.pipe( @@ -236,22 +237,44 @@ const ProviderRuntimeLayerLive = ProviderSessionReaperLive.pipe( Layer.provideMerge(OrchestrationLayerLive), ); -const RuntimeDependenciesLive = ReactorLayerLive.pipe( +const RuntimeCoreDependenciesLive = ReactorLayerLive.pipe( // Core Services Layer.provideMerge(CheckpointingLayerLive), + Layer.provideMerge(SourceControlProviderRegistryLayerLive), Layer.provideMerge(GitLayerLive), + Layer.provideMerge(VcsLayerLive), Layer.provideMerge(ProviderRuntimeLayerLive), Layer.provideMerge(TerminalLayerLive), Layer.provideMerge(PersistenceLayerLive), Layer.provideMerge(KeybindingsLive), Layer.provideMerge(ProviderRegistryLive), + // The instance registry is the new routing keystone — text generation, + // adapter lookup, and runtime ingestion all resolve `ProviderInstanceId` + // through this layer. Built-in drivers come from `BUILT_IN_DRIVERS`; + // `providerInstances` hydration merges `settings.providers.` + // with explicit `providerInstances` entries on boot. + Layer.provideMerge(ProviderInstanceRegistryHydrationLive), + // Shared native/canonical NDJSON writers used by both the per-instance + // drivers (native stream, written from inside each `Adapter`) and + // `ProviderService` (canonical stream, written after event normalization). 
+ // Provided once at the runtime level so every consumer sees the same + // logger instances. + Layer.provideMerge(ProviderEventLoggersLive), + // `OpenCodeDriver.create()` yields `OpenCodeRuntime`; previously the old + // `ProviderRegistryLive` pulled `OpenCodeRuntimeLive` in for itself, but + // the rewritten registry reads snapshots off the instance registry and + // no longer transitively provides it. Exposing it at the runtime level + // keeps a single Live for all opencode consumers. + Layer.provideMerge(OpenCodeRuntimeLive), Layer.provideMerge(ServerSettingsLive), Layer.provideMerge(WorkspaceLayerLive), Layer.provideMerge(ProjectFaviconResolverLive), Layer.provideMerge(RepositoryIdentityResolverLive), Layer.provideMerge(ServerEnvironmentLive), Layer.provideMerge(AuthLayerLive), +); +const RuntimeDependenciesLive = RuntimeCoreDependenciesLive.pipe( // Misc. Layer.provideMerge(AnalyticsServiceLayerLive), Layer.provideMerge(OpenLive), @@ -318,6 +341,57 @@ export const makeServerLayer = Layer.unwrap( () => clearPersistedServerRuntimeState(config.serverRuntimeStatePath), ), ); + const tailscaleServeLayer = config.tailscaleServeEnabled + ? Layer.effectDiscard( + Effect.acquireRelease( + Effect.gen(function* () { + const server = yield* HttpServer.HttpServer; + const address = server.address; + if (typeof address === "string" || !("port" in address)) { + return null; + } + + const localPort = address.port; + return yield* ensureTailscaleServe({ + localPort, + servePort: config.tailscaleServePort, + localHost: "127.0.0.1", + }).pipe( + Effect.as({ localPort, servePort: config.tailscaleServePort }), + Effect.tap(() => + Effect.logInfo("Tailscale Serve configured", { + localPort, + servePort: config.tailscaleServePort, + }), + ), + Effect.catch((cause) => + Effect.logWarning("Failed to configure Tailscale Serve", { + cause, + localPort, + servePort: config.tailscaleServePort, + }).pipe(Effect.as(null)), + ), + ); + }), + (configured) => + configured + ? 
disableTailscaleServe({ servePort: configured.servePort }).pipe( + Effect.tap(() => + Effect.logInfo("Tailscale Serve disabled", { + servePort: configured.servePort, + }), + ), + Effect.catch((cause) => + Effect.logWarning("Failed to disable Tailscale Serve", { + cause, + servePort: configured.servePort, + }), + ), + ) + : Effect.void, + ), + ) + : Layer.empty; const serverApplicationLayer = Layer.mergeAll( HttpRouter.serve(makeRoutesLayer, { @@ -325,6 +399,7 @@ export const makeServerLayer = Layer.unwrap( }), httpListeningLayer, runtimeStateLayer, + tailscaleServeLayer, ); return serverApplicationLayer.pipe( @@ -332,6 +407,7 @@ export const makeServerLayer = Layer.unwrap( Layer.provideMerge(HttpServerLive), Layer.provide(ObservabilityLive), Layer.provideMerge(FetchHttpClient.layer), + Layer.provideMerge(VcsProcess.layer), Layer.provideMerge(PlatformServicesLive), ); }), diff --git a/apps/server/src/serverRuntimeStartup.test.ts b/apps/server/src/serverRuntimeStartup.test.ts index 836b71c7eb4..91b4b215c10 100644 --- a/apps/server/src/serverRuntimeStartup.test.ts +++ b/apps/server/src/serverRuntimeStartup.test.ts @@ -1,5 +1,5 @@ import * as NodeServices from "@effect/platform-node/NodeServices"; -import { DEFAULT_MODEL_BY_PROVIDER, ProjectId, ThreadId } from "@t3tools/contracts"; +import { DEFAULT_MODEL, ProjectId, ProviderInstanceId, ThreadId } from "@t3tools/contracts"; import { assert, it } from "@effect/vitest"; import { Deferred, Effect, Fiber, Option, Ref, Stream } from "effect"; @@ -21,8 +21,8 @@ import { it("uses the canonical Codex default for auto-bootstrapped model selection", () => { assert.deepStrictEqual(getAutoBootstrapDefaultModelSelection(), { - provider: "codex", - model: DEFAULT_MODEL_BY_PROVIDER.codex, + instanceId: ProviderInstanceId.make("codex"), + model: DEFAULT_MODEL, }); }); diff --git a/apps/server/src/serverRuntimeStartup.ts b/apps/server/src/serverRuntimeStartup.ts index 99728f681f4..1f164860a6f 100644 --- 
a/apps/server/src/serverRuntimeStartup.ts +++ b/apps/server/src/serverRuntimeStartup.ts @@ -1,9 +1,10 @@ import { CommandId, - DEFAULT_MODEL_BY_PROVIDER, + DEFAULT_MODEL, DEFAULT_PROVIDER_INTERACTION_MODE, type ModelSelection, ProjectId, + ProviderInstanceId, ThreadId, } from "@t3tools/contracts"; import { @@ -154,8 +155,8 @@ export const launchStartupHeartbeat = recordStartupHeartbeat.pipe( ); export const getAutoBootstrapDefaultModelSelection = (): ModelSelection => ({ - provider: "codex", - model: DEFAULT_MODEL_BY_PROVIDER.codex, + instanceId: ProviderInstanceId.make("codex"), + model: DEFAULT_MODEL, }); export const resolveWelcomeBase = Effect.gen(function* () { diff --git a/apps/server/src/serverSettings.test.ts b/apps/server/src/serverSettings.test.ts index f9e0542f9a6..f11c5bf4519 100644 --- a/apps/server/src/serverSettings.test.ts +++ b/apps/server/src/serverSettings.test.ts @@ -1,5 +1,12 @@ import * as NodeServices from "@effect/platform-node/NodeServices"; -import { DEFAULT_SERVER_SETTINGS, ServerSettingsPatch } from "@t3tools/contracts"; +import { + DEFAULT_SERVER_SETTINGS, + ProviderDriverKind, + ProviderInstanceId, + ServerSettings, + ServerSettingsPatch, +} from "@t3tools/contracts"; +import { createModelSelection } from "@t3tools/shared/model"; import { assert, it } from "@effect/vitest"; import { Effect, FileSystem, Layer, Schema } from "effect"; import { ServerConfig } from "./config.ts"; @@ -24,44 +31,44 @@ it.layer(NodeServices.layer)("server settings", (it) => { assert.deepEqual(decodePatch({ providers: { codex: { binaryPath: "/tmp/codex" } } }), { providers: { codex: { binaryPath: "/tmp/codex" } }, }); - assert.deepEqual( - decodePatch({ - providers: { - copilot: { - binaryPath: "/tmp/copilot", - configDir: "/tmp/copilot-config", - }, - }, - }), - { - providers: { - copilot: { - binaryPath: "/tmp/copilot", - configDir: "/tmp/copilot-config", - }, - }, - }, - ); assert.deepEqual( decodePatch({ textGenerationModelSelection: { - options: { - 
fastMode: false, - }, + options: [{ id: "fastMode", value: false }], }, }), { textGenerationModelSelection: { - options: { - fastMode: false, - }, + options: [{ id: "fastMode", value: false }], }, }, ); }), ); + it.effect( + "decodes legacy object-shaped textGenerationModelSelection.options from settings.json", + () => + Effect.sync(() => { + const decode = Schema.decodeUnknownSync(ServerSettings); + + const decoded = decode({ + textGenerationModelSelection: { + provider: ProviderDriverKind.make("codex"), + model: "gpt-5.4-mini", + options: { reasoningEffort: "low" }, + }, + }); + + assert.deepEqual(decoded.textGenerationModelSelection, { + instanceId: ProviderInstanceId.make("codex"), + model: "gpt-5.4-mini", + options: [{ id: "reasoningEffort", value: "low" }], + }); + }), + ); + it.effect("deep merges nested settings updates without dropping siblings", () => Effect.gen(function* () { const serverSettings = yield* ServerSettingsService; @@ -78,12 +85,16 @@ it.layer(NodeServices.layer)("server settings", (it) => { }, }, textGenerationModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.model, - options: { - reasoningEffort: "high", - fastMode: true, - }, + options: createModelSelection( + ProviderInstanceId.make("codex"), + DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.model, + [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: true }, + ], + ).options!, }, }); @@ -94,9 +105,7 @@ it.layer(NodeServices.layer)("server settings", (it) => { }, }, textGenerationModelSelection: { - options: { - fastMode: false, - }, + options: [{ id: "fastMode", value: false }], }, }); @@ -104,22 +113,27 @@ it.layer(NodeServices.layer)("server settings", (it) => { enabled: true, binaryPath: "/opt/homebrew/bin/codex", homePath: "/Users/julius/.codex", + shadowHomePath: "", customModels: [], }); assert.deepEqual(next.providers.claudeAgent, { enabled: true, binaryPath: 
"/usr/local/bin/claude", + homePath: "", customModels: ["claude-custom"], launchArgs: "", }); - assert.deepEqual(next.textGenerationModelSelection, { - provider: "codex", - model: DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.model, - options: { - reasoningEffort: "high", - fastMode: false, - }, - }); + assert.deepEqual( + next.textGenerationModelSelection, + createModelSelection( + ProviderInstanceId.make("codex"), + DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.model, + [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: false }, + ], + ), + ); }).pipe(Effect.provide(makeServerSettingsLayer())), ); @@ -130,11 +144,13 @@ it.layer(NodeServices.layer)("server settings", (it) => { // Start with Claude text generation selection yield* serverSettings.updateSettings({ textGenerationModelSelection: { - provider: "claudeAgent", + instanceId: ProviderInstanceId.make("claudeAgent"), model: "claude-sonnet-4-6", - options: { - effort: "high", - }, + options: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + [{ id: "effort", value: "high" }], + ).options!, }, }); @@ -142,21 +158,104 @@ it.layer(NodeServices.layer)("server settings", (it) => { // cause the update to lose the selected model. 
const next = yield* serverSettings.updateSettings({ textGenerationModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4", - options: { - reasoningEffort: "high", + options: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.4", [ + { id: "reasoningEffort", value: "high" }, + ]).options!, + }, + }); + + assert.deepEqual( + next.textGenerationModelSelection, + createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.4", [ + { id: "reasoningEffort", value: "high" }, + ]), + ); + }).pipe(Effect.provide(makeServerSettingsLayer())), + ); + + it.effect("preserves custom provider instance text generation selections", () => + Effect.gen(function* () { + const serverSettings = yield* ServerSettingsService; + + const next = yield* serverSettings.updateSettings({ + providerInstances: { + [ProviderInstanceId.make("claude_openrouter")]: { + driver: ProviderDriverKind.make("claudeAgent"), + enabled: true, + config: { customModels: ["openai/gpt-5.5"] }, }, }, + textGenerationModelSelection: { + instanceId: ProviderInstanceId.make("claude_openrouter"), + model: "openai/gpt-5.5", + }, }); assert.deepEqual(next.textGenerationModelSelection, { - provider: "codex", - model: "gpt-5.4", - options: { - reasoningEffort: "high", + instanceId: ProviderInstanceId.make("claude_openrouter"), + model: "openai/gpt-5.5", + }); + }).pipe(Effect.provide(makeServerSettingsLayer())), + ); + + it.effect( + "uses explicit provider instance enabled state over legacy provider enabled state", + () => + Effect.gen(function* () { + const serverSettings = yield* ServerSettingsService; + const instanceId = ProviderInstanceId.make("claude_openrouter"); + + const next = yield* serverSettings.updateSettings({ + providers: { + claudeAgent: { + enabled: false, + }, + }, + providerInstances: { + [instanceId]: { + driver: ProviderDriverKind.make("claudeAgent"), + enabled: true, + config: { customModels: ["openai/gpt-5.5"] }, + }, + }, + 
textGenerationModelSelection: { + instanceId, + model: "openai/gpt-5.5", + }, + }); + + assert.deepEqual(next.textGenerationModelSelection, { + instanceId, + model: "openai/gpt-5.5", + }); + }).pipe(Effect.provide(makeServerSettingsLayer())), + ); + + it.effect("preserves enabled text generation selections for non-built-in drivers", () => + Effect.gen(function* () { + const serverSettings = yield* ServerSettingsService; + const instanceId = ProviderInstanceId.make("openrouter_text"); + + const next = yield* serverSettings.updateSettings({ + providerInstances: { + [instanceId]: { + driver: ProviderDriverKind.make("openrouter"), + enabled: true, + config: { customModels: ["openai/gpt-5.5"] }, + }, + }, + textGenerationModelSelection: { + instanceId, + model: "openai/gpt-5.5", }, }); + + assert.deepEqual(next.textGenerationModelSelection, { + instanceId, + model: "openai/gpt-5.5", + }); }).pipe(Effect.provide(makeServerSettingsLayer())), ); @@ -166,29 +265,70 @@ it.layer(NodeServices.layer)("server settings", (it) => { yield* serverSettings.updateSettings({ textGenerationModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.model, - options: { - reasoningEffort: "high", - fastMode: true, - }, + options: createModelSelection( + ProviderInstanceId.make("codex"), + DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.model, + [ + { id: "reasoningEffort", value: "high" }, + { id: "fastMode", value: true }, + ], + ).options!, }, }); const next = yield* serverSettings.updateSettings({ textGenerationModelSelection: { - provider: DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.provider, + instanceId: DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.instanceId, model: DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.model, }, }); assert.deepEqual(next.textGenerationModelSelection, { - provider: DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.provider, + instanceId: 
DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.instanceId, model: DEFAULT_SERVER_SETTINGS.textGenerationModelSelection.model, }); }).pipe(Effect.provide(makeServerSettingsLayer())), ); + it.effect("replaces provider instance maps when clearing optional fields", () => + Effect.gen(function* () { + const serverSettings = yield* ServerSettingsService; + const codexId = ProviderInstanceId.make("codex"); + + yield* serverSettings.updateSettings({ + providerInstances: { + [codexId]: { + driver: ProviderDriverKind.make("codex"), + displayName: "Codex Work", + accentColor: "#7c3aed", + enabled: true, + config: { homePath: "~/.codex" }, + }, + }, + }); + + const next = yield* serverSettings.updateSettings({ + providerInstances: { + [codexId]: { + driver: ProviderDriverKind.make("codex"), + displayName: "Codex Work", + enabled: true, + config: { homePath: "~/.codex" }, + }, + }, + }); + + assert.deepEqual(next.providerInstances[codexId], { + driver: ProviderDriverKind.make("codex"), + displayName: "Codex Work", + enabled: true, + config: { homePath: "~/.codex" }, + }); + }).pipe(Effect.provide(makeServerSettingsLayer())), + ); + it.effect("trims provider path settings when updates are applied", () => Effect.gen(function* () { const serverSettings = yield* ServerSettingsService; @@ -202,14 +342,10 @@ it.layer(NodeServices.layer)("server settings", (it) => { claudeAgent: { binaryPath: " /opt/homebrew/bin/claude ", }, - copilot: { - binaryPath: " /opt/homebrew/bin/copilot ", - configDir: " /Users/julius/.config/copilot ", - }, opencode: { binaryPath: " /opt/homebrew/bin/opencode ", - serverUrl: " http://localhost:1234 ", - serverPassword: " s3cret ", + serverUrl: " http://127.0.0.1:4096 ", + serverPassword: " secret-password ", }, }, }); @@ -218,25 +354,21 @@ it.layer(NodeServices.layer)("server settings", (it) => { enabled: true, binaryPath: "/opt/homebrew/bin/codex", homePath: "", + shadowHomePath: "", customModels: [], }); assert.deepEqual(next.providers.claudeAgent, { 
enabled: true, binaryPath: "/opt/homebrew/bin/claude", + homePath: "", customModels: [], launchArgs: "", }); - assert.deepEqual(next.providers.copilot, { - enabled: true, - binaryPath: "/opt/homebrew/bin/copilot", - configDir: "/Users/julius/.config/copilot", - customModels: [], - }); assert.deepEqual(next.providers.opencode, { enabled: true, binaryPath: "/opt/homebrew/bin/opencode", - serverUrl: "http://localhost:1234", - serverPassword: "s3cret", + serverUrl: "http://127.0.0.1:4096", + serverPassword: "secret-password", customModels: [], }); }).pipe(Effect.provide(makeServerSettingsLayer())), @@ -298,7 +430,8 @@ it.layer(NodeServices.layer)("server settings", (it) => { binaryPath: "/opt/homebrew/bin/codex", }, opencode: { - serverUrl: "http://localhost:1234", + serverUrl: "http://127.0.0.1:4096", + serverPassword: "secret-password", }, }, }); @@ -317,10 +450,74 @@ it.layer(NodeServices.layer)("server settings", (it) => { binaryPath: "/opt/homebrew/bin/codex", }, opencode: { - serverUrl: "http://localhost:1234", + serverUrl: "http://127.0.0.1:4096", + serverPassword: "secret-password", }, }, }); }).pipe(Effect.provide(makeServerSettingsLayer())), ); + + it.effect("stores sensitive provider instance environment values outside settings.json", () => + Effect.gen(function* () { + const serverSettings = yield* ServerSettingsService; + const serverConfig = yield* ServerConfig; + const fileSystem = yield* FileSystem.FileSystem; + const instanceId = ProviderInstanceId.make("codex_personal"); + + const next = yield* serverSettings.updateSettings({ + providerInstances: { + [instanceId]: { + driver: ProviderDriverKind.make("codex"), + environment: [ + { name: "OPENROUTER_API_KEY", value: "sk-or-secret", sensitive: true }, + { name: "ANTHROPIC_BASE_URL", value: "https://openrouter.ai/api", sensitive: false }, + ], + config: {}, + }, + }, + }); + + assert.deepEqual(next.providerInstances[instanceId]?.environment, [ + { + name: "OPENROUTER_API_KEY", + value: "sk-or-secret", + 
sensitive: true, + valueRedacted: true, + }, + { name: "ANTHROPIC_BASE_URL", value: "https://openrouter.ai/api", sensitive: false }, + ]); + + const raw = yield* fileSystem.readFileString(serverConfig.settingsPath); + assert.notInclude(raw, "sk-or-secret"); + assert.deepEqual(JSON.parse(raw).providerInstances.codex_personal.environment, [ + { + name: "OPENROUTER_API_KEY", + value: "", + sensitive: true, + valueRedacted: true, + }, + { name: "ANTHROPIC_BASE_URL", value: "https://openrouter.ai/api", sensitive: false }, + ]); + + const roundTripped = yield* serverSettings.updateSettings({ + providerInstances: { + [instanceId]: { + driver: ProviderDriverKind.make("codex"), + displayName: "Codex Personal", + environment: [ + { name: "OPENROUTER_API_KEY", value: "", sensitive: true, valueRedacted: true }, + { name: "ANTHROPIC_BASE_URL", value: "https://openrouter.ai/api", sensitive: false }, + ], + config: {}, + }, + }, + }); + + assert.equal( + roundTripped.providerInstances[instanceId]?.environment?.[0]?.value, + "sk-or-secret", + ); + }).pipe(Effect.provide(makeServerSettingsLayer())), + ); }); diff --git a/apps/server/src/serverSettings.ts b/apps/server/src/serverSettings.ts index acda7be294f..2b9d8a52951 100644 --- a/apps/server/src/serverSettings.ts +++ b/apps/server/src/serverSettings.ts @@ -11,10 +11,15 @@ * @module ServerSettings */ import { + DEFAULT_GIT_TEXT_GENERATION_MODEL, DEFAULT_GIT_TEXT_GENERATION_MODEL_BY_PROVIDER, DEFAULT_SERVER_SETTINGS, + isProviderDriverKind, type ModelSelection, - type ProviderKind, + type ProviderInstanceConfig, + type ProviderInstanceEnvironmentVariable, + ProviderDriverKind, + ProviderInstanceId, ServerSettings, ServerSettingsError, type ServerSettingsPatch, @@ -44,6 +49,47 @@ import { ServerConfig } from "./config.ts"; import { type DeepPartial, deepMerge } from "@t3tools/shared/Struct"; import { fromLenientJson } from "@t3tools/shared/schemaJson"; import { applyServerSettingsPatch } from "@t3tools/shared/serverSettings"; 
+import { ServerSecretStoreLive } from "./auth/Layers/ServerSecretStore.ts"; +import { ServerSecretStore } from "./auth/Services/ServerSecretStore.ts"; + +const textEncoder = new TextEncoder(); +const textDecoder = new TextDecoder(); + +function providerEnvironmentSecretName(input: { + readonly instanceId: string; + readonly name: string; +}): string { + return `provider-env-${Buffer.from(input.instanceId, "utf8").toString("base64url")}-${Buffer.from(input.name, "utf8").toString("base64url")}`; +} + +function redactProviderEnvironmentVariable( + variable: ProviderInstanceEnvironmentVariable, +): ProviderInstanceEnvironmentVariable { + if (!variable.sensitive) { + const { valueRedacted: _omit, ...rest } = variable; + return rest; + } + return { + ...variable, + value: "", + ...(variable.value.length > 0 || variable.valueRedacted ? { valueRedacted: true } : {}), + }; +} + +export function redactServerSettingsForClient(settings: ServerSettings): ServerSettings { + const providerInstances = Object.fromEntries( + Object.entries(settings.providerInstances).map(([instanceId, instance]) => [ + instanceId, + instance.environment + ? { + ...instance, + environment: instance.environment.map(redactProviderEnvironmentVariable), + } + : instance, + ]), + ); + return { ...settings, providerInstances }; +} export interface ServerSettingsShape { /** Start the settings runtime and attach file watching. 
*/ @@ -106,16 +152,13 @@ export class ServerSettingsService extends Context.Service< const ServerSettingsJson = fromLenientJson(ServerSettings); -const PROVIDER_ORDER: readonly ProviderKind[] = [ - "codex", - "claudeAgent", - "copilot", - "cursor", - "opencode", - "geminiCli", - "amp", - "kilo", -]; +type LegacyProviderSettings = ServerSettings["providers"][keyof ServerSettings["providers"]]; + +const getLegacyProviderSettings = ( + settings: ServerSettings, + provider: ProviderDriverKind, +): LegacyProviderSettings | undefined => + (settings.providers as Record)[provider]; /** * Ensure the `textGenerationModelSelection` points to an enabled provider. @@ -125,22 +168,36 @@ const PROVIDER_ORDER: readonly ProviderKind[] = [ */ function resolveTextGenerationProvider(settings: ServerSettings): ServerSettings { const selection = settings.textGenerationModelSelection; - if (settings.providers[selection.provider].enabled) { + const instanceConfig = settings.providerInstances[selection.instanceId]; + if (instanceConfig !== undefined) { + return (instanceConfig.enabled ?? true) ? settings : fallbackTextGenerationProvider(settings); + } + + if ( + isProviderDriverKind(selection.instanceId) && + getLegacyProviderSettings(settings, selection.instanceId)?.enabled + ) { return settings; } - const fallback = PROVIDER_ORDER.find((p) => settings.providers[p].enabled); + return fallbackTextGenerationProvider(settings); +} + +function fallbackTextGenerationProvider(settings: ServerSettings): ServerSettings { + const fallbackEntry = Object.entries(settings.providers).find(([, provider]) => provider.enabled); + const fallback = fallbackEntry ? ProviderDriverKind.make(fallbackEntry[0]) : undefined; if (!fallback) { - // No providers enabled — return as-is; callers will report the error. 
return settings; } return { ...settings, textGenerationModelSelection: { - provider: fallback, - model: DEFAULT_GIT_TEXT_GENERATION_MODEL_BY_PROVIDER[fallback], - } as ModelSelection, + instanceId: ProviderInstanceId.make(fallback), + model: + DEFAULT_GIT_TEXT_GENERATION_MODEL_BY_PROVIDER[fallback] ?? + DEFAULT_GIT_TEXT_GENERATION_MODEL, + } satisfies ModelSelection, }; } @@ -185,6 +242,7 @@ const makeServerSettings = Effect.gen(function* () { const { settingsPath } = yield* ServerConfig; const fs = yield* FileSystem.FileSystem; const pathService = yield* Path.Path; + const secretStore = yield* ServerSecretStore; const writeSemaphore = yield* Semaphore.make(1); const cacheKey = "settings" as const; const changesPubSub = yield* PubSub.unbounded(); @@ -242,6 +300,138 @@ const makeServerSettings = Effect.gen(function* () { const getSettingsFromCache = Cache.get(settingsCache, cacheKey); + const toSettingsError = (detail: string, cause: unknown) => + new ServerSettingsError({ + settingsPath, + detail, + cause, + }); + + const materializeProviderEnvironmentSecrets = ( + settings: ServerSettings, + ): Effect.Effect => + Effect.gen(function* () { + const providerInstances: Record = { + ...settings.providerInstances, + }; + for (const [instanceId, instance] of Object.entries(settings.providerInstances)) { + if (!instance.environment) continue; + const environment: ProviderInstanceEnvironmentVariable[] = []; + for (const variable of instance.environment) { + if (!variable.sensitive || !variable.valueRedacted) { + environment.push(variable); + continue; + } + const secret = yield* secretStore + .get(providerEnvironmentSecretName({ instanceId, name: variable.name })) + .pipe( + Effect.mapError((cause) => + toSettingsError( + `failed to read sensitive environment variable ${variable.name}`, + cause, + ), + ), + ); + environment.push({ + ...variable, + value: secret ? 
textDecoder.decode(secret) : "", + }); + } + providerInstances[instanceId] = { + ...instance, + environment, + } satisfies ProviderInstanceConfig; + } + return { + ...settings, + providerInstances: providerInstances as ServerSettings["providerInstances"], + }; + }); + + const persistProviderEnvironmentSecrets = ( + current: ServerSettings, + next: ServerSettings, + ): Effect.Effect => + Effect.gen(function* () { + const providerInstances: Record = { + ...next.providerInstances, + }; + + const nextSecretKeys = new Set(); + for (const [instanceId, instance] of Object.entries(next.providerInstances)) { + if (!instance.environment) continue; + const environment: ProviderInstanceEnvironmentVariable[] = []; + for (const variable of instance.environment) { + const secretName = providerEnvironmentSecretName({ instanceId, name: variable.name }); + if (!variable.sensitive) { + yield* secretStore + .remove(secretName) + .pipe( + Effect.mapError((cause) => + toSettingsError(`failed to remove environment secret ${variable.name}`, cause), + ), + ); + environment.push(redactProviderEnvironmentVariable(variable)); + continue; + } + + nextSecretKeys.add(secretName); + if (!variable.valueRedacted) { + if (variable.value.length > 0) { + yield* secretStore + .set(secretName, textEncoder.encode(variable.value)) + .pipe( + Effect.mapError((cause) => + toSettingsError(`failed to persist environment secret ${variable.name}`, cause), + ), + ); + environment.push({ ...variable, value: "", valueRedacted: true }); + } else { + yield* secretStore + .remove(secretName) + .pipe( + Effect.mapError((cause) => + toSettingsError(`failed to remove environment secret ${variable.name}`, cause), + ), + ); + const { valueRedacted: _omit, ...rest } = variable; + environment.push(rest); + } + continue; + } + + environment.push(redactProviderEnvironmentVariable(variable)); + } + providerInstances[instanceId] = { + ...instance, + environment, + } satisfies ProviderInstanceConfig; + } + + for (const 
[instanceId, instance] of Object.entries(current.providerInstances)) { + for (const variable of instance.environment ?? []) { + if (!variable.sensitive) continue; + const secretName = providerEnvironmentSecretName({ instanceId, name: variable.name }); + if (nextSecretKeys.has(secretName)) continue; + yield* secretStore + .remove(secretName) + .pipe( + Effect.mapError((cause) => + toSettingsError( + `failed to remove stale environment secret ${variable.name}`, + cause, + ), + ), + ); + } + } + + return { + ...next, + providerInstances: providerInstances as ServerSettings["providerInstances"], + }; + }); + const writeSettingsAtomically = (settings: ServerSettings) => { const sparseSettings = stripDefaultServerSettings(settings, DEFAULT_SERVER_SETTINGS) ?? {}; @@ -333,14 +523,19 @@ const makeServerSettings = Effect.gen(function* () { return { start, ready: Deferred.await(startedDeferred), - getSettings: getSettingsFromCache.pipe(Effect.map(resolveTextGenerationProvider)), + getSettings: getSettingsFromCache.pipe( + Effect.flatMap(materializeProviderEnvironmentSecrets), + Effect.map(resolveTextGenerationProvider), + ), updateSettings: (patch) => writeSemaphore.withPermits(1)( Effect.gen(function* () { const current = yield* getSettingsFromCache; - const next = yield* Schema.decodeEffect(ServerSettings)( + const nextPersisted = yield* persistProviderEnvironmentSecrets( + current, applyServerSettingsPatch(current, patch), - ).pipe( + ); + const next = yield* Schema.decodeEffect(ServerSettings)(nextPersisted).pipe( Effect.mapError( (cause) => new ServerSettingsError({ @@ -353,13 +548,27 @@ const makeServerSettings = Effect.gen(function* () { yield* writeSettingsAtomically(next); yield* Cache.set(settingsCache, cacheKey, next); yield* emitChange(next); - return resolveTextGenerationProvider(next); + const materialized = yield* materializeProviderEnvironmentSecrets(next); + return resolveTextGenerationProvider(materialized); }), ), get streamChanges() { - return 
Stream.fromPubSub(changesPubSub).pipe(Stream.map(resolveTextGenerationProvider)); + return Stream.fromPubSub(changesPubSub).pipe( + Stream.mapEffect((settings) => + materializeProviderEnvironmentSecrets(settings).pipe( + Effect.catch((error: ServerSettingsError) => + Effect.logWarning("failed to materialize provider environment secrets", { + detail: error.detail, + }).pipe(Effect.as(settings)), + ), + ), + ), + Stream.map(resolveTextGenerationProvider), + ); }, } satisfies ServerSettingsShape; }); -export const ServerSettingsLive = Layer.effect(ServerSettingsService, makeServerSettings); +export const ServerSettingsLive = Layer.effect(ServerSettingsService, makeServerSettings).pipe( + Layer.provide(ServerSecretStoreLive), +); diff --git a/apps/server/src/sourceControl/AzureDevOpsCli.test.ts b/apps/server/src/sourceControl/AzureDevOpsCli.test.ts new file mode 100644 index 00000000000..406c9772f34 --- /dev/null +++ b/apps/server/src/sourceControl/AzureDevOpsCli.test.ts @@ -0,0 +1,288 @@ +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { assert, it, afterEach, describe, expect, vi } from "@effect/vitest"; +import { Effect, FileSystem, Layer, Option } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; +import type { VcsError } from "@t3tools/contracts"; + +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as AzureDevOpsCli from "./AzureDevOpsCli.ts"; + +const processOutput = (stdout: string): VcsProcess.VcsProcessOutput => ({ + exitCode: ChildProcessSpawner.ExitCode(0), + stdout, + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, +}); + +const mockRun = vi.fn(); + +const supportLayer = Layer.mergeAll( + Layer.mock(VcsProcess.VcsProcess)({ + run: mockRun, + }), + NodeServices.layer, +); +const layer = Layer.mergeAll(AzureDevOpsCli.layer.pipe(Layer.provide(supportLayer)), supportLayer); + +afterEach(() => { + mockRun.mockReset(); +}); + +describe("AzureDevOpsCli.layer", () => { + 
it.effect("parses pull request view output", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify({ + pullRequestId: 42, + title: "Add Azure provider", + sourceRefName: "refs/heads/feature/source-control", + targetRefName: "refs/heads/main", + status: "active", + creationDate: "2026-01-02T00:00:00.000Z", + closedDate: null, + _links: { + web: { + href: "https://dev.azure.com/acme/project/_git/repo/pullrequest/42", + }, + }, + }), + ), + ), + ); + + const az = yield* AzureDevOpsCli.AzureDevOpsCli; + const result = yield* az.getPullRequest({ + cwd: "/repo", + reference: "#42", + }); + + assert.strictEqual(result.number, 42); + assert.strictEqual(result.title, "Add Azure provider"); + assert.strictEqual(result.baseRefName, "main"); + assert.strictEqual(result.headRefName, "feature/source-control"); + assert.strictEqual(result.state, "open"); + assert.deepStrictEqual(result.updatedAt._tag, Option.some(1)._tag); + assert.deepStrictEqual(mockRun.mock.calls.at(-1)?.[0], { + operation: "AzureDevOpsCli.execute", + command: "az", + args: [ + "repos", + "pr", + "show", + "--detect", + "true", + "--id", + "42", + "--only-show-errors", + "--output", + "json", + ], + cwd: "/repo", + timeoutMs: 30_000, + }); + }).pipe(Effect.provide(layer)), + ); + + it.effect("lists pull requests with Azure status and source branch arguments", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify([ + { + pullRequestId: 7, + title: "Merged work", + sourceRefName: "refs/heads/feature/merged", + targetRefName: "refs/heads/main", + status: "completed", + closedDate: "2026-01-03T00:00:00.000Z", + _links: { + web: { + href: "https://dev.azure.com/acme/project/_git/repo/pullrequest/7", + }, + }, + }, + ]), + ), + ), + ); + + const az = yield* AzureDevOpsCli.AzureDevOpsCli; + const result = yield* az.listPullRequests({ + cwd: "/repo", + headSelector: "origin:feature/merged", + 
state: "merged", + limit: 10, + }); + + assert.strictEqual(result[0]?.state, "merged"); + expect(mockRun).toHaveBeenCalledWith({ + operation: "AzureDevOpsCli.execute", + command: "az", + args: [ + "repos", + "pr", + "list", + "--detect", + "true", + "--source-branch", + "feature/merged", + "--status", + "completed", + "--top", + "10", + "--only-show-errors", + "--output", + "json", + ], + cwd: "/repo", + timeoutMs: 30_000, + }); + }).pipe(Effect.provide(layer)), + ); + + it.effect("reads repository clone URLs", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify({ + name: "repo", + webUrl: "https://dev.azure.com/acme/project/_git/repo", + remoteUrl: "https://dev.azure.com/acme/project/_git/repo", + sshUrl: "git@ssh.dev.azure.com:v3/acme/project/repo", + project: { + name: "project", + }, + }), + ), + ), + ); + + const az = yield* AzureDevOpsCli.AzureDevOpsCli; + const result = yield* az.getRepositoryCloneUrls({ + cwd: "/repo", + repository: "repo", + }); + + assert.deepStrictEqual(result, { + nameWithOwner: "project/repo", + url: "https://dev.azure.com/acme/project/_git/repo", + sshUrl: "git@ssh.dev.azure.com:v3/acme/project/repo", + }); + }).pipe(Effect.provide(layer)), + ); + + it.effect("creates repositories through Azure Repos", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify({ + name: "repo", + webUrl: "https://dev.azure.com/acme/project/_git/repo", + remoteUrl: "https://dev.azure.com/acme/project/_git/repo", + sshUrl: "git@ssh.dev.azure.com:v3/acme/project/repo", + project: { + name: "project", + }, + }), + ), + ), + ); + + const az = yield* AzureDevOpsCli.AzureDevOpsCli; + const result = yield* az.createRepository({ + cwd: "/repo", + repository: "project/repo", + visibility: "private", + }); + + assert.deepStrictEqual(result, { + nameWithOwner: "project/repo", + url: "https://dev.azure.com/acme/project/_git/repo", + sshUrl: 
"git@ssh.dev.azure.com:v3/acme/project/repo", + }); + expect(mockRun).toHaveBeenCalledWith({ + operation: "AzureDevOpsCli.execute", + command: "az", + args: [ + "repos", + "create", + "--detect", + "true", + "--name", + "repo", + "--project", + "project", + "--only-show-errors", + "--output", + "json", + ], + cwd: "/repo", + timeoutMs: 30_000, + }); + }).pipe(Effect.provide(layer)), + ); + + it.effect("creates pull requests using the body file as the Azure description", () => + Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const bodyFile = `/tmp/t3code-azure-devops-cli-${Date.now()}.md`; + yield* fileSystem.writeFileString(bodyFile, "Generated body"); + mockRun.mockReturnValueOnce(Effect.succeed(processOutput("{}"))); + + const az = yield* AzureDevOpsCli.AzureDevOpsCli; + yield* az.createPullRequest({ + cwd: "/repo", + baseBranch: "main", + headSelector: "feature/provider", + title: "Provider PR", + bodyFile, + }); + + expect(mockRun).toHaveBeenCalledWith( + expect.objectContaining({ + command: "az", + cwd: "/repo", + args: expect.arrayContaining(["--description", `@${bodyFile}`]), + }), + ); + expect(mockRun.mock.calls[0]?.[0].args).not.toContain("--output"); + }).pipe(Effect.provide(layer)), + ); + + it.effect("does not force JSON output on checkout side-effect commands", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce(Effect.succeed(processOutput(""))); + + const az = yield* AzureDevOpsCli.AzureDevOpsCli; + yield* az.checkoutPullRequest({ + cwd: "/repo", + reference: "42", + }); + + expect(mockRun).toHaveBeenCalledWith({ + operation: "AzureDevOpsCli.execute", + command: "az", + args: [ + "repos", + "pr", + "checkout", + "--only-show-errors", + "--detect", + "true", + "--id", + "42", + "--remote-name", + "origin", + ], + cwd: "/repo", + timeoutMs: 30_000, + }); + }).pipe(Effect.provide(layer)), + ); +}); diff --git a/apps/server/src/sourceControl/AzureDevOpsCli.ts b/apps/server/src/sourceControl/AzureDevOpsCli.ts 
new file mode 100644 index 00000000000..375bfc1a54b --- /dev/null +++ b/apps/server/src/sourceControl/AzureDevOpsCli.ts @@ -0,0 +1,429 @@ +import { Context, Effect, Layer, Result, Schema, SchemaIssue } from "effect"; +import { + TrimmedNonEmptyString, + type SourceControlRepositoryVisibility, + type VcsError, +} from "@t3tools/contracts"; + +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as AzureDevOpsPullRequests from "./azureDevOpsPullRequests.ts"; +import * as SourceControlProvider from "./SourceControlProvider.ts"; + +const DEFAULT_TIMEOUT_MS = 30_000; + +export class AzureDevOpsCliError extends Schema.TaggedErrorClass()( + "AzureDevOpsCliError", + { + operation: Schema.String, + detail: Schema.String, + cause: Schema.optional(Schema.Defect), + }, +) { + override get message(): string { + return `Azure DevOps CLI failed in ${this.operation}: ${this.detail}`; + } +} + +export interface AzureDevOpsRepositoryCloneUrls { + readonly nameWithOwner: string; + readonly url: string; + readonly sshUrl: string; +} + +export interface AzureDevOpsCliShape { + readonly execute: (input: { + readonly cwd: string; + readonly args: ReadonlyArray; + readonly timeoutMs?: number; + }) => Effect.Effect; + + readonly listPullRequests: (input: { + readonly cwd: string; + readonly headSelector: string; + readonly source?: SourceControlProvider.SourceControlRefSelector; + readonly state: "open" | "closed" | "merged" | "all"; + readonly limit?: number; + }) => Effect.Effect< + ReadonlyArray, + AzureDevOpsCliError + >; + + readonly getPullRequest: (input: { + readonly cwd: string; + readonly reference: string; + }) => Effect.Effect< + AzureDevOpsPullRequests.NormalizedAzureDevOpsPullRequestRecord, + AzureDevOpsCliError + >; + + readonly getRepositoryCloneUrls: (input: { + readonly cwd: string; + readonly repository: string; + }) => Effect.Effect; + + readonly createRepository: (input: { + readonly cwd: string; + readonly repository: string; + readonly visibility: 
SourceControlRepositoryVisibility; + }) => Effect.Effect; + + readonly createPullRequest: (input: { + readonly cwd: string; + readonly baseBranch: string; + readonly headSelector: string; + readonly source?: SourceControlProvider.SourceControlRefSelector; + readonly target?: SourceControlProvider.SourceControlRefSelector; + readonly title: string; + readonly bodyFile: string; + }) => Effect.Effect; + + readonly getDefaultBranch: (input: { + readonly cwd: string; + }) => Effect.Effect; + + readonly checkoutPullRequest: (input: { + readonly cwd: string; + readonly reference: string; + readonly remoteName?: string; + }) => Effect.Effect; +} + +export class AzureDevOpsCli extends Context.Service()( + "t3/source-control/AzureDevOpsCli", +) {} + +function errorText(error: VcsError | unknown): string { + if (typeof error === "object" && error !== null) { + const tag = "_tag" in error && typeof error._tag === "string" ? error._tag : ""; + const detail = "detail" in error && typeof error.detail === "string" ? error.detail : ""; + const message = "message" in error && typeof error.message === "string" ? error.message : ""; + return [tag, detail, message].filter(Boolean).join("\n"); + } + + return String(error); +} + +function normalizeAzureDevOpsCliError( + operation: "execute", + error: VcsError | unknown, +): AzureDevOpsCliError { + const text = errorText(error); + const lower = text.toLowerCase(); + + if (lower.includes("command not found: az") || lower.includes("enoent")) { + return new AzureDevOpsCliError({ + operation, + detail: + "Azure CLI (`az`) with the Azure DevOps extension is required but not available on PATH.", + cause: error, + }); + } + + if ( + lower.includes("az devops login") || + lower.includes("please run az login") || + lower.includes("not logged in") || + lower.includes("authentication failed") || + lower.includes("unauthorized") + ) { + return new AzureDevOpsCliError({ + operation, + detail: "Azure DevOps CLI is not authenticated. 
Run `az devops login` and retry.", + cause: error, + }); + } + + if ( + lower.includes("pull request") && + (lower.includes("not found") || lower.includes("does not exist")) + ) { + return new AzureDevOpsCliError({ + operation, + detail: "Pull request not found. Check the PR number or URL and try again.", + cause: error, + }); + } + + return new AzureDevOpsCliError({ + operation, + detail: text, + cause: error, + }); +} + +function normalizeChangeRequestId(reference: string): string { + const trimmed = reference.trim().replace(/^#/, ""); + const urlMatch = /(?:pullrequest|pull-request|pull|_pulls?)\/(\d+)(?:\D.*)?$/i.exec(trimmed); + return urlMatch?.[1] ?? trimmed; +} + +function toAzureStatus(state: "open" | "closed" | "merged" | "all"): string { + switch (state) { + case "open": + return "active"; + case "closed": + return "abandoned"; + case "merged": + return "completed"; + case "all": + return "all"; + } +} + +const RawAzureDevOpsRepositorySchema = Schema.Struct({ + name: TrimmedNonEmptyString, + webUrl: TrimmedNonEmptyString, + remoteUrl: TrimmedNonEmptyString, + sshUrl: TrimmedNonEmptyString, + project: Schema.optional( + Schema.Struct({ + name: TrimmedNonEmptyString, + }), + ), + defaultBranch: Schema.optional(Schema.NullOr(Schema.String)), +}); + +function normalizeDefaultBranch(value: string | null | undefined): string | null { + const trimmed = value?.trim().replace(/^refs\/heads\//, "") ?? ""; + return trimmed.length > 0 ? trimmed : null; +} + +function normalizeRepositoryCloneUrls( + raw: Schema.Schema.Type, +): AzureDevOpsRepositoryCloneUrls { + const projectName = raw.project?.name.trim(); + return { + nameWithOwner: projectName ? 
`${projectName}/${raw.name}` : raw.name, + url: raw.remoteUrl, + sshUrl: raw.sshUrl, + }; +} + +function parseRepositorySpecifier(repository: string): { + readonly project: string | null; + readonly name: string; +} { + const parts = repository + .split("/") + .map((part) => part.trim()) + .filter((part) => part.length > 0); + return { + project: parts.length > 1 ? (parts.at(-2) ?? null) : null, + name: parts.at(-1) ?? repository.trim(), + }; +} + +function decodeAzureDevOpsJson( + raw: string, + schema: S, + operation: "getRepositoryCloneUrls" | "getDefaultBranch" | "createRepository", + invalidDetail: string, +): Effect.Effect { + return Schema.decodeEffect(Schema.fromJsonString(schema))(raw).pipe( + Effect.mapError( + (error) => + new AzureDevOpsCliError({ + operation, + detail: `${invalidDetail}: ${SchemaIssue.makeFormatterDefault()(error.issue)}`, + cause: error, + }), + ), + ); +} + +export const make = Effect.fn("makeAzureDevOpsCli")(function* () { + const process = yield* VcsProcess.VcsProcess; + + const execute: AzureDevOpsCliShape["execute"] = (input) => + process + .run({ + operation: "AzureDevOpsCli.execute", + command: "az", + args: input.args, + cwd: input.cwd, + timeoutMs: input.timeoutMs ?? DEFAULT_TIMEOUT_MS, + }) + .pipe(Effect.mapError((error) => normalizeAzureDevOpsCliError("execute", error))); + + const executeJson = (input: Parameters[0]) => + execute({ + ...input, + args: [...input.args, "--only-show-errors", "--output", "json"], + }); + + return AzureDevOpsCli.of({ + execute, + listPullRequests: (input) => + executeJson({ + cwd: input.cwd, + args: [ + "repos", + "pr", + "list", + "--detect", + "true", + "--source-branch", + SourceControlProvider.sourceBranch(input), + "--status", + toAzureStatus(input.state), + "--top", + String(input.limit ?? 20), + ], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + raw.length === 0 + ? 
Effect.succeed([]) + : Effect.sync(() => + AzureDevOpsPullRequests.decodeAzureDevOpsPullRequestListJson(raw), + ).pipe( + Effect.flatMap((decoded) => { + if (!Result.isSuccess(decoded)) { + return Effect.fail( + new AzureDevOpsCliError({ + operation: "listPullRequests", + detail: `Azure DevOps CLI returned invalid PR list JSON: ${AzureDevOpsPullRequests.formatAzureDevOpsJsonDecodeError(decoded.failure)}`, + cause: decoded.failure, + }), + ); + } + + return Effect.succeed(decoded.success); + }), + ), + ), + ), + getPullRequest: (input) => + executeJson({ + cwd: input.cwd, + args: [ + "repos", + "pr", + "show", + "--detect", + "true", + "--id", + normalizeChangeRequestId(input.reference), + ], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + Effect.sync(() => AzureDevOpsPullRequests.decodeAzureDevOpsPullRequestJson(raw)).pipe( + Effect.flatMap((decoded) => { + if (!Result.isSuccess(decoded)) { + return Effect.fail( + new AzureDevOpsCliError({ + operation: "getPullRequest", + detail: `Azure DevOps CLI returned invalid pull request JSON: ${AzureDevOpsPullRequests.formatAzureDevOpsJsonDecodeError(decoded.failure)}`, + cause: decoded.failure, + }), + ); + } + + return Effect.succeed(decoded.success); + }), + ), + ), + ), + getRepositoryCloneUrls: (input) => + executeJson({ + cwd: input.cwd, + args: ["repos", "show", "--detect", "true", "--repository", input.repository], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + decodeAzureDevOpsJson( + raw, + RawAzureDevOpsRepositorySchema, + "getRepositoryCloneUrls", + "Azure DevOps CLI returned invalid repository JSON.", + ), + ), + Effect.map(normalizeRepositoryCloneUrls), + ), + createRepository: (input) => { + const repository = parseRepositorySpecifier(input.repository); + // Azure Repos access is governed by project/organization permissions. 
+ // `az repos create` does not expose a per-repository visibility flag, so + // the generic source-control visibility input is intentionally not + // translated into CLI args for this provider. + return executeJson({ + cwd: input.cwd, + args: [ + "repos", + "create", + "--detect", + "true", + "--name", + repository.name, + ...(repository.project ? ["--project", repository.project] : []), + ], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + decodeAzureDevOpsJson( + raw, + RawAzureDevOpsRepositorySchema, + "createRepository", + "Azure DevOps CLI returned invalid repository JSON.", + ), + ), + Effect.map(normalizeRepositoryCloneUrls), + ); + }, + createPullRequest: (input) => + execute({ + cwd: input.cwd, + args: [ + "repos", + "pr", + "create", + "--only-show-errors", + "--detect", + "true", + "--target-branch", + input.target?.refName ?? input.baseBranch, + "--source-branch", + SourceControlProvider.sourceBranch(input), + "--title", + input.title, + "--description", + `@${input.bodyFile}`, + ], + }).pipe(Effect.asVoid), + getDefaultBranch: (input) => + executeJson({ + cwd: input.cwd, + args: ["repos", "show", "--detect", "true"], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + decodeAzureDevOpsJson( + raw, + RawAzureDevOpsRepositorySchema, + "getDefaultBranch", + "Azure DevOps CLI returned invalid repository JSON.", + ), + ), + Effect.map((repo) => normalizeDefaultBranch(repo.defaultBranch)), + ), + checkoutPullRequest: (input) => + execute({ + cwd: input.cwd, + args: [ + "repos", + "pr", + "checkout", + "--only-show-errors", + "--detect", + "true", + "--id", + normalizeChangeRequestId(input.reference), + "--remote-name", + input.remoteName ?? 
"origin", + ], + }).pipe(Effect.asVoid), + }); +}); + +export const layer = Layer.effect(AzureDevOpsCli, make()); diff --git a/apps/server/src/sourceControl/AzureDevOpsSourceControlProvider.test.ts b/apps/server/src/sourceControl/AzureDevOpsSourceControlProvider.test.ts new file mode 100644 index 00000000000..c46bc0ee7fc --- /dev/null +++ b/apps/server/src/sourceControl/AzureDevOpsSourceControlProvider.test.ts @@ -0,0 +1,91 @@ +import { assert, it } from "@effect/vitest"; +import { Effect, Layer, Option } from "effect"; + +import * as AzureDevOpsCli from "./AzureDevOpsCli.ts"; +import * as AzureDevOpsSourceControlProvider from "./AzureDevOpsSourceControlProvider.ts"; + +function makeProvider(azure: Partial) { + return AzureDevOpsSourceControlProvider.make().pipe( + Effect.provide(Layer.mock(AzureDevOpsCli.AzureDevOpsCli)(azure)), + ); +} + +it.effect("maps Azure DevOps PR summaries into provider-neutral change requests", () => + Effect.gen(function* () { + const provider = yield* makeProvider({ + getPullRequest: () => + Effect.succeed({ + number: 42, + title: "Add Azure provider", + url: "https://dev.azure.com/acme/project/_git/repo/pullrequest/42", + baseRefName: "main", + headRefName: "feature/source-control", + state: "open", + updatedAt: Option.none(), + }), + }); + + const changeRequest = yield* provider.getChangeRequest({ + cwd: "/repo", + reference: "42", + }); + + assert.deepStrictEqual(changeRequest, { + provider: "azure-devops", + number: 42, + title: "Add Azure provider", + url: "https://dev.azure.com/acme/project/_git/repo/pullrequest/42", + baseRefName: "main", + headRefName: "feature/source-control", + state: "open", + updatedAt: Option.none(), + isCrossRepository: false, + }); + }), +); + +it.effect("creates Azure DevOps PRs through provider-neutral input names", () => + Effect.gen(function* () { + let createInput: Parameters[0] | null = + null; + const provider = yield* makeProvider({ + createPullRequest: (input) => { + createInput = input; + return 
Effect.void; + }, + }); + + yield* provider.createChangeRequest({ + cwd: "/repo", + baseRefName: "main", + headSelector: "feature/provider", + title: "Provider PR", + bodyFile: "/tmp/body.md", + }); + + assert.deepStrictEqual(createInput, { + cwd: "/repo", + baseBranch: "main", + headSelector: "feature/provider", + title: "Provider PR", + bodyFile: "/tmp/body.md", + }); + }), +); + +it.effect("uses Azure CLI repository detection for default branch lookup", () => + Effect.gen(function* () { + let cwdInput: string | null = null; + const provider = yield* makeProvider({ + getDefaultBranch: (input) => { + cwdInput = input.cwd; + return Effect.succeed("main"); + }, + }); + + const defaultBranch = yield* provider.getDefaultBranch({ cwd: "/repo" }); + + assert.strictEqual(defaultBranch, "main"); + assert.strictEqual(cwdInput, "/repo"); + }), +); diff --git a/apps/server/src/sourceControl/AzureDevOpsSourceControlProvider.ts b/apps/server/src/sourceControl/AzureDevOpsSourceControlProvider.ts new file mode 100644 index 00000000000..cd4162a934e --- /dev/null +++ b/apps/server/src/sourceControl/AzureDevOpsSourceControlProvider.ts @@ -0,0 +1,144 @@ +import { Effect, Layer } from "effect"; +import { SourceControlProviderError, type ChangeRequest } from "@t3tools/contracts"; + +import * as AzureDevOpsCli from "./AzureDevOpsCli.ts"; +import * as SourceControlProvider from "./SourceControlProvider.ts"; +import * as SourceControlProviderDiscovery from "./SourceControlProviderDiscovery.ts"; + +function providerError( + operation: string, + cause: AzureDevOpsCli.AzureDevOpsCliError, +): SourceControlProviderError { + return new SourceControlProviderError({ + provider: "azure-devops", + operation, + detail: cause.detail, + cause, + }); +} + +function parseAzureAuth(input: SourceControlProviderDiscovery.SourceControlAuthProbeInput) { + const account = input.stdout.trim().split(/\r?\n/)[0]?.trim(); + + if (input.exitCode !== 0) { + return SourceControlProviderDiscovery.providerAuth({ + 
status: "unauthenticated", + detail: + SourceControlProviderDiscovery.firstSafeAuthLine( + SourceControlProviderDiscovery.combinedAuthOutput(input), + ) ?? "Run `az login` to authenticate Azure CLI.", + }); + } + + if (account && account.length > 0) { + return SourceControlProviderDiscovery.providerAuth({ + status: "authenticated", + account, + host: "dev.azure.com", + }); + } + + return SourceControlProviderDiscovery.providerAuth({ + status: "unknown", + host: "dev.azure.com", + detail: "Azure CLI account status could not be parsed.", + }); +} + +export const discovery = { + type: "cli", + kind: "azure-devops", + label: "Azure DevOps", + executable: "az", + versionArgs: ["--version"], + authArgs: ["account", "show", "--query", "user.name", "-o", "tsv"], + parseAuth: parseAzureAuth, + installHint: + "Install the Azure command-line tools (`az`), then enable Azure DevOps support with `az extension add --name azure-devops`.", +} satisfies SourceControlProviderDiscovery.SourceControlCliDiscoverySpec; + +function toChangeRequest(summary: { + readonly number: number; + readonly title: string; + readonly url: string; + readonly baseRefName: string; + readonly headRefName: string; + readonly state: "open" | "closed" | "merged"; + readonly updatedAt: ChangeRequest["updatedAt"]; +}): ChangeRequest { + return { + provider: "azure-devops", + number: summary.number, + title: summary.title, + url: summary.url, + baseRefName: summary.baseRefName, + headRefName: summary.headRefName, + state: summary.state, + updatedAt: summary.updatedAt, + isCrossRepository: false, + }; +} + +export const make = Effect.fn("makeAzureDevOpsSourceControlProvider")(function* () { + const azure = yield* AzureDevOpsCli.AzureDevOpsCli; + + return SourceControlProvider.SourceControlProvider.of({ + kind: "azure-devops", + listChangeRequests: (input) => { + const source = SourceControlProvider.sourceControlRefFromInput(input); + return azure + .listPullRequests({ + cwd: input.cwd, + headSelector: 
input.headSelector, + ...(source ? { source } : {}), + state: input.state, + ...(input.limit !== undefined ? { limit: input.limit } : {}), + }) + .pipe( + Effect.map((items) => items.map(toChangeRequest)), + Effect.mapError((error) => providerError("listChangeRequests", error)), + ); + }, + getChangeRequest: (input) => + azure.getPullRequest(input).pipe( + Effect.map(toChangeRequest), + Effect.mapError((error) => providerError("getChangeRequest", error)), + ), + createChangeRequest: (input) => { + const source = SourceControlProvider.sourceControlRefFromInput(input); + return azure + .createPullRequest({ + cwd: input.cwd, + baseBranch: input.baseRefName, + headSelector: input.headSelector, + ...(source ? { source } : {}), + ...(input.target ? { target: input.target } : {}), + title: input.title, + bodyFile: input.bodyFile, + }) + .pipe(Effect.mapError((error) => providerError("createChangeRequest", error))); + }, + getRepositoryCloneUrls: (input) => + azure + .getRepositoryCloneUrls(input) + .pipe(Effect.mapError((error) => providerError("getRepositoryCloneUrls", error))), + createRepository: (input) => + azure + .createRepository(input) + .pipe(Effect.mapError((error) => providerError("createRepository", error))), + getDefaultBranch: (input) => + azure + .getDefaultBranch({ cwd: input.cwd }) + .pipe(Effect.mapError((error) => providerError("getDefaultBranch", error))), + checkoutChangeRequest: (input) => + azure + .checkoutPullRequest({ + cwd: input.cwd, + reference: input.reference, + ...(input.context ? 
{ remoteName: input.context.remoteName } : {}), + }) + .pipe(Effect.mapError((error) => providerError("checkoutChangeRequest", error))), + }); +}); + +export const layer = Layer.effect(SourceControlProvider.SourceControlProvider, make()); diff --git a/apps/server/src/sourceControl/BitbucketApi.test.ts b/apps/server/src/sourceControl/BitbucketApi.test.ts new file mode 100644 index 00000000000..5542ea40921 --- /dev/null +++ b/apps/server/src/sourceControl/BitbucketApi.test.ts @@ -0,0 +1,513 @@ +import { assert, it, vi } from "@effect/vitest"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { ConfigProvider, DateTime, Effect, FileSystem, Layer, Option } from "effect"; +import { HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http"; + +import * as BitbucketApi from "./BitbucketApi.ts"; +import * as GitVcsDriver from "../vcs/GitVcsDriver.ts"; +import * as VcsDriverRegistry from "../vcs/VcsDriverRegistry.ts"; +import type * as VcsDriver from "../vcs/VcsDriver.ts"; + +const bitbucketPullRequest = { + id: 42, + title: "Add Bitbucket provider", + state: "OPEN", + updated_on: "2026-01-02T00:00:00.000Z", + links: { + html: { + href: "https://bitbucket.org/pingdotgg/t3code/pull-requests/42", + }, + }, + source: { + branch: { name: "feature/source-control" }, + repository: { + full_name: "octocat/t3code", + workspace: { slug: "octocat" }, + }, + }, + destination: { + branch: { name: "main" }, + repository: { + full_name: "pingdotgg/t3code", + workspace: { slug: "pingdotgg" }, + }, + }, +}; + +const repositoryJson = { + full_name: "pingdotgg/t3code", + links: { + html: { href: "https://bitbucket.org/pingdotgg/t3code" }, + clone: [ + { name: "https", href: "https://bitbucket.org/pingdotgg/t3code.git" }, + { name: "ssh", href: "git@bitbucket.org:pingdotgg/t3code.git" }, + ], + }, + mainbranch: { name: "main" }, +}; + +function makeLayer(input: { + readonly response: (request: HttpClientRequest.HttpClientRequest) => Response; + 
readonly git?: Partial; +}) { + const execute = vi.fn((request: HttpClientRequest.HttpClientRequest) => + Effect.succeed(HttpClientResponse.fromWeb(request, input.response(request))), + ); + const gitMock = { + readConfigValue: vi.fn(() => + Effect.succeed("git@bitbucket.org:pingdotgg/t3code.git"), + ), + resolvePrimaryRemoteName: vi.fn( + () => Effect.succeed("origin"), + ), + ensureRemote: vi.fn(() => + Effect.succeed("octocat"), + ), + fetchRemoteBranch: vi.fn( + () => Effect.void, + ), + fetchRemoteTrackingBranch: vi.fn( + () => Effect.void, + ), + setBranchUpstream: vi.fn( + () => Effect.void, + ), + switchRef: vi.fn((request) => + Effect.succeed({ refName: request.refName }), + ), + listLocalBranchNames: vi.fn(() => + Effect.succeed([]), + ), + }; + const git = { + ...gitMock, + ...input.git, + } satisfies Partial; + + const driver = { + listRemotes: () => + Effect.succeed({ + remotes: [ + { + name: "origin", + url: "git@bitbucket.org:pingdotgg/t3code.git", + pushUrl: Option.none(), + isPrimary: true, + }, + ], + freshness: { + source: "live-local" as const, + observedAt: DateTime.makeUnsafe("1970-01-01T00:00:00.000Z"), + expiresAt: Option.none(), + }, + }), + } satisfies Partial; + + const layer = BitbucketApi.layer.pipe( + Layer.provide( + Layer.succeed( + HttpClient.HttpClient, + HttpClient.make((request) => execute(request)), + ), + ), + Layer.provide( + Layer.mock(VcsDriverRegistry.VcsDriverRegistry)({ + resolve: () => + Effect.succeed({ + kind: "git", + repository: { + kind: "git", + rootPath: "/repo", + metadataPath: null, + freshness: { + source: "live-local" as const, + observedAt: DateTime.makeUnsafe("1970-01-01T00:00:00.000Z"), + expiresAt: Option.none(), + }, + }, + driver: driver as unknown as VcsDriver.VcsDriverShape, + }), + }), + ), + Layer.provide(Layer.mock(GitVcsDriver.GitVcsDriver)(git)), + Layer.provide( + ConfigProvider.layer( + ConfigProvider.fromEnv({ + env: { + T3CODE_BITBUCKET_API_BASE_URL: "https://api.test.local/2.0", + 
T3CODE_BITBUCKET_EMAIL: "user@example.com", + T3CODE_BITBUCKET_API_TOKEN: "token", + }, + }), + ), + ), + Layer.provideMerge(NodeServices.layer), + ); + + return { execute, git: gitMock, layer }; +} + +it.effect("parses pull request responses from the Bitbucket REST API", () => { + const { execute, layer } = makeLayer({ + response: () => + Response.json({ + ...bitbucketPullRequest, + }), + }); + + return Effect.gen(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + const result = yield* bitbucket.getPullRequest({ + cwd: "/repo", + reference: "#42", + }); + + assert.deepStrictEqual(result, { + number: 42, + title: "Add Bitbucket provider", + url: "https://bitbucket.org/pingdotgg/t3code/pull-requests/42", + baseRefName: "main", + headRefName: "feature/source-control", + state: "open", + updatedAt: Option.some(DateTime.makeUnsafe("2026-01-02T00:00:00.000Z")), + isCrossRepository: true, + headRepositoryNameWithOwner: "octocat/t3code", + headRepositoryOwnerLogin: "octocat", + }); + assert.strictEqual( + execute.mock.calls[0]?.[0].url, + "https://api.test.local/2.0/repositories/pingdotgg/t3code/pullrequests/42", + ); + }).pipe(Effect.provide(layer)); +}); + +it.effect("lists pull requests with Bitbucket state and source branch query params", () => { + const { execute, layer } = makeLayer({ + response: () => + Response.json({ + values: [ + { + ...bitbucketPullRequest, + id: 7, + state: "MERGED", + source: { + branch: { name: "feature/merged" }, + repository: { full_name: "pingdotgg/t3code" }, + }, + }, + ], + }), + }); + + return Effect.gen(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + const result = yield* bitbucket.listPullRequests({ + cwd: "/repo", + headSelector: "origin:feature/merged", + state: "merged", + limit: 10, + }); + + assert.strictEqual(result[0]?.state, "merged"); + const request = execute.mock.calls[0]?.[0]; + assert.strictEqual( + request?.url, + 
"https://api.test.local/2.0/repositories/pingdotgg/t3code/pullrequests", + ); + assert.deepStrictEqual(request?.urlParams.params, [ + ["pagelen", "10"], + ["sort", "-updated_on"], + ["q", 'source.branch.name = "feature/merged" AND state = "MERGED"'], + ["state", "MERGED"], + ]); + }).pipe(Effect.provide(layer)); +}); + +it.effect("lists closed pull requests with both closed Bitbucket states", () => { + const { execute, layer } = makeLayer({ + response: () => + Response.json({ + values: [], + }), + }); + + return Effect.gen(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + yield* bitbucket.listPullRequests({ + cwd: "/repo", + headSelector: "feature/closed", + state: "closed", + limit: 10, + }); + + assert.deepStrictEqual(execute.mock.calls[0]?.[0].urlParams.params, [ + ["pagelen", "10"], + ["sort", "-updated_on"], + [ + "q", + 'source.branch.name = "feature/closed" AND (state = "DECLINED" OR state = "SUPERSEDED")', + ], + ["state", "DECLINED"], + ["state", "SUPERSEDED"], + ]); + }).pipe(Effect.provide(layer)); +}); + +it.effect("expands all-state pull request listing instead of relying on Bitbucket defaults", () => { + const { execute, layer } = makeLayer({ + response: () => + Response.json({ + values: [], + }), + }); + + return Effect.gen(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + yield* bitbucket.listPullRequests({ + cwd: "/repo", + headSelector: "feature/all", + state: "all", + limit: 10, + }); + + assert.deepStrictEqual(execute.mock.calls[0]?.[0].urlParams.params, [ + ["pagelen", "10"], + ["sort", "-updated_on"], + [ + "q", + 'source.branch.name = "feature/all" AND (state = "OPEN" OR state = "MERGED" OR state = "DECLINED" OR state = "SUPERSEDED")', + ], + ["state", "OPEN"], + ["state", "MERGED"], + ["state", "DECLINED"], + ["state", "SUPERSEDED"], + ]); + }).pipe(Effect.provide(layer)); +}); + +it.effect("reads repository clone URLs and default branch", () => { + const { layer } = makeLayer({ + response: () => 
Response.json(repositoryJson), + }); + + return Effect.gen(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + const cloneUrls = yield* bitbucket.getRepositoryCloneUrls({ + cwd: "/repo", + repository: "pingdotgg/t3code", + }); + const defaultBranch = yield* bitbucket.getDefaultBranch({ cwd: "/repo" }); + + assert.deepStrictEqual(cloneUrls, { + nameWithOwner: "pingdotgg/t3code", + url: "https://bitbucket.org/pingdotgg/t3code.git", + sshUrl: "git@bitbucket.org:pingdotgg/t3code.git", + }); + assert.strictEqual(defaultBranch, "main"); + }).pipe(Effect.provide(layer)); +}); + +it.effect("creates repositories through the Bitbucket REST API", () => { + const { execute, layer } = makeLayer({ + response: () => Response.json(repositoryJson), + }); + + return Effect.gen(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + const cloneUrls = yield* bitbucket.createRepository({ + cwd: "/repo", + repository: "pingdotgg/t3code", + visibility: "private", + }); + + assert.deepStrictEqual(cloneUrls, { + nameWithOwner: "pingdotgg/t3code", + url: "https://bitbucket.org/pingdotgg/t3code.git", + sshUrl: "git@bitbucket.org:pingdotgg/t3code.git", + }); + + const request = execute.mock.calls[0]?.[0]; + assert.strictEqual(request?.url, "https://api.test.local/2.0/repositories/pingdotgg/t3code"); + assert.strictEqual(request?.method, "POST"); + assert.ok(request); + const rawBody = (request.body as { readonly body?: Uint8Array }).body; + assert.ok(rawBody); + assert.deepStrictEqual(JSON.parse(new TextDecoder().decode(rawBody)), { + scm: "git", + is_private: true, + }); + }).pipe(Effect.provide(layer)); +}); + +it.effect("creates pull requests using the official REST payload shape", () => { + const { execute, layer } = makeLayer({ + response: () => Response.json(bitbucketPullRequest), + }); + + return Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const bodyFile = yield* fileSystem.makeTempFileScoped({ prefix: 
"bitbucket-pr-body-" }); + yield* fileSystem.writeFileString(bodyFile, "PR body"); + + const bitbucket = yield* BitbucketApi.BitbucketApi; + yield* bitbucket.createPullRequest({ + cwd: "/repo", + baseBranch: "main", + headSelector: "owner:feature/provider", + title: "Provider PR", + bodyFile, + }); + + const request = execute.mock.calls[0]?.[0]; + assert.strictEqual( + request?.url, + "https://api.test.local/2.0/repositories/pingdotgg/t3code/pullrequests", + ); + assert.strictEqual(request?.method, "POST"); + assert.ok(request); + const rawBody = (request.body as { readonly body?: Uint8Array }).body; + assert.ok(rawBody); + assert.deepStrictEqual(JSON.parse(new TextDecoder().decode(rawBody)), { + title: "Provider PR", + description: "PR body", + source: { + branch: { name: "feature/provider" }, + repository: { full_name: "owner/t3code" }, + }, + destination: { + branch: { name: "main" }, + }, + }); + }).pipe(Effect.provide(layer), Effect.scoped); +}); + +it.effect("reports auth status through the Bitbucket REST /user endpoint", () => { + const { layer } = makeLayer({ + response: () => Response.json({ username: "bitbucket-user" }), + }); + + return Effect.gen(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + const auth = yield* bitbucket.probeAuth; + + assert.deepStrictEqual(auth, { + status: "authenticated", + account: Option.some("bitbucket-user"), + host: Option.some("bitbucket.org"), + detail: Option.none(), + }); + }).pipe(Effect.provide(layer)); +}); + +it.effect("checks out same-repository pull requests with the existing Bitbucket remote", () => { + const { git, layer } = makeLayer({ + response: () => + Response.json({ + ...bitbucketPullRequest, + source: { + branch: { name: "feature/source-control" }, + repository: { + full_name: "pingdotgg/t3code", + workspace: { slug: "pingdotgg" }, + }, + }, + }), + }); + + return Effect.gen(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + yield* bitbucket.checkoutPullRequest({ + 
cwd: "/repo", + context: { + provider: { + kind: "bitbucket", + name: "Bitbucket", + baseUrl: "https://bitbucket.org", + }, + remoteName: "origin", + remoteUrl: "git@bitbucket.org:pingdotgg/t3code.git", + }, + reference: "42", + force: true, + }); + + assert.strictEqual(git.ensureRemote.mock.calls.length, 0); + assert.deepStrictEqual(git.fetchRemoteBranch.mock.calls[0]?.[0], { + cwd: "/repo", + remoteName: "origin", + remoteBranch: "feature/source-control", + localBranch: "feature/source-control", + }); + assert.deepStrictEqual(git.setBranchUpstream.mock.calls[0]?.[0], { + cwd: "/repo", + branch: "feature/source-control", + remoteName: "origin", + remoteBranch: "feature/source-control", + }); + assert.deepStrictEqual(git.switchRef.mock.calls[0]?.[0], { + cwd: "/repo", + refName: "feature/source-control", + }); + }).pipe(Effect.provide(layer)); +}); + +it.effect("checks out fork pull requests through an ensured fork remote", () => { + const { git, layer } = makeLayer({ + response: (request) => { + if (request.url.endsWith("/repositories/octocat/t3code")) { + return Response.json({ + ...repositoryJson, + full_name: "octocat/t3code", + links: { + html: { href: "https://bitbucket.org/octocat/t3code" }, + clone: [ + { name: "https", href: "https://bitbucket.org/octocat/t3code.git" }, + { name: "ssh", href: "git@bitbucket.org:octocat/t3code.git" }, + ], + }, + }); + } + return Response.json({ + ...bitbucketPullRequest, + source: { + branch: { name: "main" }, + repository: { + full_name: "octocat/t3code", + workspace: { slug: "octocat" }, + }, + }, + }); + }, + }); + + return Effect.gen(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + yield* bitbucket.checkoutPullRequest({ + cwd: "/repo", + reference: "42", + force: true, + }); + + assert.deepStrictEqual(git.ensureRemote.mock.calls[0]?.[0], { + cwd: "/repo", + preferredName: "octocat", + url: "git@bitbucket.org:octocat/t3code.git", + }); + 
assert.deepStrictEqual(git.fetchRemoteBranch.mock.calls[0]?.[0], { + cwd: "/repo", + remoteName: "octocat", + remoteBranch: "main", + localBranch: "t3code/pr-42/main", + }); + assert.deepStrictEqual(git.setBranchUpstream.mock.calls[0]?.[0], { + cwd: "/repo", + branch: "t3code/pr-42/main", + remoteName: "octocat", + remoteBranch: "main", + }); + assert.deepStrictEqual(git.switchRef.mock.calls[0]?.[0], { + cwd: "/repo", + refName: "t3code/pr-42/main", + }); + }).pipe(Effect.provide(layer)); +}); diff --git a/apps/server/src/sourceControl/BitbucketApi.ts b/apps/server/src/sourceControl/BitbucketApi.ts new file mode 100644 index 00000000000..4f795e864a1 --- /dev/null +++ b/apps/server/src/sourceControl/BitbucketApi.ts @@ -0,0 +1,703 @@ +import { Config, Context, Effect, FileSystem, Layer, Option, Schema } from "effect"; +import { + TrimmedNonEmptyString, + type SourceControlProviderAuth, + type SourceControlRepositoryCloneUrls, + type SourceControlRepositoryVisibility, +} from "@t3tools/contracts"; +import { HttpClient, HttpClientRequest, HttpClientResponse } from "effect/unstable/http"; +import { sanitizeBranchFragment } from "@t3tools/shared/git"; +import { detectSourceControlProviderFromRemoteUrl } from "@t3tools/shared/sourceControl"; + +import * as BitbucketPullRequests from "./bitbucketPullRequests.ts"; +import * as SourceControlProvider from "./SourceControlProvider.ts"; +import * as GitVcsDriver from "../vcs/GitVcsDriver.ts"; +import * as VcsDriverRegistry from "../vcs/VcsDriverRegistry.ts"; + +const DEFAULT_API_BASE_URL = "https://api.bitbucket.org/2.0"; + +const BitbucketApiEnvConfig = Config.all({ + baseUrl: Config.string("T3CODE_BITBUCKET_API_BASE_URL").pipe( + Config.withDefault(DEFAULT_API_BASE_URL), + ), + accessToken: Config.string("T3CODE_BITBUCKET_ACCESS_TOKEN").pipe(Config.option), + email: Config.string("T3CODE_BITBUCKET_EMAIL").pipe(Config.option), + apiToken: Config.string("T3CODE_BITBUCKET_API_TOKEN").pipe(Config.option), +}); + +export class 
BitbucketApiError extends Schema.TaggedErrorClass()( + "BitbucketApiError", + { + operation: Schema.String, + detail: Schema.String, + status: Schema.optional(Schema.Number), + cause: Schema.optional(Schema.Defect), + }, +) { + override get message(): string { + return `Bitbucket API failed in ${this.operation}: ${this.detail}`; + } +} + +const RawBitbucketRepositorySchema = Schema.Struct({ + full_name: TrimmedNonEmptyString, + links: Schema.Struct({ + html: Schema.optional( + Schema.Struct({ + href: TrimmedNonEmptyString, + }), + ), + clone: Schema.optional( + Schema.Array( + Schema.Struct({ + name: TrimmedNonEmptyString, + href: TrimmedNonEmptyString, + }), + ), + ), + }), + mainbranch: Schema.optional( + Schema.NullOr( + Schema.Struct({ + name: TrimmedNonEmptyString, + }), + ), + ), +}); + +const BitbucketUserSchema = Schema.Struct({ + username: Schema.optional(TrimmedNonEmptyString), + display_name: Schema.optional(TrimmedNonEmptyString), + account_id: Schema.optional(TrimmedNonEmptyString), +}); + +export interface BitbucketRepositoryLocator { + readonly workspace: string; + readonly repoSlug: string; +} + +export interface BitbucketApiShape { + readonly probeAuth: Effect.Effect; + readonly listPullRequests: (input: { + readonly cwd: string; + readonly context?: SourceControlProvider.SourceControlProviderContext; + readonly headSelector: string; + readonly source?: SourceControlProvider.SourceControlRefSelector; + readonly state: "open" | "closed" | "merged" | "all"; + readonly limit?: number; + }) => Effect.Effect< + ReadonlyArray, + BitbucketApiError + >; + readonly getPullRequest: (input: { + readonly cwd: string; + readonly context?: SourceControlProvider.SourceControlProviderContext; + readonly reference: string; + }) => Effect.Effect< + BitbucketPullRequests.NormalizedBitbucketPullRequestRecord, + BitbucketApiError + >; + readonly getRepositoryCloneUrls: (input: { + readonly cwd: string; + readonly context?: 
SourceControlProvider.SourceControlProviderContext; + readonly repository: string; + }) => Effect.Effect; + readonly createRepository: (input: { + readonly cwd: string; + readonly repository: string; + readonly visibility: SourceControlRepositoryVisibility; + }) => Effect.Effect; + readonly createPullRequest: (input: { + readonly cwd: string; + readonly context?: SourceControlProvider.SourceControlProviderContext; + readonly baseBranch: string; + readonly headSelector: string; + readonly source?: SourceControlProvider.SourceControlRefSelector; + readonly target?: SourceControlProvider.SourceControlRefSelector; + readonly title: string; + readonly bodyFile: string; + }) => Effect.Effect; + readonly getDefaultBranch: (input: { + readonly cwd: string; + readonly context?: SourceControlProvider.SourceControlProviderContext; + }) => Effect.Effect; + readonly checkoutPullRequest: (input: { + readonly cwd: string; + readonly context?: SourceControlProvider.SourceControlProviderContext; + readonly reference: string; + readonly force?: boolean; + }) => Effect.Effect; +} + +export class BitbucketApi extends Context.Service()( + "t3/source-control/BitbucketApi", +) {} + +function nonEmpty(value: string | undefined): Option.Option { + const trimmed = value?.trim(); + return trimmed === undefined || trimmed.length === 0 ? Option.none() : Option.some(trimmed); +} + +function normalizeChangeRequestId(reference: string): string { + const trimmed = reference.trim().replace(/^#/, ""); + const urlMatch = /(?:pull-requests|pullrequests|pull-request|pull|pr)\/(\d+)(?:\D.*)?$/i.exec( + trimmed, + ); + return urlMatch?.[1] ?? 
trimmed; +} + +function sourceWorkspace(input: { + readonly headSelector: string; + readonly source?: SourceControlProvider.SourceControlRefSelector; +}): string | undefined { + if (input.source?.owner) return input.source.owner; + return SourceControlProvider.parseSourceControlOwnerRef(input.headSelector)?.owner; +} + +function toBitbucketStates(state: "open" | "closed" | "merged" | "all"): ReadonlyArray { + switch (state) { + case "open": + return ["OPEN"]; + case "closed": + return ["DECLINED", "SUPERSEDED"]; + case "merged": + return ["MERGED"]; + case "all": + return ["OPEN", "MERGED", "DECLINED", "SUPERSEDED"]; + } +} + +function bitbucketQueryString(filters: ReadonlyArray): string { + return filters.join(" AND "); +} + +function bitbucketStateFilter(states: ReadonlyArray): string { + return states.length === 1 + ? `state = "${states[0]}"` + : `(${states.map((state) => `state = "${state}"`).join(" OR ")})`; +} + +function parseBitbucketRepositorySlug(value: string): BitbucketRepositoryLocator | null { + const normalized = value.trim().replace(/\.git$/u, ""); + const parts = normalized.split("/").filter((part) => part.length > 0); + if (parts.length < 2) return null; + const workspace = parts.at(-2); + const repoSlug = parts.at(-1); + return workspace && repoSlug ? { workspace, repoSlug } : null; +} + +function requireRepositoryLocator( + operation: string, + repository: string, +): Effect.Effect { + const locator = parseBitbucketRepositorySlug(repository); + return locator + ? Effect.succeed(locator) + : Effect.fail( + new BitbucketApiError({ + operation, + detail: "Bitbucket repositories must be specified as workspace/repository.", + }), + ); +} + +function parseBitbucketRemoteUrl(remoteUrl: string): BitbucketRepositoryLocator | null { + const trimmed = remoteUrl.trim(); + if (trimmed.startsWith("git@")) { + const pathStart = trimmed.indexOf(":"); + return pathStart < 0 ? 
null : parseBitbucketRepositorySlug(trimmed.slice(pathStart + 1)); + } + + try { + return parseBitbucketRepositorySlug(new URL(trimmed).pathname); + } catch { + return null; + } +} + +function normalizeRepositoryCloneUrls( + raw: Schema.Schema.Type, +): SourceControlRepositoryCloneUrls { + const httpClone = + raw.links.clone?.find((entry) => entry.name.toLowerCase() === "https")?.href ?? + raw.links.html?.href; + const sshClone = raw.links.clone?.find((entry) => entry.name.toLowerCase() === "ssh")?.href; + + return { + nameWithOwner: raw.full_name, + url: httpClone ?? raw.links.html?.href ?? raw.full_name, + sshUrl: sshClone ?? httpClone ?? raw.full_name, + }; +} + +function shouldPreferSshRemote(originRemoteUrl: string | null): boolean { + const trimmed = originRemoteUrl?.trim() ?? ""; + return trimmed.startsWith("git@") || trimmed.startsWith("ssh://"); +} + +function selectCloneUrl(input: { + readonly cloneUrls: SourceControlRepositoryCloneUrls; + readonly originRemoteUrl: string | null; +}): string { + return shouldPreferSshRemote(input.originRemoteUrl) + ? input.cloneUrls.sshUrl + : input.cloneUrls.url; +} + +function checkoutBranchName(input: { + readonly pullRequestId: number; + readonly headBranch: string; + readonly isCrossRepository: boolean; +}): string { + if (!input.isCrossRepository) { + return input.headBranch; + } + + return `t3code/pr-${input.pullRequestId}/${sanitizeBranchFragment(input.headBranch)}`; +} + +function repositoryNameWithOwner( + repository: Schema.Schema.Type< + typeof BitbucketPullRequests.BitbucketPullRequestSchema + >["source"]["repository"], +): string | null { + const fullName = repository?.full_name?.trim() ?? ""; + return fullName.length > 0 ? 
fullName : null; +} + +function repositoryOwnerName(repositoryName: string): string { + return repositoryName.split("/")[0]?.trim() || "bitbucket"; +} + +function authFromConfig( + config: Config.Success, +): SourceControlProviderAuth { + if (Option.isSome(config.accessToken)) { + return { + status: "unknown", + account: Option.none(), + host: Option.some("bitbucket.org"), + detail: Option.some("Bitbucket access token is configured."), + }; + } + + if (Option.isSome(config.email) && Option.isSome(config.apiToken)) { + return { + status: "unknown", + account: config.email, + host: Option.some("bitbucket.org"), + detail: Option.some("Bitbucket API token is configured."), + }; + } + + return { + status: "unauthenticated", + account: Option.none(), + host: Option.some("bitbucket.org"), + detail: Option.some( + "Set T3CODE_BITBUCKET_EMAIL and T3CODE_BITBUCKET_API_TOKEN, or T3CODE_BITBUCKET_ACCESS_TOKEN.", + ), + }; +} + +function requestError(operation: string, cause: unknown): BitbucketApiError { + return new BitbucketApiError({ + operation, + detail: cause instanceof Error ? cause.message : String(cause), + cause, + }); +} + +function isBitbucketApiError(cause: unknown): cause is BitbucketApiError { + return Schema.is(BitbucketApiError)(cause); +} + +function responseError( + operation: string, + response: HttpClientResponse.HttpClientResponse, +): Effect.Effect { + return response.text.pipe( + Effect.catch(() => Effect.succeed("")), + Effect.flatMap((body) => + Effect.fail( + new BitbucketApiError({ + operation, + status: response.status, + detail: + body.trim().length > 0 + ? 
`Bitbucket returned HTTP ${response.status}: ${body.trim()}` + : `Bitbucket returned HTTP ${response.status}.`, + }), + ), + ), + ); +} + +export const make = Effect.fn("makeBitbucketApi")(function* () { + const config = yield* BitbucketApiEnvConfig; + const httpClient = yield* HttpClient.HttpClient; + const fileSystem = yield* FileSystem.FileSystem; + const git = yield* GitVcsDriver.GitVcsDriver; + const vcsRegistry = yield* VcsDriverRegistry.VcsDriverRegistry; + + const apiUrl = (path: string) => `${config.baseUrl.replace(/\/+$/u, "")}${path}`; + + const withAuth = (request: HttpClientRequest.HttpClientRequest) => { + if (Option.isSome(config.accessToken)) { + return request.pipe(HttpClientRequest.bearerToken(config.accessToken.value)); + } + if (Option.isSome(config.email) && Option.isSome(config.apiToken)) { + return request.pipe(HttpClientRequest.basicAuth(config.email.value, config.apiToken.value)); + } + return request; + }; + + const decodeResponse = ( + operation: string, + schema: S, + response: HttpClientResponse.HttpClientResponse, + ): Effect.Effect => + HttpClientResponse.matchStatus({ + "2xx": (success) => + HttpClientResponse.schemaBodyJson(schema)(success).pipe( + Effect.mapError( + (cause) => + new BitbucketApiError({ + operation, + detail: "Bitbucket returned invalid JSON for the requested resource.", + cause, + }), + ), + ), + orElse: (failed) => responseError(operation, failed), + })(response); + + const executeJson = ( + operation: string, + request: HttpClientRequest.HttpClientRequest, + schema: S, + ): Effect.Effect => + httpClient.execute(withAuth(request.pipe(HttpClientRequest.acceptJson))).pipe( + Effect.mapError((cause) => requestError(operation, cause)), + Effect.flatMap((response) => decodeResponse(operation, schema, response)), + ); + + const resolveRepository = Effect.fn("BitbucketApi.resolveRepository")(function* (input: { + readonly cwd: string; + readonly context?: SourceControlProvider.SourceControlProviderContext; + readonly 
repository?: string; + }) { + const fromRepository = + input.repository !== undefined ? parseBitbucketRepositorySlug(input.repository) : null; + if (fromRepository) return fromRepository; + + const fromContext = + input.context?.provider.kind === "bitbucket" + ? parseBitbucketRemoteUrl(input.context.remoteUrl) + : null; + if (fromContext) return fromContext; + + const handle = yield* vcsRegistry.resolve({ cwd: input.cwd }).pipe( + Effect.mapError( + (cause) => + new BitbucketApiError({ + operation: "resolveRepository", + detail: `Failed to resolve VCS repository for ${input.cwd}.`, + cause, + }), + ), + ); + const remotes = yield* handle.driver.listRemotes(input.cwd).pipe( + Effect.mapError( + (cause) => + new BitbucketApiError({ + operation: "resolveRepository", + detail: `Failed to list remotes for ${input.cwd}.`, + cause, + }), + ), + ); + + for (const remote of remotes.remotes) { + if (detectSourceControlProviderFromRemoteUrl(remote.url)?.kind !== "bitbucket") continue; + const parsed = parseBitbucketRemoteUrl(remote.url); + if (parsed) return parsed; + } + + return yield* new BitbucketApiError({ + operation: "resolveRepository", + detail: `No Bitbucket repository remote was detected for ${input.cwd}.`, + }); + }); + + const getRepository = (input: { + readonly cwd: string; + readonly context?: SourceControlProvider.SourceControlProviderContext; + readonly repository?: string; + }) => + resolveRepository(input).pipe( + Effect.flatMap((repository) => + executeJson( + "getRepository", + HttpClientRequest.get( + apiUrl( + `/repositories/${encodeURIComponent(repository.workspace)}/${encodeURIComponent(repository.repoSlug)}`, + ), + ), + RawBitbucketRepositorySchema, + ), + ), + ); + + const getRawPullRequestFromRepository = ( + repository: BitbucketRepositoryLocator, + reference: string, + ) => + executeJson( + "getPullRequest", + HttpClientRequest.get( + apiUrl( + 
`/repositories/${encodeURIComponent(repository.workspace)}/${encodeURIComponent(repository.repoSlug)}/pullrequests/${encodeURIComponent(normalizeChangeRequestId(reference))}`, + ), + ), + BitbucketPullRequests.BitbucketPullRequestSchema, + ); + + const getRawPullRequest = (input: { + readonly cwd: string; + readonly context?: SourceControlProvider.SourceControlProviderContext; + readonly reference: string; + }) => + resolveRepository(input).pipe( + Effect.flatMap((repository) => getRawPullRequestFromRepository(repository, input.reference)), + ); + + const readConfigValueNullable = (cwd: string, key: string) => + git.readConfigValue(cwd, key).pipe(Effect.catch(() => Effect.succeed(null))); + + const resolveCheckoutRemote = Effect.fn("BitbucketApi.resolveCheckoutRemote")(function* (input: { + readonly cwd: string; + readonly context?: SourceControlProvider.SourceControlProviderContext; + readonly destinationRepository: BitbucketRepositoryLocator; + readonly sourceRepositoryName: string; + readonly isCrossRepository: boolean; + }) { + if ( + input.context?.provider.kind === "bitbucket" && + !input.isCrossRepository && + parseBitbucketRemoteUrl(input.context.remoteUrl) !== null + ) { + return input.context.remoteName; + } + + if (!input.isCrossRepository) { + const remoteName = yield* git + .resolvePrimaryRemoteName(input.cwd) + .pipe(Effect.catch(() => Effect.succeed(null))); + if (remoteName) return remoteName; + } + + const cloneUrls = yield* getRepository({ + cwd: input.cwd, + repository: input.sourceRepositoryName, + ...(input.context ? { context: input.context } : {}), + }).pipe(Effect.map(normalizeRepositoryCloneUrls)); + const originRemoteUrl = yield* readConfigValueNullable(input.cwd, "remote.origin.url"); + return yield* git.ensureRemote({ + cwd: input.cwd, + preferredName: input.isCrossRepository + ? 
repositoryOwnerName(input.sourceRepositoryName) + : input.destinationRepository.workspace, + url: selectCloneUrl({ cloneUrls, originRemoteUrl }), + }); + }); + + return BitbucketApi.of({ + probeAuth: executeJson( + "probeAuth", + HttpClientRequest.get(apiUrl("/user")), + BitbucketUserSchema, + ).pipe( + Effect.map((user) => ({ + status: "authenticated" as const, + account: nonEmpty(user.username ?? user.display_name ?? user.account_id), + host: Option.some("bitbucket.org"), + detail: Option.none(), + })), + Effect.catch(() => Effect.succeed(authFromConfig(config))), + ), + listPullRequests: (input) => + resolveRepository(input).pipe( + Effect.flatMap((repository) => { + const states = toBitbucketStates(input.state); + const query: Record> = { + pagelen: String(Math.max(1, Math.min(input.limit ?? 20, 50))), + sort: "-updated_on", + q: bitbucketQueryString([ + `source.branch.name = "${SourceControlProvider.sourceBranch(input).replaceAll('"', '\\"')}"`, + bitbucketStateFilter(states), + ]), + state: states, + }; + + return executeJson( + "listPullRequests", + HttpClientRequest.get( + apiUrl( + `/repositories/${encodeURIComponent(repository.workspace)}/${encodeURIComponent(repository.repoSlug)}/pullrequests`, + ), + { urlParams: query }, + ), + BitbucketPullRequests.BitbucketPullRequestListSchema, + ); + }), + Effect.map((list) => + list.values.map(BitbucketPullRequests.normalizeBitbucketPullRequestRecord), + ), + ), + getPullRequest: (input) => + getRawPullRequest(input).pipe( + Effect.map(BitbucketPullRequests.normalizeBitbucketPullRequestRecord), + ), + getRepositoryCloneUrls: (input) => + getRepository(input).pipe(Effect.map(normalizeRepositoryCloneUrls)), + createRepository: (input) => + requireRepositoryLocator("createRepository", input.repository).pipe( + Effect.flatMap((repository) => + executeJson( + "createRepository", + HttpClientRequest.post( + apiUrl( + `/repositories/${encodeURIComponent(repository.workspace)}/${encodeURIComponent(repository.repoSlug)}`, 
+ ), + ).pipe( + HttpClientRequest.bodyJsonUnsafe({ + scm: "git", + is_private: input.visibility === "private", + }), + ), + RawBitbucketRepositorySchema, + ), + ), + Effect.map(normalizeRepositoryCloneUrls), + ), + createPullRequest: (input) => + Effect.gen(function* () { + const repository = yield* resolveRepository(input); + const description = yield* fileSystem.readFileString(input.bodyFile).pipe( + Effect.mapError( + (cause) => + new BitbucketApiError({ + operation: "createPullRequest", + detail: `Failed to read pull request body file ${input.bodyFile}.`, + cause, + }), + ), + ); + const sourceOwner = sourceWorkspace(input); + const body = { + title: input.title, + description, + source: { + branch: { + name: SourceControlProvider.sourceBranch(input), + }, + ...(sourceOwner + ? { + repository: { + full_name: `${sourceOwner}/${input.source?.repository ?? repository.repoSlug}`, + }, + } + : {}), + }, + destination: { + branch: { + name: input.target?.refName ?? input.baseBranch, + }, + }, + }; + + yield* executeJson( + "createPullRequest", + HttpClientRequest.post( + apiUrl( + `/repositories/${encodeURIComponent(repository.workspace)}/${encodeURIComponent(repository.repoSlug)}/pullrequests`, + ), + ).pipe(HttpClientRequest.bodyJsonUnsafe(body)), + BitbucketPullRequests.BitbucketPullRequestSchema, + ); + }), + getDefaultBranch: (input) => + getRepository(input).pipe(Effect.map((repository) => repository.mainbranch?.name ?? null)), + // Bitbucket Cloud pull requests are Git-backed and Bitbucket does not provide + // an official checkout CLI. This provider-local path uses GitVcsDriver as a + // narrow escape hatch to materialize Bitbucket PR refs. Do not generalize this + // as the source-control provider model: if we support non-Git-compatible + // hosting providers or native JJ/Sapling checkout flows, move this into a + // VCS-specific change-request checkout capability. 
+ checkoutPullRequest: (input) => + Effect.gen(function* () { + const destinationRepository = yield* resolveRepository(input); + const pullRequest = yield* getRawPullRequestFromRepository( + destinationRepository, + input.reference, + ); + const destinationRepositoryName = + repositoryNameWithOwner(pullRequest.destination.repository) ?? + `${destinationRepository.workspace}/${destinationRepository.repoSlug}`; + const sourceRepositoryName = + repositoryNameWithOwner(pullRequest.source.repository) ?? destinationRepositoryName; + const isCrossRepository = sourceRepositoryName !== destinationRepositoryName; + const remoteName = yield* resolveCheckoutRemote({ + cwd: input.cwd, + destinationRepository, + sourceRepositoryName, + isCrossRepository, + ...(input.context ? { context: input.context } : {}), + }); + const remoteBranch = pullRequest.source.branch.name; + const localBranch = checkoutBranchName({ + pullRequestId: pullRequest.id, + headBranch: remoteBranch, + isCrossRepository, + }); + const localBranchNames = yield* git.listLocalBranchNames(input.cwd); + const localBranchExists = localBranchNames.includes(localBranch); + + if (input.force === true || !localBranchExists) { + yield* git.fetchRemoteBranch({ + cwd: input.cwd, + remoteName, + remoteBranch, + localBranch, + }); + } else { + yield* git.fetchRemoteTrackingBranch({ + cwd: input.cwd, + remoteName, + remoteBranch, + }); + } + + yield* git.setBranchUpstream({ + cwd: input.cwd, + branch: localBranch, + remoteName, + remoteBranch, + }); + yield* Effect.scoped(git.switchRef({ cwd: input.cwd, refName: localBranch })); + }).pipe( + Effect.mapError((cause) => + isBitbucketApiError(cause) + ? cause + : new BitbucketApiError({ + operation: "checkoutPullRequest", + detail: cause instanceof Error ? 
cause.message : String(cause), + cause, + }), + ), + ), + }); +}); + +export const layer = Layer.effect(BitbucketApi, make()); diff --git a/apps/server/src/sourceControl/BitbucketSourceControlProvider.test.ts b/apps/server/src/sourceControl/BitbucketSourceControlProvider.test.ts new file mode 100644 index 00000000000..4bf658f5685 --- /dev/null +++ b/apps/server/src/sourceControl/BitbucketSourceControlProvider.test.ts @@ -0,0 +1,126 @@ +import { assert, it } from "@effect/vitest"; +import { Effect, Layer, Option } from "effect"; + +import * as BitbucketApi from "./BitbucketApi.ts"; +import * as BitbucketSourceControlProvider from "./BitbucketSourceControlProvider.ts"; + +function makeProvider(bitbucket: Partial) { + return BitbucketSourceControlProvider.make().pipe( + Effect.provide(Layer.mock(BitbucketApi.BitbucketApi)(bitbucket)), + ); +} + +it.effect("maps Bitbucket PR summaries into provider-neutral change requests", () => + Effect.gen(function* () { + const provider = yield* makeProvider({ + getPullRequest: () => + Effect.succeed({ + number: 42, + title: "Add Bitbucket provider", + url: "https://bitbucket.org/pingdotgg/t3code/pull-requests/42", + baseRefName: "main", + headRefName: "feature/source-control", + state: "open", + updatedAt: Option.none(), + isCrossRepository: true, + headRepositoryNameWithOwner: "fork/t3code", + headRepositoryOwnerLogin: "fork", + }), + }); + + const changeRequest = yield* provider.getChangeRequest({ + cwd: "/repo", + reference: "42", + }); + + assert.deepStrictEqual(changeRequest, { + provider: "bitbucket", + number: 42, + title: "Add Bitbucket provider", + url: "https://bitbucket.org/pingdotgg/t3code/pull-requests/42", + baseRefName: "main", + headRefName: "feature/source-control", + state: "open", + updatedAt: Option.none(), + isCrossRepository: true, + headRepositoryNameWithOwner: "fork/t3code", + headRepositoryOwnerLogin: "fork", + }); + }), +); + +it.effect("lists Bitbucket PRs through provider-neutral input names", () => + 
Effect.gen(function* () { + let listInput: Parameters[0] | null = null; + const provider = yield* makeProvider({ + listPullRequests: (input) => { + listInput = input; + return Effect.succeed([]); + }, + }); + + yield* provider.listChangeRequests({ + cwd: "/repo", + headSelector: "feature/provider", + state: "all", + limit: 10, + }); + + assert.deepStrictEqual(listInput, { + cwd: "/repo", + headSelector: "feature/provider", + state: "all", + limit: 10, + }); + }), +); + +it.effect("creates Bitbucket PRs through provider-neutral input names", () => + Effect.gen(function* () { + let createInput: Parameters[0] | null = + null; + const provider = yield* makeProvider({ + createPullRequest: (input) => { + createInput = input; + return Effect.void; + }, + }); + + yield* provider.createChangeRequest({ + cwd: "/repo", + baseRefName: "main", + headSelector: "owner:feature/provider", + title: "Provider PR", + bodyFile: "/tmp/body.md", + }); + + assert.deepStrictEqual(createInput, { + cwd: "/repo", + baseBranch: "main", + headSelector: "owner:feature/provider", + source: { + owner: "owner", + refName: "feature/provider", + }, + title: "Provider PR", + bodyFile: "/tmp/body.md", + }); + }), +); + +it.effect("uses Bitbucket API repository detection for default branch lookup", () => + Effect.gen(function* () { + let cwdInput: string | null = null; + const provider = yield* makeProvider({ + getDefaultBranch: (input) => { + cwdInput = input.cwd; + return Effect.succeed("main"); + }, + }); + + const defaultBranch = yield* provider.getDefaultBranch({ cwd: "/repo" }); + + assert.strictEqual(defaultBranch, "main"); + assert.strictEqual(cwdInput, "/repo"); + }), +); diff --git a/apps/server/src/sourceControl/BitbucketSourceControlProvider.ts b/apps/server/src/sourceControl/BitbucketSourceControlProvider.ts new file mode 100644 index 00000000000..ede80e921dc --- /dev/null +++ b/apps/server/src/sourceControl/BitbucketSourceControlProvider.ts @@ -0,0 +1,126 @@ +import { Effect, Layer, Option 
} from "effect"; +import { SourceControlProviderError, type ChangeRequest } from "@t3tools/contracts"; + +import * as BitbucketApi from "./BitbucketApi.ts"; +import * as BitbucketPullRequests from "./bitbucketPullRequests.ts"; +import * as SourceControlProvider from "./SourceControlProvider.ts"; +import type * as SourceControlProviderDiscovery from "./SourceControlProviderDiscovery.ts"; + +function providerError( + operation: string, + cause: BitbucketApi.BitbucketApiError, +): SourceControlProviderError { + return new SourceControlProviderError({ + provider: "bitbucket", + operation, + detail: cause.detail, + cause, + }); +} + +function toChangeRequest( + summary: BitbucketPullRequests.NormalizedBitbucketPullRequestRecord, +): ChangeRequest { + return { + provider: "bitbucket", + number: summary.number, + title: summary.title, + url: summary.url, + baseRefName: summary.baseRefName, + headRefName: summary.headRefName, + state: summary.state, + updatedAt: summary.updatedAt ?? Option.none(), + ...(summary.isCrossRepository !== undefined + ? { isCrossRepository: summary.isCrossRepository } + : {}), + ...(summary.headRepositoryNameWithOwner !== undefined + ? { headRepositoryNameWithOwner: summary.headRepositoryNameWithOwner } + : {}), + ...(summary.headRepositoryOwnerLogin !== undefined + ? { headRepositoryOwnerLogin: summary.headRepositoryOwnerLogin } + : {}), + }; +} + +export const make = Effect.fn("makeBitbucketSourceControlProvider")(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + + return SourceControlProvider.SourceControlProvider.of({ + kind: "bitbucket", + listChangeRequests: (input) => { + const source = SourceControlProvider.sourceControlRefFromInput(input); + return bitbucket + .listPullRequests({ + cwd: input.cwd, + ...(input.context ? { context: input.context } : {}), + headSelector: input.headSelector, + ...(source ? { source } : {}), + state: input.state, + ...(input.limit !== undefined ? 
{ limit: input.limit } : {}), + }) + .pipe( + Effect.map((items) => items.map(toChangeRequest)), + Effect.mapError((error) => providerError("listChangeRequests", error)), + ); + }, + getChangeRequest: (input) => + bitbucket.getPullRequest(input).pipe( + Effect.map(toChangeRequest), + Effect.mapError((error) => providerError("getChangeRequest", error)), + ), + createChangeRequest: (input) => { + const source = SourceControlProvider.sourceControlRefFromInput(input); + return bitbucket + .createPullRequest({ + cwd: input.cwd, + ...(input.context ? { context: input.context } : {}), + baseBranch: input.baseRefName, + headSelector: input.headSelector, + ...(source ? { source } : {}), + ...(input.target ? { target: input.target } : {}), + title: input.title, + bodyFile: input.bodyFile, + }) + .pipe(Effect.mapError((error) => providerError("createChangeRequest", error))); + }, + getRepositoryCloneUrls: (input) => + bitbucket + .getRepositoryCloneUrls(input) + .pipe(Effect.mapError((error) => providerError("getRepositoryCloneUrls", error))), + createRepository: (input) => + bitbucket + .createRepository(input) + .pipe(Effect.mapError((error) => providerError("createRepository", error))), + getDefaultBranch: (input) => + bitbucket + .getDefaultBranch({ + cwd: input.cwd, + ...(input.context ? { context: input.context } : {}), + }) + .pipe(Effect.mapError((error) => providerError("getDefaultBranch", error))), + checkoutChangeRequest: (input) => + bitbucket + .checkoutPullRequest({ + cwd: input.cwd, + ...(input.context ? { context: input.context } : {}), + reference: input.reference, + ...(input.force !== undefined ? 
{ force: input.force } : {}), + }) + .pipe(Effect.mapError((error) => providerError("checkoutChangeRequest", error))), + }); +}); + +export const layer = Layer.effect(SourceControlProvider.SourceControlProvider, make()); + +export const makeDiscovery = Effect.fn("makeBitbucketSourceControlProviderDiscovery")(function* () { + const bitbucket = yield* BitbucketApi.BitbucketApi; + + return { + type: "api", + kind: "bitbucket", + label: "Bitbucket", + installHint: + "Set T3CODE_BITBUCKET_EMAIL and T3CODE_BITBUCKET_API_TOKEN on the server (use a Bitbucket API token with pull request and repository scopes).", + probeAuth: bitbucket.probeAuth, + } satisfies SourceControlProviderDiscovery.SourceControlApiDiscoverySpec; +}); diff --git a/apps/server/src/sourceControl/GitHubCli.test.ts b/apps/server/src/sourceControl/GitHubCli.test.ts new file mode 100644 index 00000000000..778e0c4962e --- /dev/null +++ b/apps/server/src/sourceControl/GitHubCli.test.ts @@ -0,0 +1,291 @@ +import { assert, it, afterEach, describe, expect, vi } from "@effect/vitest"; +import { Effect, Layer } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; +import { VcsProcessExitError, type VcsError } from "@t3tools/contracts"; + +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as GitHubCli from "./GitHubCli.ts"; + +const processOutput = (stdout: string): VcsProcess.VcsProcessOutput => ({ + exitCode: ChildProcessSpawner.ExitCode(0), + stdout, + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, +}); + +const mockRun = vi.fn(); + +const layer = GitHubCli.layer.pipe( + Layer.provide( + Layer.mock(VcsProcess.VcsProcess)({ + run: mockRun, + }), + ), +); + +afterEach(() => { + mockRun.mockReset(); +}); + +describe("GitHubCli.layer", () => { + it.effect("parses pull request view output", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify({ + number: 42, + title: "Add PR thread creation", + url: 
"https://github.com/pingdotgg/codething-mvp/pull/42", + baseRefName: "main", + headRefName: "feature/pr-threads", + state: "OPEN", + mergedAt: null, + isCrossRepository: true, + headRepository: { + nameWithOwner: "octocat/codething-mvp", + }, + headRepositoryOwner: { + login: "octocat", + }, + }), + ), + ), + ); + + const gh = yield* GitHubCli.GitHubCli; + const result = yield* gh.getPullRequest({ + cwd: "/repo", + reference: "#42", + }); + + assert.deepStrictEqual(result, { + number: 42, + title: "Add PR thread creation", + url: "https://github.com/pingdotgg/codething-mvp/pull/42", + baseRefName: "main", + headRefName: "feature/pr-threads", + state: "open", + isCrossRepository: true, + headRepositoryNameWithOwner: "octocat/codething-mvp", + headRepositoryOwnerLogin: "octocat", + }); + expect(mockRun).toHaveBeenCalledWith({ + operation: "GitHubCli.execute", + command: "gh", + args: [ + "pr", + "view", + "#42", + "--json", + "number,title,url,baseRefName,headRefName,state,mergedAt,isCrossRepository,headRepository,headRepositoryOwner", + ], + cwd: "/repo", + timeoutMs: 30_000, + }); + }).pipe(Effect.provide(layer)), + ); + + it.effect("trims pull request fields decoded from gh json", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify({ + number: 42, + title: " Add PR thread creation \n", + url: " https://github.com/pingdotgg/codething-mvp/pull/42 ", + baseRefName: " main ", + headRefName: "\tfeature/pr-threads\t", + state: "OPEN", + mergedAt: null, + isCrossRepository: true, + headRepository: { + nameWithOwner: " octocat/codething-mvp ", + }, + headRepositoryOwner: { + login: " octocat ", + }, + }), + ), + ), + ); + + const gh = yield* GitHubCli.GitHubCli; + const result = yield* gh.getPullRequest({ + cwd: "/repo", + reference: "#42", + }); + + assert.deepStrictEqual(result, { + number: 42, + title: "Add PR thread creation", + url: "https://github.com/pingdotgg/codething-mvp/pull/42", + baseRefName: 
"main", + headRefName: "feature/pr-threads", + state: "open", + isCrossRepository: true, + headRepositoryNameWithOwner: "octocat/codething-mvp", + headRepositoryOwnerLogin: "octocat", + }); + }).pipe(Effect.provide(layer)), + ); + + it.effect("skips invalid entries when parsing pr lists", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify([ + { + number: 0, + title: "invalid", + url: "https://github.com/pingdotgg/codething-mvp/pull/0", + baseRefName: "main", + headRefName: "feature/invalid", + }, + { + number: 43, + title: " Valid PR ", + url: " https://github.com/pingdotgg/codething-mvp/pull/43 ", + baseRefName: " main ", + headRefName: " feature/pr-list ", + headRepository: { + nameWithOwner: " ", + }, + headRepositoryOwner: { + login: " ", + }, + }, + ]), + ), + ), + ); + + const gh = yield* GitHubCli.GitHubCli; + const result = yield* gh.listOpenPullRequests({ + cwd: "/repo", + headSelector: "feature/pr-list", + }); + + assert.deepStrictEqual(result, [ + { + number: 43, + title: "Valid PR", + url: "https://github.com/pingdotgg/codething-mvp/pull/43", + baseRefName: "main", + headRefName: "feature/pr-list", + state: "open", + }, + ]); + }).pipe(Effect.provide(layer)), + ); + + it.effect("reads repository clone URLs", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify({ + nameWithOwner: "octocat/codething-mvp", + url: "https://github.com/octocat/codething-mvp", + sshUrl: "git@github.com:octocat/codething-mvp.git", + }), + ), + ), + ); + + const gh = yield* GitHubCli.GitHubCli; + const result = yield* gh.getRepositoryCloneUrls({ + cwd: "/repo", + repository: "octocat/codething-mvp", + }); + + assert.deepStrictEqual(result, { + nameWithOwner: "octocat/codething-mvp", + url: "https://github.com/octocat/codething-mvp", + sshUrl: "git@github.com:octocat/codething-mvp.git", + }); + }).pipe(Effect.provide(layer)), + ); + + 
it.effect("creates repositories and parses clone URLs from create output", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + "✓ Created repository octocat/codething-mvp on github.com\nhttps://github.com/octocat/codething-mvp\n", + ), + ), + ); + + const gh = yield* GitHubCli.GitHubCli; + const result = yield* gh.createRepository({ + cwd: "/repo", + repository: "octocat/codething-mvp", + visibility: "private", + }); + + assert.deepStrictEqual(result, { + nameWithOwner: "octocat/codething-mvp", + url: "https://github.com/octocat/codething-mvp", + sshUrl: "git@github.com:octocat/codething-mvp.git", + }); + expect(mockRun).toHaveBeenCalledTimes(1); + expect(mockRun).toHaveBeenNthCalledWith(1, { + operation: "GitHubCli.execute", + command: "gh", + args: ["repo", "create", "octocat/codething-mvp", "--private"], + cwd: "/repo", + timeoutMs: 30_000, + }); + }).pipe(Effect.provide(layer)), + ); + + it.effect("falls back to constructed URLs when create output omits a URL", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce(Effect.succeed(processOutput(""))); + + const gh = yield* GitHubCli.GitHubCli; + const result = yield* gh.createRepository({ + cwd: "/repo", + repository: "octocat/codething-mvp", + visibility: "private", + }); + + assert.deepStrictEqual(result, { + nameWithOwner: "octocat/codething-mvp", + url: "https://github.com/octocat/codething-mvp", + sshUrl: "git@github.com:octocat/codething-mvp.git", + }); + }).pipe(Effect.provide(layer)), + ); + + it.effect("surfaces a friendly error when the pull request is not found", () => + Effect.gen(function* () { + mockRun.mockReturnValueOnce( + Effect.fail( + new VcsProcessExitError({ + operation: "GitHubCli.execute", + command: "gh pr view", + cwd: "/repo", + exitCode: 1, + detail: + "GraphQL: Could not resolve to a PullRequest with the number of 4888. 
(repository.pullRequest)", + }), + ), + ); + + const gh = yield* GitHubCli.GitHubCli; + const error = yield* gh + .getPullRequest({ + cwd: "/repo", + reference: "4888", + }) + .pipe(Effect.flip); + + assert.equal(error.message.includes("Pull request not found"), true); + }).pipe(Effect.provide(layer)), + ); +}); diff --git a/apps/server/src/sourceControl/GitHubCli.ts b/apps/server/src/sourceControl/GitHubCli.ts new file mode 100644 index 00000000000..fe83d41ef43 --- /dev/null +++ b/apps/server/src/sourceControl/GitHubCli.ts @@ -0,0 +1,370 @@ +import { Context, Effect, Layer, Result, Schema, SchemaIssue } from "effect"; + +import { + TrimmedNonEmptyString, + type SourceControlRepositoryVisibility, + type VcsError, +} from "@t3tools/contracts"; + +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as GitHubPullRequests from "./gitHubPullRequests.ts"; + +const DEFAULT_TIMEOUT_MS = 30_000; + +export class GitHubCliError extends Schema.TaggedErrorClass()("GitHubCliError", { + operation: Schema.String, + detail: Schema.String, + cause: Schema.optional(Schema.Defect), +}) { + override get message(): string { + return `GitHub CLI failed in ${this.operation}: ${this.detail}`; + } +} + +export interface GitHubPullRequestSummary { + readonly number: number; + readonly title: string; + readonly url: string; + readonly baseRefName: string; + readonly headRefName: string; + readonly state?: "open" | "closed" | "merged"; + readonly isCrossRepository?: boolean; + readonly headRepositoryNameWithOwner?: string | null; + readonly headRepositoryOwnerLogin?: string | null; +} + +export interface GitHubRepositoryCloneUrls { + readonly nameWithOwner: string; + readonly url: string; + readonly sshUrl: string; +} + +export interface GitHubCliShape { + readonly execute: (input: { + readonly cwd: string; + readonly args: ReadonlyArray; + readonly timeoutMs?: number; + }) => Effect.Effect; + + readonly listOpenPullRequests: (input: { + readonly cwd: string; + readonly headSelector: 
string; + readonly limit?: number; + }) => Effect.Effect, GitHubCliError>; + + readonly getPullRequest: (input: { + readonly cwd: string; + readonly reference: string; + }) => Effect.Effect; + + readonly getRepositoryCloneUrls: (input: { + readonly cwd: string; + readonly repository: string; + }) => Effect.Effect; + + readonly createRepository: (input: { + readonly cwd: string; + readonly repository: string; + readonly visibility: SourceControlRepositoryVisibility; + }) => Effect.Effect; + + readonly createPullRequest: (input: { + readonly cwd: string; + readonly baseBranch: string; + readonly headSelector: string; + readonly title: string; + readonly bodyFile: string; + }) => Effect.Effect; + + readonly getDefaultBranch: (input: { + readonly cwd: string; + }) => Effect.Effect; + + readonly checkoutPullRequest: (input: { + readonly cwd: string; + readonly reference: string; + readonly force?: boolean; + }) => Effect.Effect; +} + +export class GitHubCli extends Context.Service()( + "t3/source-control/GitHubCli", +) {} + +function errorText(error: VcsError | unknown): string { + if (typeof error === "object" && error !== null) { + const tag = "_tag" in error && typeof error._tag === "string" ? error._tag : ""; + const detail = "detail" in error && typeof error.detail === "string" ? error.detail : ""; + const message = "message" in error && typeof error.message === "string" ? 
error.message : ""; + return [tag, detail, message].filter(Boolean).join("\n"); + } + + return String(error); +} + +function normalizeGitHubCliError( + operation: "execute" | "stdout", + error: VcsError | unknown, +): GitHubCliError { + const text = errorText(error); + const lower = text.toLowerCase(); + + if (lower.includes("command not found: gh") || lower.includes("enoent")) { + return new GitHubCliError({ + operation, + detail: "GitHub CLI (`gh`) is required but not available on PATH.", + cause: error, + }); + } + + if ( + lower.includes("authentication failed") || + lower.includes("not logged in") || + lower.includes("gh auth login") || + lower.includes("no oauth token") + ) { + return new GitHubCliError({ + operation, + detail: "GitHub CLI is not authenticated. Run `gh auth login` and retry.", + cause: error, + }); + } + + if ( + lower.includes("could not resolve to a pullrequest") || + lower.includes("repository.pullrequest") || + lower.includes("no pull requests found for branch") || + lower.includes("pull request not found") + ) { + return new GitHubCliError({ + operation, + detail: "Pull request not found. Check the PR number or URL and try again.", + cause: error, + }); + } + + return new GitHubCliError({ + operation, + detail: text, + cause: error, + }); +} + +const RawGitHubRepositoryCloneUrlsSchema = Schema.Struct({ + nameWithOwner: TrimmedNonEmptyString, + url: TrimmedNonEmptyString, + sshUrl: TrimmedNonEmptyString, +}); + +function normalizeRepositoryCloneUrls( + raw: Schema.Schema.Type, +): GitHubRepositoryCloneUrls { + return { + nameWithOwner: raw.nameWithOwner, + url: raw.url, + sshUrl: raw.sshUrl, + }; +} + +/** + * `gh repo create` prints the canonical URL of the new repository on stdout + * (e.g. `https://github.com/owner/repo`). Reading it back here avoids a + * follow-up `gh repo view`, which can race GitHub's GraphQL eventual + * consistency window and falsely report the just-created repo as missing. 
+ */ +function deriveRepositoryCloneUrlsFromCreateOutput( + stdout: string, + repository: string, +): GitHubRepositoryCloneUrls { + const fallbackHost = "github.com"; + const match = stdout.match(/https?:\/\/[^\s]+/); + if (match) { + const cleaned = match[0].replace(/\.git$/, ""); + try { + const parsed = new URL(cleaned); + const pathname = parsed.pathname.replace(/^\/+|\/+$/g, ""); + const segments = pathname.split("/").filter(Boolean); + if (segments.length === 2) { + const nameWithOwner = `${segments[0]}/${segments[1]}`; + return { + nameWithOwner, + url: `${parsed.origin}/${nameWithOwner}`, + sshUrl: `git@${parsed.host}:${nameWithOwner}.git`, + }; + } + } catch { + // Fall through to the input-derived defaults below. + } + } + return { + nameWithOwner: repository, + url: `https://${fallbackHost}/${repository}`, + sshUrl: `git@${fallbackHost}:${repository}.git`, + }; +} + +function decodeGitHubJson( + raw: string, + schema: S, + operation: "listOpenPullRequests" | "getPullRequest" | "getRepositoryCloneUrls", + invalidDetail: string, +): Effect.Effect { + return Schema.decodeEffect(Schema.fromJsonString(schema))(raw).pipe( + Effect.mapError( + (error) => + new GitHubCliError({ + operation, + detail: `${invalidDetail}: ${SchemaIssue.makeFormatterDefault()(error.issue)}`, + cause: error, + }), + ), + ); +} + +export const make = Effect.fn("makeGitHubCli")(function* () { + const process = yield* VcsProcess.VcsProcess; + + const execute: GitHubCliShape["execute"] = (input) => + process + .run({ + operation: "GitHubCli.execute", + command: "gh", + args: input.args, + cwd: input.cwd, + timeoutMs: input.timeoutMs ?? DEFAULT_TIMEOUT_MS, + }) + .pipe(Effect.mapError((error) => normalizeGitHubCliError("execute", error))); + + return GitHubCli.of({ + execute, + listOpenPullRequests: (input) => + execute({ + cwd: input.cwd, + args: [ + "pr", + "list", + "--head", + input.headSelector, + "--state", + "open", + "--limit", + String(input.limit ?? 
1), + "--json", + "number,title,url,baseRefName,headRefName,state,mergedAt,isCrossRepository,headRepository,headRepositoryOwner", + ], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + raw.length === 0 + ? Effect.succeed([]) + : Effect.sync(() => GitHubPullRequests.decodeGitHubPullRequestListJson(raw)).pipe( + Effect.flatMap((decoded) => { + if (!Result.isSuccess(decoded)) { + return Effect.fail( + new GitHubCliError({ + operation: "listOpenPullRequests", + detail: `GitHub CLI returned invalid PR list JSON: ${GitHubPullRequests.formatGitHubJsonDecodeError(decoded.failure)}`, + cause: decoded.failure, + }), + ); + } + + return Effect.succeed( + decoded.success.map(({ updatedAt: _updatedAt, ...summary }) => summary), + ); + }), + ), + ), + ), + getPullRequest: (input) => + execute({ + cwd: input.cwd, + args: [ + "pr", + "view", + input.reference, + "--json", + "number,title,url,baseRefName,headRefName,state,mergedAt,isCrossRepository,headRepository,headRepositoryOwner", + ], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + Effect.sync(() => GitHubPullRequests.decodeGitHubPullRequestJson(raw)).pipe( + Effect.flatMap((decoded) => { + if (!Result.isSuccess(decoded)) { + return Effect.fail( + new GitHubCliError({ + operation: "getPullRequest", + detail: `GitHub CLI returned invalid pull request JSON: ${GitHubPullRequests.formatGitHubJsonDecodeError(decoded.failure)}`, + cause: decoded.failure, + }), + ); + } + + return Effect.succeed( + (({ updatedAt: _updatedAt, ...summary }) => summary)(decoded.success), + ); + }), + ), + ), + ), + getRepositoryCloneUrls: (input) => + execute({ + cwd: input.cwd, + args: ["repo", "view", input.repository, "--json", "nameWithOwner,url,sshUrl"], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + decodeGitHubJson( + raw, + RawGitHubRepositoryCloneUrlsSchema, + "getRepositoryCloneUrls", + "GitHub CLI returned invalid repository 
JSON.", + ), + ), + Effect.map(normalizeRepositoryCloneUrls), + ), + createRepository: (input) => + execute({ + cwd: input.cwd, + args: ["repo", "create", input.repository, `--${input.visibility}`], + }).pipe( + Effect.map((result) => + deriveRepositoryCloneUrlsFromCreateOutput(result.stdout, input.repository), + ), + ), + createPullRequest: (input) => + execute({ + cwd: input.cwd, + args: [ + "pr", + "create", + "--base", + input.baseBranch, + "--head", + input.headSelector, + "--title", + input.title, + "--body-file", + input.bodyFile, + ], + }).pipe(Effect.asVoid), + getDefaultBranch: (input) => + execute({ + cwd: input.cwd, + args: ["repo", "view", "--json", "defaultBranchRef", "--jq", ".defaultBranchRef.name"], + }).pipe( + Effect.map((value) => { + const trimmed = value.stdout.trim(); + return trimmed.length > 0 ? trimmed : null; + }), + ), + checkoutPullRequest: (input) => + execute({ + cwd: input.cwd, + args: ["pr", "checkout", input.reference, ...(input.force ? ["--force"] : [])], + }).pipe(Effect.asVoid), + }); +}); + +export const layer = Layer.effect(GitHubCli, make()); diff --git a/apps/server/src/sourceControl/GitHubSourceControlProvider.test.ts b/apps/server/src/sourceControl/GitHubSourceControlProvider.test.ts new file mode 100644 index 00000000000..3c4ad5ac473 --- /dev/null +++ b/apps/server/src/sourceControl/GitHubSourceControlProvider.test.ts @@ -0,0 +1,156 @@ +import { assert, it } from "@effect/vitest"; +import { DateTime, Effect, Layer, Option } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as GitHubCli from "./GitHubCli.ts"; +import * as GitHubSourceControlProvider from "./GitHubSourceControlProvider.ts"; + +const processResult = (stdout: string): VcsProcess.VcsProcessOutput => ({ + exitCode: ChildProcessSpawner.ExitCode(0), + stdout, + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, +}); + +function makeProvider(github: Partial) 
{ + return GitHubSourceControlProvider.make().pipe( + Effect.provide(Layer.mock(GitHubCli.GitHubCli)(github)), + ); +} + +it.effect("maps GitHub PR summaries into provider-neutral change requests", () => + Effect.gen(function* () { + const provider = yield* makeProvider({ + getPullRequest: () => + Effect.succeed({ + number: 42, + title: "Add GitHub provider", + url: "https://github.com/pingdotgg/t3code/pull/42", + baseRefName: "main", + headRefName: "feature/source-control", + state: "open", + isCrossRepository: true, + headRepositoryNameWithOwner: "fork/t3code", + headRepositoryOwnerLogin: "fork", + }), + }); + + const changeRequest = yield* provider.getChangeRequest({ + cwd: "/repo", + reference: "42", + }); + + assert.deepStrictEqual(changeRequest, { + provider: "github", + number: 42, + title: "Add GitHub provider", + url: "https://github.com/pingdotgg/t3code/pull/42", + baseRefName: "main", + headRefName: "feature/source-control", + state: "open", + updatedAt: Option.none(), + isCrossRepository: true, + headRepositoryNameWithOwner: "fork/t3code", + headRepositoryOwnerLogin: "fork", + }); + }), +); + +it.effect("uses gh json listing for non-open change request state queries", () => + Effect.gen(function* () { + let executeArgs: ReadonlyArray = []; + const provider = yield* makeProvider({ + execute: (input) => { + executeArgs = input.args; + return Effect.succeed( + processResult( + JSON.stringify([ + { + number: 7, + title: "Merged work", + url: "https://github.com/pingdotgg/t3code/pull/7", + baseRefName: "main", + headRefName: "feature/merged", + state: "merged", + updatedAt: "2026-01-02T00:00:00.000Z", + }, + ]), + ), + ); + }, + }); + + const changeRequests = yield* provider.listChangeRequests({ + cwd: "/repo", + headSelector: "feature/merged", + state: "all", + limit: 10, + }); + + assert.deepStrictEqual(executeArgs, [ + "pr", + "list", + "--head", + "feature/merged", + "--state", + "all", + "--limit", + "10", + "--json", + 
"number,title,url,baseRefName,headRefName,state,mergedAt,updatedAt,isCrossRepository,headRepository,headRepositoryOwner", + ]); + assert.strictEqual(changeRequests[0]?.provider, "github"); + assert.strictEqual(changeRequests[0]?.state, "merged"); + assert.deepStrictEqual( + changeRequests[0]?.updatedAt, + Option.some(DateTime.makeUnsafe("2026-01-02T00:00:00.000Z")), + ); + }), +); + +it.effect("treats empty non-open change request listing output as no results", () => + Effect.gen(function* () { + const provider = yield* makeProvider({ + execute: () => Effect.succeed(processResult("")), + }); + + const changeRequests = yield* provider.listChangeRequests({ + cwd: "/repo", + headSelector: "feature/empty", + state: "all", + limit: 10, + }); + + assert.deepStrictEqual(changeRequests, []); + }), +); + +it.effect("creates GitHub PRs through provider-neutral input names", () => + Effect.gen(function* () { + let createInput: Parameters[0] | null = null; + const provider = yield* makeProvider({ + createPullRequest: (input) => { + createInput = input; + return Effect.void; + }, + }); + + yield* provider.createChangeRequest({ + cwd: "/repo", + baseRefName: "main", + headSelector: "owner:feature/provider", + title: "Provider PR", + bodyFile: "/tmp/body.md", + }); + + assert.deepStrictEqual(createInput, { + cwd: "/repo", + baseBranch: "main", + headSelector: "owner:feature/provider", + title: "Provider PR", + bodyFile: "/tmp/body.md", + }); + }), +); diff --git a/apps/server/src/sourceControl/GitHubSourceControlProvider.ts b/apps/server/src/sourceControl/GitHubSourceControlProvider.ts new file mode 100644 index 00000000000..7dba4893697 --- /dev/null +++ b/apps/server/src/sourceControl/GitHubSourceControlProvider.ts @@ -0,0 +1,196 @@ +import { Effect, Layer, Option, Result, Schema } from "effect"; +import { + SourceControlProviderError, + type ChangeRequest, + type ChangeRequestState, +} from "@t3tools/contracts"; + +import * as GitHubCli from "./GitHubCli.ts"; +import * as 
GitHubPullRequests from "./gitHubPullRequests.ts"; +import * as SourceControlProvider from "./SourceControlProvider.ts"; +import * as SourceControlProviderDiscovery from "./SourceControlProviderDiscovery.ts"; + +function providerError( + operation: string, + cause: GitHubCli.GitHubCliError, +): SourceControlProviderError { + return new SourceControlProviderError({ + provider: "github", + operation, + detail: cause.detail, + cause, + }); +} + +function toChangeRequest(summary: GitHubCli.GitHubPullRequestSummary): ChangeRequest { + return { + provider: "github", + number: summary.number, + title: summary.title, + url: summary.url, + baseRefName: summary.baseRefName, + headRefName: summary.headRefName, + state: summary.state ?? "open", + updatedAt: Option.none(), + ...(summary.isCrossRepository !== undefined + ? { isCrossRepository: summary.isCrossRepository } + : {}), + ...(summary.headRepositoryNameWithOwner !== undefined + ? { headRepositoryNameWithOwner: summary.headRepositoryNameWithOwner } + : {}), + ...(summary.headRepositoryOwnerLogin !== undefined + ? { headRepositoryOwnerLogin: summary.headRepositoryOwnerLogin } + : {}), + }; +} + +function parseGitHubAuth(input: SourceControlProviderDiscovery.SourceControlAuthProbeInput) { + const output = SourceControlProviderDiscovery.combinedAuthOutput(input); + const account = SourceControlProviderDiscovery.matchFirst(output, [ + /Logged in to .* account\s+([^\s(]+)/iu, + /Logged in to .* as\s+([^\s(]+)/iu, + ]); + const host = SourceControlProviderDiscovery.parseCliHost(output); + + if (input.exitCode !== 0) { + return SourceControlProviderDiscovery.providerAuth({ + status: "unauthenticated", + host, + detail: + SourceControlProviderDiscovery.firstSafeAuthLine(output) ?? 
+ "Run `gh auth login` to authenticate GitHub CLI.", + }); + } + + if (account) { + return SourceControlProviderDiscovery.providerAuth({ status: "authenticated", account, host }); + } + + return SourceControlProviderDiscovery.providerAuth({ + status: "unknown", + host, + detail: + SourceControlProviderDiscovery.firstSafeAuthLine(output) ?? + "GitHub CLI auth status could not be parsed.", + }); +} + +export const discovery = { + type: "cli", + kind: "github", + label: "GitHub", + executable: "gh", + versionArgs: ["--version"], + authArgs: ["auth", "status"], + parseAuth: parseGitHubAuth, + installHint: + "Install the GitHub command-line tool (`gh`) via https://cli.github.com/ or your package manager (for example `brew install gh`).", +} satisfies SourceControlProviderDiscovery.SourceControlCliDiscoverySpec; + +export const make = Effect.fn("makeGitHubSourceControlProvider")(function* () { + const github = yield* GitHubCli.GitHubCli; + + const listChangeRequests: SourceControlProvider.SourceControlProviderShape["listChangeRequests"] = + (input) => { + if (input.state === "open") { + return github + .listOpenPullRequests({ + cwd: input.cwd, + headSelector: input.headSelector, + ...(input.limit !== undefined ? { limit: input.limit } : {}), + }) + .pipe( + Effect.map((items) => items.map(toChangeRequest)), + Effect.mapError((error) => providerError("listChangeRequests", error)), + ); + } + + const stateArg: ChangeRequestState | "all" = input.state; + return github + .execute({ + cwd: input.cwd, + args: [ + "pr", + "list", + "--head", + input.headSelector, + "--state", + stateArg, + "--limit", + String(input.limit ?? 
20), + "--json", + "number,title,url,baseRefName,headRefName,state,mergedAt,updatedAt,isCrossRepository,headRepository,headRepositoryOwner", + ], + }) + .pipe( + Effect.flatMap((result) => { + const raw = result.stdout.trim(); + if (raw.length === 0) { + return Effect.succeed([]); + } + return Effect.sync(() => GitHubPullRequests.decodeGitHubPullRequestListJson(raw)).pipe( + Effect.flatMap((decoded) => + Result.isSuccess(decoded) + ? Effect.succeed( + decoded.success.map((item) => ({ + ...toChangeRequest(item), + updatedAt: item.updatedAt, + })), + ) + : Effect.fail( + new SourceControlProviderError({ + provider: "github", + operation: "listChangeRequests", + detail: "GitHub CLI returned invalid change request JSON.", + cause: decoded.failure, + }), + ), + ), + ); + }), + Effect.mapError((error) => + Schema.is(SourceControlProviderError)(error) + ? error + : providerError("listChangeRequests", error), + ), + ); + }; + + return SourceControlProvider.SourceControlProvider.of({ + kind: "github", + listChangeRequests, + getChangeRequest: (input) => + github.getPullRequest(input).pipe( + Effect.map(toChangeRequest), + Effect.mapError((error) => providerError("getChangeRequest", error)), + ), + createChangeRequest: (input) => + github + .createPullRequest({ + cwd: input.cwd, + baseBranch: input.baseRefName, + headSelector: input.headSelector, + title: input.title, + bodyFile: input.bodyFile, + }) + .pipe(Effect.mapError((error) => providerError("createChangeRequest", error))), + getRepositoryCloneUrls: (input) => + github + .getRepositoryCloneUrls(input) + .pipe(Effect.mapError((error) => providerError("getRepositoryCloneUrls", error))), + createRepository: (input) => + github + .createRepository(input) + .pipe(Effect.mapError((error) => providerError("createRepository", error))), + getDefaultBranch: (input) => + github + .getDefaultBranch(input) + .pipe(Effect.mapError((error) => providerError("getDefaultBranch", error))), + checkoutChangeRequest: (input) => + github + 
.checkoutPullRequest(input) + .pipe(Effect.mapError((error) => providerError("checkoutChangeRequest", error))), + }); +}); + +export const layer = Layer.effect(SourceControlProvider.SourceControlProvider, make()); diff --git a/apps/server/src/sourceControl/GitLabCli.test.ts b/apps/server/src/sourceControl/GitLabCli.test.ts new file mode 100644 index 00000000000..5c7e978d857 --- /dev/null +++ b/apps/server/src/sourceControl/GitLabCli.test.ts @@ -0,0 +1,326 @@ +import { assert, it, afterEach, expect, vi } from "@effect/vitest"; +import { Effect, Layer } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { VcsProcessExitError } from "@t3tools/contracts"; + +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as GitLabCli from "./GitLabCli.ts"; + +const mockedRun = vi.fn(); +const layer = it.layer( + GitLabCli.layer.pipe( + Layer.provide( + Layer.mock(VcsProcess.VcsProcess)({ + run: mockedRun, + }), + ), + ), +); + +function processOutput(stdout: string): VcsProcess.VcsProcessOutput { + return { + exitCode: ChildProcessSpawner.ExitCode(0), + stdout, + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, + }; +} + +afterEach(() => { + mockedRun.mockReset(); +}); + +layer("GitLabCli.layer", (it) => { + it.effect("parses merge request view output", () => + Effect.gen(function* () { + mockedRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify({ + iid: 42, + title: "Add MR thread creation", + web_url: "https://gitlab.com/pingdotgg/t3code/-/merge_requests/42", + target_branch: "main", + source_branch: "feature/mr-threads", + state: "opened", + source_project_id: 101, + target_project_id: 100, + source_project: { + path_with_namespace: "octocat/t3code", + }, + }), + ), + ), + ); + + const result = yield* Effect.gen(function* () { + const glab = yield* GitLabCli.GitLabCli; + return yield* glab.getMergeRequest({ + cwd: "/repo", + reference: "42", + }); + }); + + assert.deepStrictEqual(result, { + 
number: 42, + title: "Add MR thread creation", + url: "https://gitlab.com/pingdotgg/t3code/-/merge_requests/42", + baseRefName: "main", + headRefName: "feature/mr-threads", + state: "open", + isCrossRepository: true, + headRepositoryNameWithOwner: "octocat/t3code", + headRepositoryOwnerLogin: "octocat", + }); + expect(mockedRun).toHaveBeenCalledWith( + expect.objectContaining({ + command: "glab", + cwd: "/repo", + args: ["mr", "view", "42", "--output", "json"], + }), + ); + }), + ); + + it.effect("skips invalid entries when parsing MR lists", () => + Effect.gen(function* () { + mockedRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify([ + { + iid: 0, + title: "invalid", + web_url: "https://gitlab.com/pingdotgg/t3code/-/merge_requests/0", + target_branch: "main", + source_branch: "feature/invalid", + }, + { + iid: 43, + title: " Valid MR ", + web_url: " https://gitlab.com/pingdotgg/t3code/-/merge_requests/43 ", + target_branch: " main ", + source_branch: " feature/mr-list ", + state: "merged", + }, + ]), + ), + ), + ); + + const result = yield* Effect.gen(function* () { + const glab = yield* GitLabCli.GitLabCli; + return yield* glab.listMergeRequests({ + cwd: "/repo", + headSelector: "feature/mr-list", + state: "all", + }); + }); + + assert.deepStrictEqual(result, [ + { + number: 43, + title: "Valid MR", + url: "https://gitlab.com/pingdotgg/t3code/-/merge_requests/43", + baseRefName: "main", + headRefName: "feature/mr-list", + state: "merged", + }, + ]); + expect(mockedRun).toHaveBeenCalledWith( + expect.objectContaining({ + command: "glab", + cwd: "/repo", + args: [ + "mr", + "list", + "--source-branch", + "feature/mr-list", + "--all", + "--per-page", + "20", + "--output", + "json", + ], + }), + ); + }), + ); + + it.effect("reads repository clone URLs", () => + Effect.gen(function* () { + mockedRun.mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify({ + path_with_namespace: "octocat/t3code", + web_url: 
"https://gitlab.com/octocat/t3code", + http_url_to_repo: "https://gitlab.com/octocat/t3code.git", + ssh_url_to_repo: "git@gitlab.com:octocat/t3code.git", + }), + ), + ), + ); + + const result = yield* Effect.gen(function* () { + const glab = yield* GitLabCli.GitLabCli; + return yield* glab.getRepositoryCloneUrls({ + cwd: "/repo", + repository: "octocat/t3code", + }); + }); + + assert.deepStrictEqual(result, { + nameWithOwner: "octocat/t3code", + url: "https://gitlab.com/octocat/t3code", + sshUrl: "git@gitlab.com:octocat/t3code.git", + }); + }), + ); + + it.effect("creates merge requests through the GitLab API without placing the body in argv", () => + Effect.gen(function* () { + mockedRun.mockReturnValueOnce(Effect.succeed(processOutput("{}"))); + + const glab = yield* GitLabCli.GitLabCli; + yield* glab.createMergeRequest({ + cwd: "/repo", + baseBranch: "main", + headSelector: "owner:feature/provider", + title: "Provider MR", + bodyFile: "/tmp/t3-mr-body.md", + }); + + expect(mockedRun).toHaveBeenCalledWith( + expect.objectContaining({ + command: "glab", + cwd: "/repo", + args: [ + "api", + "--method", + "POST", + "projects/:fullpath/merge_requests", + "--raw-field", + "source_branch=feature/provider", + "--raw-field", + "target_branch=main", + "--raw-field", + "title=Provider MR", + "--field", + "description=@/tmp/t3-mr-body.md", + ], + }), + ); + }), + ); + + it.effect("creates repositories under an explicit namespace", () => + Effect.gen(function* () { + mockedRun + .mockReturnValueOnce(Effect.succeed(processOutput(JSON.stringify({ id: 1234 })))) + .mockReturnValueOnce( + Effect.succeed( + processOutput( + JSON.stringify({ + path_with_namespace: "octocat/t3code", + web_url: "https://gitlab.com/octocat/t3code", + http_url_to_repo: "https://gitlab.com/octocat/t3code.git", + ssh_url_to_repo: "git@gitlab.com:octocat/t3code.git", + }), + ), + ), + ); + + const glab = yield* GitLabCli.GitLabCli; + const result = yield* glab.createRepository({ + cwd: "/repo", + 
repository: "octocat/t3code", + visibility: "public", + }); + + assert.deepStrictEqual(result, { + nameWithOwner: "octocat/t3code", + url: "https://gitlab.com/octocat/t3code", + sshUrl: "git@gitlab.com:octocat/t3code.git", + }); + expect(mockedRun).toHaveBeenNthCalledWith( + 1, + expect.objectContaining({ + command: "glab", + cwd: "/repo", + args: ["api", "namespaces/octocat"], + }), + ); + expect(mockedRun).toHaveBeenNthCalledWith( + 2, + expect.objectContaining({ + command: "glab", + cwd: "/repo", + args: [ + "api", + "--method", + "POST", + "projects", + "--raw-field", + "path=t3code", + "--raw-field", + "name=t3code", + "--raw-field", + "visibility=public", + "--raw-field", + "namespace_id=1234", + ], + }), + ); + }), + ); + + it.effect("does not pass unsupported force flags when checking out merge requests", () => + Effect.gen(function* () { + mockedRun.mockReturnValueOnce(Effect.succeed(processOutput(""))); + + const glab = yield* GitLabCli.GitLabCli; + yield* glab.checkoutMergeRequest({ + cwd: "/repo", + reference: "42", + force: true, + }); + + expect(mockedRun).toHaveBeenCalledWith( + expect.objectContaining({ + command: "glab", + cwd: "/repo", + args: ["mr", "checkout", "42"], + }), + ); + }), + ); + + it.effect("surfaces a friendly error when the merge request is not found", () => + Effect.gen(function* () { + mockedRun.mockReturnValueOnce( + Effect.fail( + new VcsProcessExitError({ + operation: "GitLabCli.execute", + command: "glab mr view 4888", + cwd: "/repo", + exitCode: 1, + detail: "GET 404 merge request not found", + }), + ), + ); + + const error = yield* Effect.gen(function* () { + const glab = yield* GitLabCli.GitLabCli; + return yield* glab.getMergeRequest({ + cwd: "/repo", + reference: "4888", + }); + }).pipe(Effect.flip); + + assert.equal(error.message.includes("Merge request not found"), true); + }), + ); +}); diff --git a/apps/server/src/sourceControl/GitLabCli.ts b/apps/server/src/sourceControl/GitLabCli.ts new file mode 100644 index 
00000000000..c4485bb09b7 --- /dev/null +++ b/apps/server/src/sourceControl/GitLabCli.ts @@ -0,0 +1,442 @@ +import { Context, Effect, Layer, Option, Result, Schema, SchemaIssue, type DateTime } from "effect"; + +import { TrimmedNonEmptyString, type SourceControlRepositoryVisibility } from "@t3tools/contracts"; + +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as GitLabMergeRequests from "./gitLabMergeRequests.ts"; +import type * as SourceControlProvider from "./SourceControlProvider.ts"; + +const DEFAULT_TIMEOUT_MS = 30_000; + +export class GitLabCliError extends Schema.TaggedErrorClass()("GitLabCliError", { + operation: Schema.String, + detail: Schema.String, + cause: Schema.optional(Schema.Defect), +}) { + override get message(): string { + return `GitLab CLI failed in ${this.operation}: ${this.detail}`; + } +} + +export interface GitLabMergeRequestSummary { + readonly number: number; + readonly title: string; + readonly url: string; + readonly baseRefName: string; + readonly headRefName: string; + readonly state?: "open" | "closed" | "merged"; + readonly updatedAt?: Option.Option; + readonly isCrossRepository?: boolean; + readonly headRepositoryNameWithOwner?: string | null; + readonly headRepositoryOwnerLogin?: string | null; +} + +export interface GitLabRepositoryCloneUrls { + readonly nameWithOwner: string; + readonly url: string; + readonly sshUrl: string; +} + +export interface GitLabCliShape { + readonly execute: (input: { + readonly cwd: string; + readonly args: ReadonlyArray; + readonly timeoutMs?: number; + }) => Effect.Effect; + + readonly listMergeRequests: (input: { + readonly cwd: string; + readonly headSelector: string; + readonly source?: SourceControlProvider.SourceControlRefSelector; + readonly state: "open" | "closed" | "merged" | "all"; + readonly limit?: number; + }) => Effect.Effect, GitLabCliError>; + + readonly getMergeRequest: (input: { + readonly cwd: string; + readonly reference: string; + }) => Effect.Effect; + + readonly 
getRepositoryCloneUrls: (input: { + readonly cwd: string; + readonly repository: string; + }) => Effect.Effect; + + readonly createRepository: (input: { + readonly cwd: string; + readonly repository: string; + readonly visibility: SourceControlRepositoryVisibility; + }) => Effect.Effect; + + readonly createMergeRequest: (input: { + readonly cwd: string; + readonly baseBranch: string; + readonly headSelector: string; + readonly source?: SourceControlProvider.SourceControlRefSelector; + readonly target?: SourceControlProvider.SourceControlRefSelector; + readonly title: string; + readonly bodyFile: string; + }) => Effect.Effect; + + readonly getDefaultBranch: (input: { + readonly cwd: string; + }) => Effect.Effect; + + readonly checkoutMergeRequest: (input: { + readonly cwd: string; + readonly reference: string; + readonly force?: boolean; + }) => Effect.Effect; +} + +export class GitLabCli extends Context.Service()( + "t3/source-control/GitLabCli", +) {} + +function isVcsProcessSpawnError(error: unknown): boolean { + return ( + typeof error === "object" && + error !== null && + "_tag" in error && + error._tag === "VcsProcessSpawnError" + ); +} + +function normalizeGitLabCliError(operation: "execute" | "stdout", error: unknown): GitLabCliError { + if (error instanceof Error) { + if (error.message.includes("Command not found: glab") || isVcsProcessSpawnError(error)) { + return new GitLabCliError({ + operation, + detail: "GitLab CLI (`glab`) is required but not available on PATH.", + cause: error, + }); + } + + const lower = error.message.toLowerCase(); + if ( + lower.includes("authentication failed") || + lower.includes("not logged in") || + lower.includes("glab auth login") || + lower.includes("token") + ) { + return new GitLabCliError({ + operation, + detail: "GitLab CLI is not authenticated. 
Run `glab auth login` and retry.", + cause: error, + }); + } + + if ( + lower.includes("merge request not found") || + lower.includes("not found") || + lower.includes("404") + ) { + return new GitLabCliError({ + operation, + detail: "Merge request not found. Check the MR number or URL and try again.", + cause: error, + }); + } + + return new GitLabCliError({ + operation, + detail: `GitLab CLI command failed: ${error.message}`, + cause: error, + }); + } + + return new GitLabCliError({ + operation, + detail: "GitLab CLI command failed.", + cause: error, + }); +} + +const RawGitLabRepositoryCloneUrlsSchema = Schema.Struct({ + path_with_namespace: TrimmedNonEmptyString, + web_url: TrimmedNonEmptyString, + http_url_to_repo: TrimmedNonEmptyString, + ssh_url_to_repo: TrimmedNonEmptyString, +}); + +const RawGitLabDefaultBranchSchema = Schema.Struct({ + default_branch: Schema.optional(Schema.NullOr(TrimmedNonEmptyString)), +}); + +const RawGitLabNamespaceSchema = Schema.Struct({ + id: Schema.Number, +}); + +function normalizeRepositoryCloneUrls( + raw: Schema.Schema.Type, +): GitLabRepositoryCloneUrls { + return { + nameWithOwner: raw.path_with_namespace, + url: raw.web_url, + sshUrl: raw.ssh_url_to_repo, + }; +} + +function decodeGitLabJson( + raw: string, + schema: S, + operation: "getRepositoryCloneUrls" | "getDefaultBranch" | "createRepository", + invalidDetail: string, +): Effect.Effect { + return Schema.decodeEffect(Schema.fromJsonString(schema))(raw).pipe( + Effect.mapError( + (error) => + new GitLabCliError({ + operation, + detail: `${invalidDetail}: ${SchemaIssue.makeFormatterDefault()(error.issue)}`, + cause: error, + }), + ), + ); +} + +function stateArgs(state: "open" | "closed" | "merged" | "all"): ReadonlyArray { + switch (state) { + case "open": + return []; + case "closed": + return ["--closed"]; + case "merged": + return ["--merged"]; + case "all": + return ["--all"]; + } +} + +function normalizeHeadSelector(headSelector: string): string { + const trimmed = 
headSelector.trim();
  const ownerBranch = /^[^:]+:(.+)$/.exec(trimmed);
  return ownerBranch?.[1]?.trim() || trimmed;
}

// Resolves which branch MRs are listed/created from: an explicit selector wins
// over the (possibly "owner:branch"-prefixed) head selector.
function sourceRefName(input: {
  readonly headSelector: string;
  readonly source?: SourceControlProvider.SourceControlRefSelector;
}): string {
  return input.source?.refName ?? normalizeHeadSelector(input.headSelector);
}

// NOTE(review): this value is later passed as `source_project_id` when creating
// an MR; GitLab expects a project identifier there — confirm that a bare
// `owner` fallback is accepted by the API.
function sourceProjectIdentifier(
  source: SourceControlProvider.SourceControlRefSelector | undefined,
): string | null {
  return source?.repository ?? source?.owner ?? null;
}

// Drops `updatedAt` entirely when absent so the summary keeps its optional-field
// shape instead of carrying an explicit Option.none().
function toSummaryWithOptionalUpdatedAt(
  record: GitLabMergeRequestSummary & {
    readonly updatedAt: Option.Option<DateTime.DateTime>;
  },
): GitLabMergeRequestSummary {
  const { updatedAt, ...summary } = record;
  return Option.isSome(updatedAt) ? { ...summary, updatedAt } : summary;
}

/**
 * Splits "group/subgroup/project" into its namespace path and project path.
 * A bare "project" yields a null namespace (create under the user's own space).
 */
function parseRepositoryPath(repository: string): {
  readonly namespacePath: string | null;
  readonly projectPath: string;
} {
  const parts = repository
    .split("/")
    .map((part) => part.trim())
    .filter((part) => part.length > 0);
  const projectPath = parts.at(-1) ?? repository.trim();
  const namespacePath = parts.length > 1 ? parts.slice(0, -1).join("/") : null;
  return { namespacePath, projectPath };
}

/** Builds the GitLabCli service on top of the shared VcsProcess runner. */
export const make = Effect.fn("makeGitLabCli")(function* () {
  const process = yield* VcsProcess.VcsProcess;

  // Single choke point for running `glab`; all errors are normalized here.
  const execute: GitLabCliShape["execute"] = (input) =>
    process
      .run({
        operation: "GitLabCli.execute",
        command: "glab",
        args: input.args,
        cwd: input.cwd,
        timeoutMs: input.timeoutMs ?? DEFAULT_TIMEOUT_MS,
      })
      .pipe(Effect.mapError((error) => normalizeGitLabCliError("execute", error)));

  return GitLabCli.of({
    execute,
    listMergeRequests: (input) =>
      execute({
        cwd: input.cwd,
        args: [
          "mr",
          "list",
          "--source-branch",
          sourceRefName(input),
          ...stateArgs(input.state),
          "--per-page",
          String(input.limit ??
20), + "--output", + "json", + ], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + raw.length === 0 + ? Effect.succeed([]) + : Effect.sync(() => GitLabMergeRequests.decodeGitLabMergeRequestListJson(raw)).pipe( + Effect.flatMap((decoded) => { + if (!Result.isSuccess(decoded)) { + return Effect.fail( + new GitLabCliError({ + operation: "listMergeRequests", + detail: `GitLab CLI returned invalid MR list JSON: ${GitLabMergeRequests.formatGitLabJsonDecodeError(decoded.failure)}`, + cause: decoded.failure, + }), + ); + } + + return Effect.succeed(decoded.success.map(toSummaryWithOptionalUpdatedAt)); + }), + ), + ), + ), + getMergeRequest: (input) => + execute({ + cwd: input.cwd, + args: ["mr", "view", input.reference, "--output", "json"], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + Effect.sync(() => GitLabMergeRequests.decodeGitLabMergeRequestJson(raw)).pipe( + Effect.flatMap((decoded) => { + if (!Result.isSuccess(decoded)) { + return Effect.fail( + new GitLabCliError({ + operation: "getMergeRequest", + detail: `GitLab CLI returned invalid merge request JSON: ${GitLabMergeRequests.formatGitLabJsonDecodeError(decoded.failure)}`, + cause: decoded.failure, + }), + ); + } + + return Effect.succeed(toSummaryWithOptionalUpdatedAt(decoded.success)); + }), + ), + ), + ), + getRepositoryCloneUrls: (input) => + execute({ + cwd: input.cwd, + args: ["api", `projects/${encodeURIComponent(input.repository)}`], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + decodeGitLabJson( + raw, + RawGitLabRepositoryCloneUrlsSchema, + "getRepositoryCloneUrls", + "GitLab CLI returned invalid repository JSON.", + ), + ), + Effect.map(normalizeRepositoryCloneUrls), + ), + createRepository: (input) => { + const { namespacePath, projectPath } = parseRepositoryPath(input.repository); + const namespaceId: Effect.Effect = namespacePath + ? 
execute({ + cwd: input.cwd, + args: ["api", `namespaces/${encodeURIComponent(namespacePath)}`], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + decodeGitLabJson( + raw, + RawGitLabNamespaceSchema, + "createRepository", + "GitLab CLI returned invalid namespace JSON.", + ), + ), + Effect.map((namespace) => namespace.id), + ) + : Effect.succeed(null); + + return namespaceId.pipe( + Effect.flatMap((resolvedNamespaceId) => + execute({ + cwd: input.cwd, + args: [ + "api", + "--method", + "POST", + "projects", + "--raw-field", + `path=${projectPath}`, + "--raw-field", + `name=${projectPath}`, + "--raw-field", + `visibility=${input.visibility}`, + ...(resolvedNamespaceId === null + ? [] + : ["--raw-field", `namespace_id=${resolvedNamespaceId}`]), + ], + }), + ), + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + decodeGitLabJson( + raw, + RawGitLabRepositoryCloneUrlsSchema, + "createRepository", + "GitLab CLI returned invalid repository JSON.", + ), + ), + Effect.map(normalizeRepositoryCloneUrls), + ); + }, + createMergeRequest: (input) => { + const sourceProject = sourceProjectIdentifier(input.source); + return execute({ + cwd: input.cwd, + args: [ + "api", + "--method", + "POST", + "projects/:fullpath/merge_requests", + "--raw-field", + `source_branch=${sourceRefName(input)}`, + "--raw-field", + `target_branch=${input.target?.refName ?? input.baseBranch}`, + ...(sourceProject ? 
["--raw-field", `source_project_id=${sourceProject}`] : []), + "--raw-field", + `title=${input.title}`, + "--field", + `description=@${input.bodyFile}`, + ], + }).pipe(Effect.asVoid); + }, + getDefaultBranch: (input) => + execute({ + cwd: input.cwd, + args: ["api", "projects/:fullpath"], + }).pipe( + Effect.map((result) => result.stdout.trim()), + Effect.flatMap((raw) => + decodeGitLabJson( + raw, + RawGitLabDefaultBranchSchema, + "getDefaultBranch", + "GitLab CLI returned invalid repository JSON.", + ), + ), + Effect.map((value) => value.default_branch ?? null), + ), + checkoutMergeRequest: (input) => + execute({ + cwd: input.cwd, + args: ["mr", "checkout", input.reference], + }).pipe(Effect.asVoid), + }); +}); + +export const layer = Layer.effect(GitLabCli, make()); diff --git a/apps/server/src/sourceControl/GitLabSourceControlProvider.test.ts b/apps/server/src/sourceControl/GitLabSourceControlProvider.test.ts new file mode 100644 index 00000000000..930c1c018f5 --- /dev/null +++ b/apps/server/src/sourceControl/GitLabSourceControlProvider.test.ts @@ -0,0 +1,107 @@ +import { assert, it } from "@effect/vitest"; +import { Effect, Layer, Option } from "effect"; + +import * as GitLabCli from "./GitLabCli.ts"; +import * as GitLabSourceControlProvider from "./GitLabSourceControlProvider.ts"; + +function makeProvider(gitlab: Partial) { + return GitLabSourceControlProvider.make().pipe( + Effect.provide(Layer.mock(GitLabCli.GitLabCli)(gitlab)), + ); +} + +it.effect("maps GitLab MR summaries into provider-neutral change requests", () => + Effect.gen(function* () { + const provider = yield* makeProvider({ + getMergeRequest: () => + Effect.succeed({ + number: 42, + title: "Add GitLab provider", + url: "https://gitlab.com/pingdotgg/t3code/-/merge_requests/42", + baseRefName: "main", + headRefName: "feature/source-control", + state: "open", + isCrossRepository: true, + headRepositoryNameWithOwner: "fork/t3code", + headRepositoryOwnerLogin: "fork", + }), + }); + + const 
changeRequest = yield* provider.getChangeRequest({ + cwd: "/repo", + reference: "42", + }); + + assert.deepStrictEqual(changeRequest, { + provider: "gitlab", + number: 42, + title: "Add GitLab provider", + url: "https://gitlab.com/pingdotgg/t3code/-/merge_requests/42", + baseRefName: "main", + headRefName: "feature/source-control", + state: "open", + updatedAt: Option.none(), + isCrossRepository: true, + headRepositoryNameWithOwner: "fork/t3code", + headRepositoryOwnerLogin: "fork", + }); + }), +); + +it.effect("lists GitLab MRs through provider-neutral input names", () => + Effect.gen(function* () { + let listInput: Parameters[0] | null = null; + const provider = yield* makeProvider({ + listMergeRequests: (input) => { + listInput = input; + return Effect.succeed([]); + }, + }); + + yield* provider.listChangeRequests({ + cwd: "/repo", + headSelector: "feature/provider", + state: "all", + limit: 10, + }); + + assert.deepStrictEqual(listInput, { + cwd: "/repo", + headSelector: "feature/provider", + state: "all", + limit: 10, + }); + }), +); + +it.effect("creates GitLab MRs through provider-neutral input names", () => + Effect.gen(function* () { + let createInput: Parameters[0] | null = null; + const provider = yield* makeProvider({ + createMergeRequest: (input) => { + createInput = input; + return Effect.void; + }, + }); + + yield* provider.createChangeRequest({ + cwd: "/repo", + baseRefName: "main", + headSelector: "owner:feature/provider", + title: "Provider MR", + bodyFile: "/tmp/body.md", + }); + + assert.deepStrictEqual(createInput, { + cwd: "/repo", + baseBranch: "main", + headSelector: "owner:feature/provider", + source: { + owner: "owner", + refName: "feature/provider", + }, + title: "Provider MR", + bodyFile: "/tmp/body.md", + }); + }), +); diff --git a/apps/server/src/sourceControl/GitLabSourceControlProvider.ts b/apps/server/src/sourceControl/GitLabSourceControlProvider.ts new file mode 100644 index 00000000000..5b0538babd8 --- /dev/null +++ 
b/apps/server/src/sourceControl/GitLabSourceControlProvider.ts @@ -0,0 +1,144 @@ +import { Effect, Layer, Option } from "effect"; +import { SourceControlProviderError, type ChangeRequest } from "@t3tools/contracts"; + +import * as GitLabCli from "./GitLabCli.ts"; +import * as SourceControlProvider from "./SourceControlProvider.ts"; +import * as SourceControlProviderDiscovery from "./SourceControlProviderDiscovery.ts"; + +function providerError( + operation: string, + cause: GitLabCli.GitLabCliError, +): SourceControlProviderError { + return new SourceControlProviderError({ + provider: "gitlab", + operation, + detail: cause.detail, + cause, + }); +} + +function toChangeRequest(summary: GitLabCli.GitLabMergeRequestSummary): ChangeRequest { + return { + provider: "gitlab", + number: summary.number, + title: summary.title, + url: summary.url, + baseRefName: summary.baseRefName, + headRefName: summary.headRefName, + state: summary.state ?? "open", + updatedAt: summary.updatedAt ?? Option.none(), + ...(summary.isCrossRepository !== undefined + ? { isCrossRepository: summary.isCrossRepository } + : {}), + ...(summary.headRepositoryNameWithOwner !== undefined + ? { headRepositoryNameWithOwner: summary.headRepositoryNameWithOwner } + : {}), + ...(summary.headRepositoryOwnerLogin !== undefined + ? 
{ headRepositoryOwnerLogin: summary.headRepositoryOwnerLogin } + : {}), + }; +} + +function parseGitLabAuth(input: SourceControlProviderDiscovery.SourceControlAuthProbeInput) { + const output = SourceControlProviderDiscovery.combinedAuthOutput(input); + const account = SourceControlProviderDiscovery.matchFirst(output, [ + /Logged in to .* as\s+([^\s(]+)/iu, + /Logged in to .* account\s+([^\s(]+)/iu, + /account:\s*([^\s(]+)/iu, + ]); + const host = SourceControlProviderDiscovery.parseCliHost(output); + + if (input.exitCode !== 0) { + return SourceControlProviderDiscovery.providerAuth({ + status: "unauthenticated", + host, + detail: + SourceControlProviderDiscovery.firstSafeAuthLine(output) ?? + "Run `glab auth login` to authenticate GitLab CLI.", + }); + } + + if (account) { + return SourceControlProviderDiscovery.providerAuth({ status: "authenticated", account, host }); + } + + return SourceControlProviderDiscovery.providerAuth({ + status: "unknown", + host, + detail: + SourceControlProviderDiscovery.firstSafeAuthLine(output) ?? + "GitLab CLI auth status could not be parsed.", + }); +} + +export const discovery = { + type: "cli", + kind: "gitlab", + label: "GitLab", + executable: "glab", + versionArgs: ["--version"], + authArgs: ["auth", "status"], + parseAuth: parseGitLabAuth, + installHint: + "Install the GitLab command-line tool (`glab`) from https://gitlab.com/gitlab-org/cli or your package manager (for example `brew install glab`).", +} satisfies SourceControlProviderDiscovery.SourceControlCliDiscoverySpec; + +export const make = Effect.fn("makeGitLabSourceControlProvider")(function* () { + const gitlab = yield* GitLabCli.GitLabCli; + + return SourceControlProvider.SourceControlProvider.of({ + kind: "gitlab", + listChangeRequests: (input) => { + const source = SourceControlProvider.sourceControlRefFromInput(input); + return gitlab + .listMergeRequests({ + cwd: input.cwd, + headSelector: input.headSelector, + ...(source ? 
{ source } : {}), + state: input.state, + ...(input.limit !== undefined ? { limit: input.limit } : {}), + }) + .pipe( + Effect.map((items) => items.map(toChangeRequest)), + Effect.mapError((error) => providerError("listChangeRequests", error)), + ); + }, + getChangeRequest: (input) => + gitlab.getMergeRequest(input).pipe( + Effect.map(toChangeRequest), + Effect.mapError((error) => providerError("getChangeRequest", error)), + ), + createChangeRequest: (input) => { + const source = SourceControlProvider.sourceControlRefFromInput(input); + return gitlab + .createMergeRequest({ + cwd: input.cwd, + baseBranch: input.baseRefName, + headSelector: input.headSelector, + ...(source ? { source } : {}), + ...(input.target ? { target: input.target } : {}), + title: input.title, + bodyFile: input.bodyFile, + }) + .pipe(Effect.mapError((error) => providerError("createChangeRequest", error))); + }, + getRepositoryCloneUrls: (input) => + gitlab + .getRepositoryCloneUrls(input) + .pipe(Effect.mapError((error) => providerError("getRepositoryCloneUrls", error))), + createRepository: (input) => + gitlab + .createRepository(input) + .pipe(Effect.mapError((error) => providerError("createRepository", error))), + getDefaultBranch: (input) => + gitlab + .getDefaultBranch(input) + .pipe(Effect.mapError((error) => providerError("getDefaultBranch", error))), + checkoutChangeRequest: (input) => + gitlab + .checkoutMergeRequest(input) + .pipe(Effect.mapError((error) => providerError("checkoutChangeRequest", error))), + }); +}); + +export const layer = Layer.effect(SourceControlProvider.SourceControlProvider, make()); diff --git a/apps/server/src/sourceControl/SourceControlDiscovery.test.ts b/apps/server/src/sourceControl/SourceControlDiscovery.test.ts new file mode 100644 index 00000000000..ce41265d336 --- /dev/null +++ b/apps/server/src/sourceControl/SourceControlDiscovery.test.ts @@ -0,0 +1,257 @@ +import { assert, it } from "@effect/vitest"; +import * as NodeServices from 
"@effect/platform-node/NodeServices"; +import { Effect, Layer, Option } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; +import { VcsProcessSpawnError } from "@t3tools/contracts"; + +import { ServerConfig } from "../config.ts"; +import * as VcsDriverRegistry from "../vcs/VcsDriverRegistry.ts"; +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as AzureDevOpsCli from "./AzureDevOpsCli.ts"; +import * as BitbucketApi from "./BitbucketApi.ts"; +import * as GitHubCli from "./GitHubCli.ts"; +import * as GitLabCli from "./GitLabCli.ts"; +import * as SourceControlDiscovery from "./SourceControlDiscovery.ts"; +import * as SourceControlProviderRegistry from "./SourceControlProviderRegistry.ts"; + +const sourceControlProviderRegistryTestLayer = (input: { + readonly bitbucket: Partial; + readonly process: Partial; +}) => + SourceControlProviderRegistry.layer.pipe( + Layer.provide( + Layer.mergeAll( + ServerConfig.layerTest(process.cwd(), { prefix: "t3-source-control-registry-test-" }).pipe( + Layer.provide(NodeServices.layer), + ), + Layer.mock(AzureDevOpsCli.AzureDevOpsCli)({}), + Layer.mock(BitbucketApi.BitbucketApi)(input.bitbucket), + Layer.mock(GitHubCli.GitHubCli)({}), + Layer.mock(GitLabCli.GitLabCli)({}), + Layer.mock(VcsDriverRegistry.VcsDriverRegistry)({}), + Layer.mock(VcsProcess.VcsProcess)(input.process), + ), + ), + ); + +const processOutput = ( + stdout: string, + options?: { + readonly stderr?: string; + readonly exitCode?: ChildProcessSpawner.ExitCode; + }, +): VcsProcess.VcsProcessOutput => ({ + exitCode: options?.exitCode ?? ChildProcessSpawner.ExitCode(0), + stdout, + stderr: options?.stderr ?? 
"", + stdoutTruncated: false, + stderrTruncated: false, +}); + +it.effect("reports implemented tools separately from locally available executables", () => { + const processMock = { + run: (input: VcsProcess.VcsProcessInput) => { + if (input.command === "git") { + return Effect.succeed(processOutput("git version 2.51.0\n")); + } + if (input.command === "gh" && input.args[0] === "--version") { + return Effect.succeed(processOutput("gh version 2.83.0\n")); + } + if (input.command === "gh" && input.args.join(" ") === "auth status") { + return Effect.succeed( + processOutput(`github.com +Logged in to github.com account juliusmarminge (keyring) +- Active account: true +- Git operations protocol: ssh +- Token: gho_************************************ +- Token scopes: 'admin:public_key', 'gist', 'read:org', 'repo' +`), + ); + } + return Effect.fail( + new VcsProcessSpawnError({ + operation: input.operation, + command: input.command, + cwd: input.cwd, + cause: new Error(`${input.command} not found`), + }), + ); + }, + } satisfies Partial; + const testLayer = SourceControlDiscovery.layer.pipe( + Layer.provide( + ServerConfig.layerTest(process.cwd(), { prefix: "t3-source-control-discovery-" }), + ), + Layer.provide(Layer.mock(VcsProcess.VcsProcess)(processMock)), + Layer.provide( + sourceControlProviderRegistryTestLayer({ + process: processMock, + bitbucket: { + probeAuth: Effect.succeed({ + status: "unauthenticated", + account: Option.none(), + host: Option.some("bitbucket.org"), + detail: Option.some( + "Set T3CODE_BITBUCKET_EMAIL and T3CODE_BITBUCKET_API_TOKEN, or T3CODE_BITBUCKET_ACCESS_TOKEN.", + ), + }), + }, + }), + ), + Layer.provideMerge(NodeServices.layer), + ); + + return Effect.gen(function* () { + const discovery = yield* SourceControlDiscovery.SourceControlDiscovery; + const result = yield* discovery.discover; + + assert.deepStrictEqual( + result.versionControlSystems.map((item) => ({ + kind: item.kind, + implemented: item.implemented, + status: item.status, + 
})), + [ + { kind: "git", implemented: true, status: "available" }, + { kind: "jj", implemented: false, status: "missing" }, + ], + ); + assert.deepStrictEqual( + result.sourceControlProviders.map((item) => ({ + kind: item.kind, + status: item.status, + auth: item.auth.status, + account: item.auth.account, + })), + [ + { + kind: "github", + status: "available", + auth: "authenticated", + account: Option.some("juliusmarminge"), + }, + { + kind: "gitlab", + status: "missing", + auth: "unknown", + account: Option.none(), + }, + { + kind: "azure-devops", + status: "missing", + auth: "unknown", + account: Option.none(), + }, + { + kind: "bitbucket", + status: "available", + auth: "unauthenticated", + account: Option.none(), + }, + ], + ); + const bitbucket = result.sourceControlProviders.find((item) => item.kind === "bitbucket"); + assert.ok(bitbucket); + assert.strictEqual(bitbucket.executable, undefined); + }).pipe(Effect.provide(testLayer)); +}); + +it.effect("probes provider authentication without exposing token details", () => { + const processMock = { + run: (input: VcsProcess.VcsProcessInput) => { + if (input.args[0] === "--version") { + return Effect.succeed(processOutput(`${input.command} version test\n`)); + } + if (input.command === "gh" && input.args.join(" ") === "auth status") { + return Effect.succeed( + processOutput(`github.com +Logged in to github.com account octocat (keyring) +- Token: gho_************************************ +- Token scopes: 'repo' +`), + ); + } + if (input.command === "glab" && input.args.join(" ") === "auth status") { + return Effect.succeed( + processOutput(`gitlab.com +Logged in to gitlab.com as gitlab-user +`), + ); + } + if ( + input.command === "az" && + input.args.join(" ") === "account show --query user.name -o tsv" + ) { + return Effect.succeed(processOutput("azure-user@example.com\n")); + } + return Effect.fail( + new VcsProcessSpawnError({ + operation: input.operation, + command: input.command, + cwd: input.cwd, + cause: 
new Error(`${input.command} not found`), + }), + ); + }, + } satisfies Partial; + const testLayer = SourceControlDiscovery.layer.pipe( + Layer.provide( + ServerConfig.layerTest(process.cwd(), { prefix: "t3-source-control-auth-discovery-" }), + ), + Layer.provide(Layer.mock(VcsProcess.VcsProcess)(processMock)), + Layer.provide( + sourceControlProviderRegistryTestLayer({ + process: processMock, + bitbucket: { + probeAuth: Effect.succeed({ + status: "authenticated", + account: Option.some("bitbucket-user"), + host: Option.some("bitbucket.org"), + detail: Option.none(), + }), + }, + }), + ), + Layer.provideMerge(NodeServices.layer), + ); + + return Effect.gen(function* () { + const discovery = yield* SourceControlDiscovery.SourceControlDiscovery; + const result = yield* discovery.discover; + + assert.deepStrictEqual( + result.sourceControlProviders.map((item) => ({ + kind: item.kind, + auth: item.auth.status, + account: item.auth.account, + detail: item.auth.detail, + })), + [ + { + kind: "github", + auth: "authenticated", + account: Option.some("octocat"), + detail: Option.none(), + }, + { + kind: "gitlab", + auth: "authenticated", + account: Option.some("gitlab-user"), + detail: Option.none(), + }, + { + kind: "azure-devops", + auth: "authenticated", + account: Option.some("azure-user@example.com"), + detail: Option.none(), + }, + { + kind: "bitbucket", + auth: "authenticated", + account: Option.some("bitbucket-user"), + detail: Option.none(), + }, + ], + ); + }).pipe(Effect.provide(testLayer)); +}); diff --git a/apps/server/src/sourceControl/SourceControlDiscovery.ts b/apps/server/src/sourceControl/SourceControlDiscovery.ts new file mode 100644 index 00000000000..4a44d35087a --- /dev/null +++ b/apps/server/src/sourceControl/SourceControlDiscovery.ts @@ -0,0 +1,144 @@ +import { + type SourceControlDiscoveryResult, + type VcsDiscoveryItem, + type VcsDriverKind, +} from "@t3tools/contracts"; +import { Context, Effect, Layer, Option } from "effect"; + +import { 
ServerConfig } from "../config.ts"; +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as SourceControlProviderDiscovery from "./SourceControlProviderDiscovery.ts"; +import * as SourceControlProviderRegistry from "./SourceControlProviderRegistry.ts"; + +interface DiscoveryProbe { + readonly label: string; + readonly executable?: string; + readonly versionArgs?: ReadonlyArray; + readonly implemented: boolean; + readonly installHint: string; +} + +type VcsProbe = DiscoveryProbe & { + readonly kind: VcsDriverKind; + readonly executable: string; + readonly versionArgs: ReadonlyArray; +}; + +interface DiscoveryProbeResult { + readonly kind: Kind; + readonly label: string; + readonly executable?: string; + readonly implemented: boolean; + readonly status: "available" | "missing"; + readonly version: Option.Option; + readonly installHint: string; + readonly detail: Option.Option; +} + +const VCS_PROBES: ReadonlyArray = [ + { + kind: "git", + label: "Git", + executable: "git", + versionArgs: ["--version"], + implemented: true, + installHint: "Install Git from https://git-scm.com/downloads or with your package manager.", + }, + { + kind: "jj", + label: "Jujutsu", + executable: "jj", + versionArgs: ["--version"], + implemented: false, + installHint: "Install Jujutsu with `brew install jj` or from https://github.com/jj-vcs/jj.", + }, +]; + +export interface SourceControlDiscoveryShape { + readonly discover: Effect.Effect; +} + +export class SourceControlDiscovery extends Context.Service< + SourceControlDiscovery, + SourceControlDiscoveryShape +>()("t3/source-control/SourceControlDiscovery") {} + +export const layer = Layer.effect( + SourceControlDiscovery, + Effect.gen(function* () { + const config = yield* ServerConfig; + const process = yield* VcsProcess.VcsProcess; + const sourceControlProviders = + yield* SourceControlProviderRegistry.SourceControlProviderRegistry; + + const probe = ( + input: DiscoveryProbe & { readonly kind: Kind }, + ): Effect.Effect> => { 
+ const executable = input.executable; + const versionArgs = input.versionArgs; + + if (!executable || !versionArgs) { + return Effect.succeed({ + kind: input.kind, + label: input.label, + implemented: input.implemented, + status: "missing" as const, + version: Option.none(), + installHint: input.installHint, + detail: Option.some(input.installHint), + } satisfies DiscoveryProbeResult); + } + + return process + .run({ + operation: "source-control.discovery.probe", + command: executable, + args: versionArgs, + cwd: config.cwd, + timeoutMs: 5_000, + maxOutputBytes: 8_000, + truncateOutputAtMaxBytes: true, + }) + .pipe( + Effect.map( + (result) => + ({ + kind: input.kind, + label: input.label, + executable, + implemented: input.implemented, + status: "available" as const, + version: Option.orElse( + SourceControlProviderDiscovery.firstNonEmptyLine(result.stdout), + () => SourceControlProviderDiscovery.firstNonEmptyLine(result.stderr), + ), + installHint: input.installHint, + detail: Option.none(), + }) satisfies DiscoveryProbeResult, + ), + Effect.catch((cause) => + Effect.succeed({ + kind: input.kind, + label: input.label, + executable, + implemented: input.implemented, + status: "missing" as const, + version: Option.none(), + installHint: input.installHint, + detail: SourceControlProviderDiscovery.detailFromCause(cause), + } satisfies DiscoveryProbeResult), + ), + ); + }; + + return SourceControlDiscovery.of({ + discover: Effect.all({ + versionControlSystems: Effect.all( + VCS_PROBES.map((entry) => probe(entry)) as ReadonlyArray>, + { concurrency: "unbounded" }, + ), + sourceControlProviders: sourceControlProviders.discover, + }), + }); + }), +); diff --git a/apps/server/src/sourceControl/SourceControlProvider.ts b/apps/server/src/sourceControl/SourceControlProvider.ts new file mode 100644 index 00000000000..12f89caf77e --- /dev/null +++ b/apps/server/src/sourceControl/SourceControlProvider.ts @@ -0,0 +1,101 @@ +import { Context, Effect } from "effect"; +import type 
{ + ChangeRequest, + ChangeRequestState, + SourceControlProviderError, + SourceControlProviderInfo, + SourceControlProviderKind, + SourceControlRepositoryCloneUrls, + SourceControlRepositoryVisibility, +} from "@t3tools/contracts"; + +export interface SourceControlProviderContext { + readonly provider: SourceControlProviderInfo; + readonly remoteName: string; + readonly remoteUrl: string; +} + +export interface SourceControlRefSelector { + readonly refName: string; + readonly owner?: string; + readonly repository?: string; +} + +export function parseSourceControlOwnerRef( + headSelector: string, +): SourceControlRefSelector | undefined { + const match = /^([^:/\s]+):(.+)$/u.exec(headSelector.trim()); + const owner = match?.[1]?.trim(); + const refName = match?.[2]?.trim(); + return owner && refName ? { owner, refName } : undefined; +} + +export function normalizeSourceBranch(headSelector: string): string { + return parseSourceControlOwnerRef(headSelector)?.refName ?? headSelector.trim(); +} + +export function sourceBranch(input: { + readonly headSelector: string; + readonly source?: SourceControlRefSelector; +}): string { + return input.source?.refName ?? normalizeSourceBranch(input.headSelector); +} + +export function sourceControlRefFromInput(input: { + readonly headSelector: string; + readonly source?: SourceControlRefSelector; +}): SourceControlRefSelector | undefined { + return input.source ?? 
parseSourceControlOwnerRef(input.headSelector); +} + +export interface SourceControlProviderShape { + readonly kind: SourceControlProviderKind; + readonly listChangeRequests: (input: { + readonly cwd: string; + readonly context?: SourceControlProviderContext; + readonly source?: SourceControlRefSelector; + readonly headSelector: string; + readonly state: ChangeRequestState | "all"; + readonly limit?: number; + }) => Effect.Effect, SourceControlProviderError>; + readonly getChangeRequest: (input: { + readonly cwd: string; + readonly context?: SourceControlProviderContext; + readonly reference: string; + }) => Effect.Effect; + readonly createChangeRequest: (input: { + readonly cwd: string; + readonly context?: SourceControlProviderContext; + readonly source?: SourceControlRefSelector; + readonly target?: SourceControlRefSelector; + readonly baseRefName: string; + readonly headSelector: string; + readonly title: string; + readonly bodyFile: string; + }) => Effect.Effect; + readonly getRepositoryCloneUrls: (input: { + readonly cwd: string; + readonly context?: SourceControlProviderContext; + readonly repository: string; + }) => Effect.Effect; + readonly createRepository: (input: { + readonly cwd: string; + readonly repository: string; + readonly visibility: SourceControlRepositoryVisibility; + }) => Effect.Effect; + readonly getDefaultBranch: (input: { + readonly cwd: string; + readonly context?: SourceControlProviderContext; + }) => Effect.Effect; + readonly checkoutChangeRequest: (input: { + readonly cwd: string; + readonly context?: SourceControlProviderContext; + readonly reference: string; + readonly force?: boolean; + }) => Effect.Effect; +} + +export class SourceControlProvider extends Context.Service< + SourceControlProvider, + SourceControlProviderShape +>()("t3/source-control/SourceControlProvider") {} diff --git a/apps/server/src/sourceControl/SourceControlProviderDiscovery.ts b/apps/server/src/sourceControl/SourceControlProviderDiscovery.ts new file mode 
100644 index 00000000000..87c0c4756a4 --- /dev/null +++ b/apps/server/src/sourceControl/SourceControlProviderDiscovery.ts @@ -0,0 +1,237 @@ +import type { + SourceControlProviderAuth, + SourceControlProviderDiscoveryItem, + SourceControlProviderKind, +} from "@t3tools/contracts"; +import { Effect, Option } from "effect"; + +import type * as VcsProcess from "../vcs/VcsProcess.ts"; + +export interface SourceControlAuthProbeInput { + readonly stdout: string; + readonly stderr: string; + readonly exitCode: VcsProcess.VcsProcessOutput["exitCode"]; +} + +interface SourceControlDiscoverySpecBase { + readonly kind: SourceControlProviderKind; + readonly label: string; + readonly installHint: string; +} + +export type SourceControlCliDiscoverySpec = SourceControlDiscoverySpecBase & { + readonly type: "cli"; + readonly executable: string; + readonly versionArgs: ReadonlyArray; + readonly authArgs: ReadonlyArray; + readonly parseAuth: (input: SourceControlAuthProbeInput) => SourceControlProviderAuth; +}; + +export type SourceControlApiDiscoverySpec = SourceControlDiscoverySpecBase & { + readonly type: "api"; + readonly probeAuth: Effect.Effect; +}; + +export type SourceControlProviderDiscoverySpec = + | SourceControlCliDiscoverySpec + | SourceControlApiDiscoverySpec; + +interface DiscoveryProbeResult { + readonly kind: SourceControlProviderKind; + readonly label: string; + readonly executable: string; + readonly status: "available" | "missing"; + readonly version: Option.Option; + readonly installHint: string; + readonly detail: Option.Option; +} + +export function firstNonEmptyLine(text: string): Option.Option { + const line = text + .split(/\r?\n/) + .map((entry) => entry.trim()) + .find((entry) => entry.length > 0); + return line === undefined ? 
Option.none() : Option.some(line); +} + +export function detailFromCause(cause: unknown): Option.Option<string> { + if (cause instanceof Error && cause.message.trim().length > 0) { + return Option.some(cause.message.trim()); + } + return Option.none(); +} + +function authAccount(account: string | undefined): Option.Option<string> { + const trimmed = account?.trim(); + return trimmed === undefined || trimmed.length === 0 ? Option.none() : Option.some(trimmed); +} + +function authHost(host: string | undefined): Option.Option<string> { + const trimmed = host?.trim(); + return trimmed === undefined || trimmed.length === 0 ? Option.none() : Option.some(trimmed); +} + +function authDetail(detail: string | undefined): Option.Option<string> { + const trimmed = detail?.trim(); + return trimmed === undefined || trimmed.length === 0 ? Option.none() : Option.some(trimmed); +} + +export function providerAuth(input: { + readonly status: SourceControlProviderAuth["status"]; + readonly account?: string | undefined; + readonly host?: string | undefined; + readonly detail?: string | undefined; +}): SourceControlProviderAuth { + return { + status: input.status, + account: authAccount(input.account), + host: authHost(input.host), + detail: authDetail(input.detail), + }; +} + +export function unknownAuth(detail?: string): SourceControlProviderAuth { + return providerAuth({ status: "unknown", detail }); +} + +export function combinedAuthOutput(input: SourceControlAuthProbeInput): string { + return [input.stdout, input.stderr].filter((entry) => entry.trim().length > 0).join("\n"); +} + +function sanitizedAuthLines(text: string): ReadonlyArray<string> { + return text + .split(/\r?\n/) + .map((entry) => entry.trim()) + .filter((entry) => entry.length > 0) + .filter((entry) => !/^[-\s]*token(?:\s+scopes?)?:/iu.test(entry)); +} + +export function firstSafeAuthLine(text: string): string | undefined { + return sanitizedAuthLines(text)[0]; +} + +export function parseCliHost(text: string): string | undefined { + return 
sanitizedAuthLines(text) + .map((line) => line.replace(/^[^a-z0-9]+/iu, "")) + .find((line) => /^[a-z0-9][a-z0-9.-]*(?::\d+)?$/iu.test(line)); +} + +export function matchFirst(text: string, patterns: ReadonlyArray): string | undefined { + for (const pattern of patterns) { + const match = pattern.exec(text); + const value = match?.[1]?.trim(); + if (value && value.length > 0) return value; + } + return undefined; +} + +function probeCli(input: { + readonly spec: SourceControlCliDiscoverySpec; + readonly process: VcsProcess.VcsProcessShape; + readonly cwd: string; +}): Effect.Effect { + return input.process + .run({ + operation: "source-control.discovery.probe", + command: input.spec.executable, + args: input.spec.versionArgs, + cwd: input.cwd, + timeoutMs: 5_000, + maxOutputBytes: 8_000, + truncateOutputAtMaxBytes: true, + }) + .pipe( + Effect.map( + (result) => + ({ + kind: input.spec.kind, + label: input.spec.label, + executable: input.spec.executable, + status: "available" as const, + version: Option.orElse(firstNonEmptyLine(result.stdout), () => + firstNonEmptyLine(result.stderr), + ), + installHint: input.spec.installHint, + detail: Option.none(), + }) satisfies DiscoveryProbeResult, + ), + Effect.catch((cause) => + Effect.succeed({ + kind: input.spec.kind, + label: input.spec.label, + executable: input.spec.executable, + status: "missing" as const, + version: Option.none(), + installHint: input.spec.installHint, + detail: detailFromCause(cause), + } satisfies DiscoveryProbeResult), + ), + ); +} + +export function probeSourceControlProvider(input: { + readonly spec: SourceControlProviderDiscoverySpec; + readonly process: VcsProcess.VcsProcessShape; + readonly cwd: string; +}): Effect.Effect { + if (input.spec.type === "api") { + return input.spec.probeAuth.pipe( + Effect.map( + (auth) => + ({ + kind: input.spec.kind, + label: input.spec.label, + status: "available" as const, + version: Option.none(), + installHint: input.spec.installHint, + detail: 
Option.none(), + auth, + }) satisfies SourceControlProviderDiscoveryItem, + ), + ); + } + + const spec = input.spec; + + return probeCli({ + spec, + process: input.process, + cwd: input.cwd, + }).pipe( + Effect.flatMap((item) => { + if (item.status !== "available") { + return Effect.succeed({ + ...item, + auth: unknownAuth("Hosting integration command was not found on the server PATH."), + } satisfies SourceControlProviderDiscoveryItem); + } + + return input.process + .run({ + operation: "source-control.discovery.auth", + command: spec.executable, + args: spec.authArgs, + cwd: input.cwd, + allowNonZeroExit: true, + timeoutMs: 5_000, + maxOutputBytes: 8_000, + truncateOutputAtMaxBytes: true, + }) + .pipe( + Effect.map( + (result) => + ({ + ...item, + auth: spec.parseAuth(result), + }) satisfies SourceControlProviderDiscoveryItem, + ), + Effect.catch((cause) => + Effect.succeed({ + ...item, + auth: unknownAuth(Option.getOrUndefined(detailFromCause(cause))), + } satisfies SourceControlProviderDiscoveryItem), + ), + ); + }), + ); +} diff --git a/apps/server/src/sourceControl/SourceControlProviderRegistry.test.ts b/apps/server/src/sourceControl/SourceControlProviderRegistry.test.ts new file mode 100644 index 00000000000..395bd9b5e87 --- /dev/null +++ b/apps/server/src/sourceControl/SourceControlProviderRegistry.test.ts @@ -0,0 +1,145 @@ +import { assert, it } from "@effect/vitest"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { DateTime, Effect, Layer, Option } from "effect"; + +import { ServerConfig } from "../config.ts"; +import type * as VcsDriver from "../vcs/VcsDriver.ts"; +import * as VcsDriverRegistry from "../vcs/VcsDriverRegistry.ts"; +import * as VcsProcess from "../vcs/VcsProcess.ts"; +import * as AzureDevOpsCli from "./AzureDevOpsCli.ts"; +import * as BitbucketApi from "./BitbucketApi.ts"; +import * as GitHubCli from "./GitHubCli.ts"; +import * as GitLabCli from "./GitLabCli.ts"; +import * as SourceControlProviderRegistry from 
"./SourceControlProviderRegistry.ts"; + +const TEST_EPOCH = DateTime.makeUnsafe("1970-01-01T00:00:00.000Z"); + +function makeRegistry(input: { + readonly remotes: ReadonlyArray<{ + readonly name: string; + readonly url: string; + }>; +}) { + const driver = { + listRemotes: () => + Effect.succeed({ + remotes: input.remotes.map((remote) => ({ + ...remote, + pushUrl: Option.none(), + isPrimary: remote.name === "origin", + })), + freshness: { + source: "live-local" as const, + observedAt: TEST_EPOCH, + expiresAt: Option.none(), + }, + }), + } satisfies Partial; + + const registryLayer = Layer.mock(VcsDriverRegistry.VcsDriverRegistry)({ + get: () => Effect.succeed(driver as unknown as VcsDriver.VcsDriverShape), + resolve: () => + Effect.succeed({ + kind: "git", + repository: { + kind: "git", + rootPath: "/repo", + metadataPath: null, + freshness: { + source: "live-local" as const, + observedAt: TEST_EPOCH, + expiresAt: Option.none(), + }, + }, + driver: driver as unknown as VcsDriver.VcsDriverShape, + }), + }); + + return SourceControlProviderRegistry.make().pipe( + Effect.provide( + Layer.mergeAll( + registryLayer, + Layer.mock(AzureDevOpsCli.AzureDevOpsCli)({}), + Layer.mock(BitbucketApi.BitbucketApi)({}), + Layer.mock(GitHubCli.GitHubCli)({}), + Layer.mock(GitLabCli.GitLabCli)({}), + Layer.mock(VcsProcess.VcsProcess)({}), + ServerConfig.layerTest(process.cwd(), { prefix: "t3-source-control-registry-test-" }).pipe( + Layer.provide(NodeServices.layer), + ), + ), + ), + ); +} + +it.effect("routes GitHub remotes to the GitHub provider", () => + Effect.gen(function* () { + const registry = yield* makeRegistry({ + remotes: [{ name: "origin", url: "git@github.com:pingdotgg/t3code.git" }], + }); + + const provider = yield* registry.resolve({ cwd: "/repo" }); + + assert.strictEqual(provider.kind, "github"); + }), +); + +it.effect("routes directly by provider kind for remote-first workflows", () => + Effect.gen(function* () { + const registry = yield* makeRegistry({ + remotes: 
[], + }); + + const provider = yield* registry.get("github"); + + assert.strictEqual(provider.kind, "github"); + }), +); + +it.effect("routes GitLab remotes to the GitLab provider", () => + Effect.gen(function* () { + const registry = yield* makeRegistry({ + remotes: [{ name: "origin", url: "git@gitlab.com:group/project.git" }], + }); + + const provider = yield* registry.resolve({ cwd: "/repo" }); + + assert.strictEqual(provider.kind, "gitlab"); + }), +); + +it.effect("routes Bitbucket remotes to the Bitbucket provider", () => + Effect.gen(function* () { + const registry = yield* makeRegistry({ + remotes: [{ name: "origin", url: "git@bitbucket.org:pingdotgg/t3code.git" }], + }); + + const provider = yield* registry.resolve({ cwd: "/repo" }); + + assert.strictEqual(provider.kind, "bitbucket"); + }), +); + +it.effect("routes Azure DevOps remotes to the Azure DevOps provider", () => + Effect.gen(function* () { + const registry = yield* makeRegistry({ + remotes: [{ name: "origin", url: "https://dev.azure.com/acme/project/_git/repo" }], + }); + + const provider = yield* registry.resolve({ cwd: "/repo" }); + + assert.strictEqual(provider.kind, "azure-devops"); + }), +); + +it.effect("falls back to a non-origin remote when origin is not configured", () => + Effect.gen(function* () { + const registry = yield* makeRegistry({ + remotes: [{ name: "upstream", url: "https://dev.azure.com/acme/project/_git/repo" }], + }); + + const provider = yield* registry.resolve({ cwd: "/repo" }); + + assert.strictEqual(provider.kind, "azure-devops"); + }), +); diff --git a/apps/server/src/sourceControl/SourceControlProviderRegistry.ts b/apps/server/src/sourceControl/SourceControlProviderRegistry.ts new file mode 100644 index 00000000000..c8b79f21651 --- /dev/null +++ b/apps/server/src/sourceControl/SourceControlProviderRegistry.ts @@ -0,0 +1,251 @@ +import { Cache, Context, Duration, Effect, Exit, Layer } from "effect"; +import { + SourceControlProviderError, + type 
SourceControlProviderDiscoveryItem, +} from "@t3tools/contracts"; +import type { SourceControlProviderKind } from "@t3tools/contracts"; +import { detectSourceControlProviderFromRemoteUrl } from "@t3tools/shared/sourceControl"; + +import * as AzureDevOpsSourceControlProvider from "./AzureDevOpsSourceControlProvider.ts"; +import * as BitbucketSourceControlProvider from "./BitbucketSourceControlProvider.ts"; +import * as GitHubSourceControlProvider from "./GitHubSourceControlProvider.ts"; +import * as GitLabSourceControlProvider from "./GitLabSourceControlProvider.ts"; +import * as SourceControlProvider from "./SourceControlProvider.ts"; +import * as SourceControlProviderDiscovery from "./SourceControlProviderDiscovery.ts"; +import { ServerConfig } from "../config.ts"; +import * as VcsDriverRegistry from "../vcs/VcsDriverRegistry.ts"; +import * as VcsProcess from "../vcs/VcsProcess.ts"; + +const PROVIDER_DETECTION_CACHE_CAPACITY = 2_048; +const PROVIDER_DETECTION_CACHE_TTL = Duration.seconds(5); + +export interface SourceControlProviderRegistration { + readonly kind: SourceControlProviderKind; + readonly provider: SourceControlProvider.SourceControlProviderShape; + readonly discovery: SourceControlProviderDiscovery.SourceControlProviderDiscoverySpec; +} + +export interface SourceControlProviderHandle { + readonly provider: SourceControlProvider.SourceControlProviderShape; + readonly context: SourceControlProvider.SourceControlProviderContext | null; +} + +export interface SourceControlProviderRegistryShape { + readonly get: ( + kind: SourceControlProviderKind, + ) => Effect.Effect; + readonly resolveHandle: (input: { + readonly cwd: string; + }) => Effect.Effect; + readonly resolve: (input: { + readonly cwd: string; + }) => Effect.Effect; + readonly discover: Effect.Effect>; +} + +export class SourceControlProviderRegistry extends Context.Service< + SourceControlProviderRegistry, + SourceControlProviderRegistryShape 
+>()("t3/source-control/SourceControlProviderRegistry") {} + +function unsupportedProvider( + kind: SourceControlProviderKind, +): SourceControlProvider.SourceControlProviderShape { + const unsupported = (operation: string) => + Effect.fail( + new SourceControlProviderError({ + provider: kind, + operation, + detail: `No ${kind} source control provider is registered.`, + }), + ); + + return SourceControlProvider.SourceControlProvider.of({ + kind, + listChangeRequests: () => unsupported("listChangeRequests"), + getChangeRequest: () => unsupported("getChangeRequest"), + createChangeRequest: () => unsupported("createChangeRequest"), + getRepositoryCloneUrls: () => unsupported("getRepositoryCloneUrls"), + createRepository: () => unsupported("createRepository"), + getDefaultBranch: () => unsupported("getDefaultBranch"), + checkoutChangeRequest: () => unsupported("checkoutChangeRequest"), + }); +} + +function providerDetectionError(operation: string, cwd: string, cause: unknown) { + return new SourceControlProviderError({ + provider: "unknown", + operation, + detail: `Failed to detect source control provider for ${cwd}.`, + cause, + }); +} + +function selectProviderContext( + remotes: ReadonlyArray<{ + readonly name: string; + readonly url: string; + }>, +): SourceControlProvider.SourceControlProviderContext | null { + const candidates = remotes + .map((remote) => { + const provider = detectSourceControlProviderFromRemoteUrl(remote.url); + return provider + ? { + provider, + remoteName: remote.name, + remoteUrl: remote.url, + } + : null; + }) + .filter((value): value is SourceControlProvider.SourceControlProviderContext => value !== null); + + return ( + candidates.find((candidate) => candidate.remoteName === "origin") ?? + candidates.find((candidate) => candidate.provider.kind !== "unknown") ?? + candidates[0] ?? 
+ null + ); +} + +function bindProviderContext( + provider: SourceControlProvider.SourceControlProviderShape, + context: SourceControlProvider.SourceControlProviderContext | null, +): SourceControlProvider.SourceControlProviderShape { + if (context === null) { + return provider; + } + + return SourceControlProvider.SourceControlProvider.of({ + kind: provider.kind, + listChangeRequests: (input) => + provider.listChangeRequests({ + ...input, + context: input.context ?? context, + }), + getChangeRequest: (input) => + provider.getChangeRequest({ + ...input, + context: input.context ?? context, + }), + createChangeRequest: (input) => + provider.createChangeRequest({ + ...input, + context: input.context ?? context, + }), + getRepositoryCloneUrls: (input) => + provider.getRepositoryCloneUrls({ + ...input, + context: input.context ?? context, + }), + createRepository: (input) => provider.createRepository(input), + getDefaultBranch: (input) => + provider.getDefaultBranch({ + ...input, + context: input.context ?? context, + }), + checkoutChangeRequest: (input) => + provider.checkoutChangeRequest({ + ...input, + context: input.context ?? context, + }), + }); +} + +export const makeWithProviders = Effect.fn("makeSourceControlProviderRegistryWithProviders")( + function* (registrations: ReadonlyArray) { + const config = yield* ServerConfig; + const process = yield* VcsProcess.VcsProcess; + const vcsRegistry = yield* VcsDriverRegistry.VcsDriverRegistry; + const providers = new Map< + SourceControlProviderKind, + SourceControlProvider.SourceControlProviderShape + >(registrations.map((registration) => [registration.kind, registration.provider])); + const discoverySpecs = registrations.map((registration) => registration.discovery); + + const get: SourceControlProviderRegistryShape["get"] = (kind) => + Effect.succeed(providers.get(kind) ?? 
unsupportedProvider(kind)); + + const detectProviderContext = Effect.fn("SourceControlProviderRegistry.detectProviderContext")( + function* (cwd: string) { + const handle = yield* vcsRegistry + .resolve({ cwd }) + .pipe(Effect.mapError((error) => providerDetectionError("detectProvider", cwd, error))); + const remotes = yield* handle.driver + .listRemotes(cwd) + .pipe(Effect.mapError((error) => providerDetectionError("detectProvider", cwd, error))); + + return selectProviderContext(remotes.remotes); + }, + ); + + const providerContextCache = yield* Cache.makeWith< + string, + SourceControlProvider.SourceControlProviderContext | null, + SourceControlProviderError + >(detectProviderContext, { + capacity: PROVIDER_DETECTION_CACHE_CAPACITY, + timeToLive: (exit) => (Exit.isSuccess(exit) ? PROVIDER_DETECTION_CACHE_TTL : Duration.zero), + }); + + const resolveHandle: SourceControlProviderRegistryShape["resolveHandle"] = (input) => + Cache.get(providerContextCache, input.cwd).pipe( + Effect.map((context) => { + const kind = context?.provider.kind ?? "unknown"; + const provider = providers.get(kind) ?? 
unsupportedProvider(kind); + return { + provider: bindProviderContext(provider, context), + context, + } satisfies SourceControlProviderHandle; + }), + ); + + return SourceControlProviderRegistry.of({ + get, + resolveHandle, + resolve: (input) => resolveHandle(input).pipe(Effect.map((handle) => handle.provider)), + discover: Effect.all( + discoverySpecs.map((spec) => + SourceControlProviderDiscovery.probeSourceControlProvider({ + spec, + process, + cwd: config.cwd, + }), + ), + { concurrency: "unbounded" }, + ), + }); + }, +); + +export const make = Effect.fn("makeSourceControlProviderRegistry")(function* () { + const github = yield* GitHubSourceControlProvider.make(); + const gitlab = yield* GitLabSourceControlProvider.make(); + const bitbucket = yield* BitbucketSourceControlProvider.make(); + const bitbucketDiscovery = yield* BitbucketSourceControlProvider.makeDiscovery(); + const azureDevOps = yield* AzureDevOpsSourceControlProvider.make(); + return yield* makeWithProviders([ + { + kind: "github", + provider: github, + discovery: GitHubSourceControlProvider.discovery, + }, + { + kind: "gitlab", + provider: gitlab, + discovery: GitLabSourceControlProvider.discovery, + }, + { + kind: "azure-devops", + provider: azureDevOps, + discovery: AzureDevOpsSourceControlProvider.discovery, + }, + { + kind: "bitbucket", + provider: bitbucket, + discovery: bitbucketDiscovery, + }, + ]); +}); + +export const layer = Layer.effect(SourceControlProviderRegistry, make()); diff --git a/apps/server/src/sourceControl/SourceControlRepositoryService.test.ts b/apps/server/src/sourceControl/SourceControlRepositoryService.test.ts new file mode 100644 index 00000000000..5280ee0e59c --- /dev/null +++ b/apps/server/src/sourceControl/SourceControlRepositoryService.test.ts @@ -0,0 +1,318 @@ +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { assert, it } from "@effect/vitest"; +import { Effect, FileSystem, Layer } from "effect"; +import { ChildProcessSpawner } from 
"effect/unstable/process"; + +import { GitCommandError, type SourceControlProviderError } from "@t3tools/contracts"; + +import { ServerConfig } from "../config.ts"; +import * as GitVcsDriver from "../vcs/GitVcsDriver.ts"; +import type * as SourceControlProvider from "./SourceControlProvider.ts"; +import * as SourceControlProviderRegistry from "./SourceControlProviderRegistry.ts"; +import * as SourceControlRepositoryService from "./SourceControlRepositoryService.ts"; + +const CLONE_URLS = { + nameWithOwner: "octocat/t3code", + url: "https://github.com/octocat/t3code", + sshUrl: "git@github.com:octocat/t3code.git", +}; + +function makeProvider( + overrides: Partial = {}, +): SourceControlProvider.SourceControlProviderShape { + const unsupported = (operation: string) => + Effect.die(`unexpected provider operation ${operation}`) as Effect.Effect< + never, + SourceControlProviderError + >; + + return { + kind: "github", + listChangeRequests: () => unsupported("listChangeRequests"), + getChangeRequest: () => unsupported("getChangeRequest"), + createChangeRequest: () => unsupported("createChangeRequest"), + getRepositoryCloneUrls: () => Effect.succeed(CLONE_URLS), + createRepository: () => Effect.succeed(CLONE_URLS), + getDefaultBranch: () => Effect.succeed(null), + checkoutChangeRequest: () => unsupported("checkoutChangeRequest"), + ...overrides, + }; +} + +function processOutput(): GitVcsDriver.ExecuteGitResult { + return { + exitCode: ChildProcessSpawner.ExitCode(0), + stdout: "", + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, + }; +} + +function makeLayer(input: { + readonly provider?: SourceControlProvider.SourceControlProviderShape; + readonly git?: Partial; +}) { + return SourceControlRepositoryService.layer.pipe( + Layer.provide( + Layer.mock(SourceControlProviderRegistry.SourceControlProviderRegistry)({ + get: () => Effect.succeed(input.provider ?? 
makeProvider()), + }), + ), + Layer.provide( + Layer.mock(GitVcsDriver.GitVcsDriver)({ + execute: () => Effect.succeed(processOutput()), + ensureRemote: () => Effect.succeed("origin"), + pushCurrentBranch: () => + Effect.succeed({ + status: "pushed" as const, + branch: "feature/remote-v1", + upstreamBranch: "origin/feature/remote-v1", + setUpstream: true, + }), + ...input.git, + }), + ), + Layer.provide(ServerConfig.layerTest(process.cwd(), { prefix: "t3-source-control-repos-" })), + Layer.provideMerge(NodeServices.layer), + ); +} + +it.effect("looks up repositories through the requested provider without search", () => { + const calls: Array<{ cwd: string; repository: string }> = []; + const provider = makeProvider({ + getRepositoryCloneUrls: (input) => + Effect.sync(() => { + calls.push({ cwd: input.cwd, repository: input.repository }); + return CLONE_URLS; + }), + }); + + return Effect.gen(function* () { + const service = yield* SourceControlRepositoryService.SourceControlRepositoryService; + const result = yield* service.lookupRepository({ + provider: "github", + repository: "octocat/t3code", + cwd: "/workspace", + }); + + assert.deepStrictEqual(result, { provider: "github", ...CLONE_URLS }); + assert.deepStrictEqual(calls, [{ cwd: "/workspace", repository: "octocat/t3code" }]); + }).pipe(Effect.provide(makeLayer({ provider }))); +}); + +it.effect("clones a looked-up repository into the requested destination", () => + Effect.gen(function* () { + const fs = yield* FileSystem.FileSystem; + const parent = yield* fs.makeTempDirectoryScoped({ + prefix: "t3-source-control-clone-parent-", + }); + const destinationPath = `${parent}/t3code`; + const cloneCalls: Array<{ cwd: string; args: ReadonlyArray }> = []; + + yield* Effect.gen(function* () { + const service = yield* SourceControlRepositoryService.SourceControlRepositoryService; + const result = yield* service.cloneRepository({ + provider: "github", + repository: "octocat/t3code", + destinationPath, + protocol: 
"https", + }); + + assert.deepStrictEqual(result, { + cwd: destinationPath, + remoteUrl: CLONE_URLS.url, + repository: { provider: "github", ...CLONE_URLS }, + }); + assert.deepStrictEqual(cloneCalls, [ + { + cwd: parent, + args: ["clone", CLONE_URLS.url, "t3code"], + }, + ]); + }).pipe( + Effect.provide( + makeLayer({ + git: { + execute: (input) => + Effect.sync(() => { + cloneCalls.push({ cwd: input.cwd, args: input.args }); + return processOutput(); + }), + }, + }), + ), + ); + }).pipe(Effect.provide(NodeServices.layer)), +); + +it.effect("publishes by creating the repository, adding a remote, and pushing upstream", () => { + const createCalls: Array<{ cwd: string; repository: string; visibility: string }> = []; + const remoteCalls: Array<{ cwd: string; preferredName: string; url: string }> = []; + const pushCalls: Array<{ cwd: string; remoteName: string | null | undefined }> = []; + const provider = makeProvider({ + createRepository: (input) => + Effect.sync(() => { + createCalls.push({ + cwd: input.cwd, + repository: input.repository, + visibility: input.visibility, + }); + return CLONE_URLS; + }), + }); + + return Effect.gen(function* () { + const service = yield* SourceControlRepositoryService.SourceControlRepositoryService; + const result = yield* service.publishRepository({ + cwd: "/workspace", + provider: "github", + repository: "octocat/t3code", + visibility: "private", + remoteName: "origin", + protocol: "ssh", + }); + + assert.deepStrictEqual(result, { + repository: { provider: "github", ...CLONE_URLS }, + remoteName: "origin", + remoteUrl: CLONE_URLS.sshUrl, + branch: "feature/remote-v1", + upstreamBranch: "origin/feature/remote-v1", + status: "pushed", + }); + assert.deepStrictEqual(createCalls, [ + { cwd: "/workspace", repository: "octocat/t3code", visibility: "private" }, + ]); + assert.deepStrictEqual(remoteCalls, [ + { cwd: "/workspace", preferredName: "origin", url: CLONE_URLS.sshUrl }, + ]); + assert.deepStrictEqual(pushCalls, [{ cwd: 
"/workspace", remoteName: "origin" }]); + }).pipe( + Effect.provide( + makeLayer({ + provider, + git: { + ensureRemote: (input) => + Effect.sync(() => { + remoteCalls.push(input); + return "origin"; + }), + pushCurrentBranch: (cwd, _fallbackBranch, options) => + Effect.sync(() => { + pushCalls.push({ cwd, remoteName: options?.remoteName }); + return { + status: "pushed" as const, + branch: "feature/remote-v1", + upstreamBranch: "origin/feature/remote-v1", + setUpstream: true, + }; + }), + }, + }), + ), + ); +}); + +it.effect("publishes to the remote name returned by ensureRemote", () => { + const pushCalls: Array<{ cwd: string; remoteName: string | null | undefined }> = []; + + return Effect.gen(function* () { + const service = yield* SourceControlRepositoryService.SourceControlRepositoryService; + const result = yield* service.publishRepository({ + cwd: "/workspace", + provider: "github", + repository: "octocat/t3code", + visibility: "private", + remoteName: "origin", + protocol: "ssh", + }); + + assert.equal(result.remoteName, "origin-1"); + assert.deepStrictEqual(pushCalls, [{ cwd: "/workspace", remoteName: "origin-1" }]); + }).pipe( + Effect.provide( + makeLayer({ + git: { + ensureRemote: () => Effect.succeed("origin-1"), + pushCurrentBranch: (cwd, _fallbackBranch, options) => + Effect.sync(() => { + pushCalls.push({ cwd, remoteName: options?.remoteName }); + return { + status: "pushed" as const, + branch: "feature/remote-v1", + upstreamBranch: `${options?.remoteName ?? 
"missing"}/feature/remote-v1`, + setUpstream: true, + }; + }), + }, + }), + ), + ); +}); + +it.effect("publish succeeds with status remote_added when the local repo has no commits", () => { + let pushCalls = 0; + return Effect.gen(function* () { + const service = yield* SourceControlRepositoryService.SourceControlRepositoryService; + const result = yield* service.publishRepository({ + cwd: "/workspace", + provider: "github", + repository: "octocat/t3code", + visibility: "private", + remoteName: "origin", + protocol: "ssh", + }); + + assert.deepStrictEqual(result, { + repository: { provider: "github", ...CLONE_URLS }, + remoteName: "origin", + remoteUrl: CLONE_URLS.sshUrl, + branch: "main", + status: "remote_added", + }); + assert.strictEqual(pushCalls, 0); + }).pipe( + Effect.provide( + makeLayer({ + git: { + execute: (input) => + input.args[0] === "rev-parse" + ? Effect.fail( + new GitCommandError({ + operation: input.operation, + command: "git rev-parse --verify HEAD", + cwd: input.cwd, + detail: "fatal: Needed a single revision", + }), + ) + : Effect.succeed(processOutput()), + statusDetails: () => + Effect.succeed({ + isRepo: true, + hasOriginRemote: true, + isDefaultBranch: true, + branch: "main", + upstreamRef: null, + hasWorkingTreeChanges: false, + workingTree: { files: [], insertions: 0, deletions: 0 }, + hasUpstream: false, + aheadCount: 0, + behindCount: 0, + aheadOfDefaultCount: 0, + }), + pushCurrentBranch: () => + Effect.sync(() => { + pushCalls += 1; + return { + status: "pushed" as const, + branch: "main", + upstreamBranch: "origin/main", + setUpstream: true, + }; + }), + }, + }), + ), + ); +}); diff --git a/apps/server/src/sourceControl/SourceControlRepositoryService.ts b/apps/server/src/sourceControl/SourceControlRepositoryService.ts new file mode 100644 index 00000000000..1bf71ac12dc --- /dev/null +++ b/apps/server/src/sourceControl/SourceControlRepositoryService.ts @@ -0,0 +1,320 @@ +import * as NodeOS from "node:os"; +import { Context, Effect, 
FileSystem, Layer, Path, Schema } from "effect"; + +import { + SourceControlRepositoryError, + type SourceControlCloneRepositoryInput, + type SourceControlCloneRepositoryResult, + type SourceControlCloneProtocol, + type SourceControlProviderKind, + type SourceControlPublishRepositoryInput, + type SourceControlPublishRepositoryResult, + type SourceControlRepositoryCloneUrls, + type SourceControlRepositoryInfo, + type SourceControlRepositoryLookupInput, +} from "@t3tools/contracts"; + +import { ServerConfig } from "../config.ts"; +import * as GitVcsDriver from "../vcs/GitVcsDriver.ts"; +import * as SourceControlProviderRegistry from "./SourceControlProviderRegistry.ts"; + +export interface SourceControlRepositoryServiceShape { + readonly lookupRepository: ( + input: SourceControlRepositoryLookupInput, + ) => Effect.Effect; + readonly cloneRepository: ( + input: SourceControlCloneRepositoryInput, + ) => Effect.Effect; + readonly publishRepository: ( + input: SourceControlPublishRepositoryInput, + ) => Effect.Effect; +} + +export class SourceControlRepositoryService extends Context.Service< + SourceControlRepositoryService, + SourceControlRepositoryServiceShape +>()("t3/source-control/SourceControlRepositoryService") {} + +function detailFromUnknown(cause: unknown): string { + if (typeof cause === "object" && cause !== null) { + if ("detail" in cause && typeof cause.detail === "string" && cause.detail.length > 0) { + return cause.detail; + } + if ("message" in cause && typeof cause.message === "string" && cause.message.length > 0) { + return cause.message; + } + } + + return "An unexpected source control error occurred."; +} + +function repositoryError(input: { + readonly operation: string; + readonly provider: SourceControlProviderKind; + readonly detail: string; + readonly cause?: unknown; +}): SourceControlRepositoryError { + return new SourceControlRepositoryError({ + provider: input.provider, + operation: input.operation, + detail: input.detail, + ...(input.cause 
=== undefined ? {} : { cause: input.cause }), + }); +} + +function mapRepositoryError(operation: string, provider: SourceControlProviderKind) { + return Effect.mapError((cause: unknown) => + Schema.is(SourceControlRepositoryError)(cause) + ? cause + : repositoryError({ + operation, + provider, + detail: detailFromUnknown(cause), + cause, + }), + ); +} + +function toRepositoryInfo( + provider: SourceControlProviderKind, + urls: SourceControlRepositoryCloneUrls, +): SourceControlRepositoryInfo { + return { + provider, + nameWithOwner: urls.nameWithOwner, + url: urls.url, + sshUrl: urls.sshUrl, + }; +} + +function selectRemoteUrl( + urls: SourceControlRepositoryCloneUrls, + protocol: SourceControlCloneProtocol | undefined, +): string { + switch (protocol ?? "auto") { + case "https": + return urls.url; + case "ssh": + case "auto": + return urls.sshUrl; + } +} + +function expandHomePath(input: string, path: Path.Path): string { + if (input === "~") { + return NodeOS.homedir(); + } + if (input.startsWith("~/") || input.startsWith("~\\")) { + return path.join(NodeOS.homedir(), input.slice(2)); + } + return input; +} + +export const make = Effect.fn("makeSourceControlRepositoryService")(function* () { + const config = yield* ServerConfig; + const fileSystem = yield* FileSystem.FileSystem; + const git = yield* GitVcsDriver.GitVcsDriver; + const path = yield* Path.Path; + const providers = yield* SourceControlProviderRegistry.SourceControlProviderRegistry; + + const ensureConcreteProvider = (input: { + readonly operation: string; + readonly provider: SourceControlProviderKind; + }) => { + if (input.provider !== "unknown") { + return Effect.succeed(input.provider); + } + + return Effect.fail( + repositoryError({ + operation: input.operation, + provider: input.provider, + detail: "Choose a source control provider before continuing.", + }), + ); + }; + + const lookupRepository = Effect.fn("SourceControlRepositoryService.lookupRepository")(function* ( + input: 
SourceControlRepositoryLookupInput, + ) { + const providerKind = yield* ensureConcreteProvider({ + operation: "lookupRepository", + provider: input.provider, + }); + const provider = yield* providers.get(providerKind); + const urls = yield* provider.getRepositoryCloneUrls({ + cwd: input.cwd ?? config.cwd, + repository: input.repository.trim(), + }); + return toRepositoryInfo(providerKind, urls); + }); + + const normalizeDestinationPath = Effect.fn("SourceControlRepositoryService.normalizeDestination")( + function* (destinationPath: string) { + const trimmed = destinationPath.trim(); + if (trimmed.length === 0) { + return yield* Effect.fail( + repositoryError({ + operation: "cloneRepository", + provider: "unknown", + detail: "Choose a destination path before cloning.", + }), + ); + } + + return path.resolve(expandHomePath(trimmed, path)); + }, + ); + + const prepareDestination = Effect.fn("SourceControlRepositoryService.prepareDestination")( + function* (destinationPath: string) { + const normalizedDestination = yield* normalizeDestinationPath(destinationPath); + if (yield* fileSystem.exists(normalizedDestination).pipe(Effect.orElseSucceed(() => false))) { + const entries = yield* fileSystem + .readDirectory(normalizedDestination, { recursive: false }) + .pipe( + Effect.mapError((cause) => + repositoryError({ + operation: "cloneRepository", + provider: "unknown", + detail: "Destination path already exists and is not a directory.", + cause, + }), + ), + ); + if (entries.length > 0) { + return yield* Effect.fail( + repositoryError({ + operation: "cloneRepository", + provider: "unknown", + detail: "Destination path already exists and is not empty.", + }), + ); + } + } else { + yield* fileSystem.makeDirectory(path.dirname(normalizedDestination), { recursive: true }); + } + + return { + destinationPath: normalizedDestination, + parentPath: path.dirname(normalizedDestination), + directoryName: path.basename(normalizedDestination), + }; + }, + ); + + const cloneRepository 
= Effect.fn("SourceControlRepositoryService.cloneRepository")(function* ( + input: SourceControlCloneRepositoryInput, + ) { + const preparedDestination = yield* prepareDestination(input.destinationPath); + let repository: SourceControlRepositoryInfo | null = null; + let remoteUrl = input.remoteUrl?.trim() ?? null; + let provider: SourceControlProviderKind = input.provider ?? "unknown"; + + if (input.provider && input.repository) { + repository = yield* lookupRepository({ + provider: input.provider, + repository: input.repository, + cwd: preparedDestination.parentPath, + }); + remoteUrl = selectRemoteUrl(repository, input.protocol); + provider = input.provider; + } + + if (!remoteUrl) { + return yield* Effect.fail( + repositoryError({ + operation: "cloneRepository", + provider, + detail: "Enter a repository path or clone URL before cloning.", + }), + ); + } + + yield* git.execute({ + operation: "SourceControlRepositoryService.cloneRepository", + cwd: preparedDestination.parentPath, + args: ["clone", remoteUrl, preparedDestination.directoryName], + timeoutMs: 120_000, + maxOutputBytes: 256 * 1024, + }); + + return { + cwd: preparedDestination.destinationPath, + remoteUrl, + repository, + }; + }); + + const publishRepository = Effect.fn("SourceControlRepositoryService.publishRepository")( + function* (input: SourceControlPublishRepositoryInput) { + const providerKind = yield* ensureConcreteProvider({ + operation: "publishRepository", + provider: input.provider, + }); + const provider = yield* providers.get(providerKind); + const urls = yield* provider.createRepository({ + cwd: input.cwd, + repository: input.repository.trim(), + visibility: input.visibility, + }); + const remoteUrl = selectRemoteUrl(urls, input.protocol); + const remoteName = yield* git.ensureRemote({ + cwd: input.cwd, + preferredName: input.remoteName?.trim() || "origin", + url: remoteUrl, + }); + + // An empty local repo (no commits) would make `git push HEAD:...` fail + // with an opaque "src 
refspec HEAD does not match any". Treat this as a + // partial success: the remote was created and wired up, but there is + // nothing to push yet. + const hasCommits = yield* git + .execute({ + operation: "SourceControlRepositoryService.publishRepository.headCheck", + cwd: input.cwd, + args: ["rev-parse", "--verify", "HEAD"], + }) + .pipe( + Effect.map(() => true), + Effect.catch(() => Effect.succeed(false)), + ); + if (!hasCommits) { + const details = yield* git + .statusDetails(input.cwd) + .pipe(Effect.catch(() => Effect.succeed(null))); + return { + repository: toRepositoryInfo(providerKind, urls), + remoteName, + remoteUrl, + branch: details?.branch ?? "main", + status: "remote_added" as const, + }; + } + + const pushResult = yield* git.pushCurrentBranch(input.cwd, null, { remoteName }); + + return { + repository: toRepositoryInfo(providerKind, urls), + remoteName, + remoteUrl, + branch: pushResult.branch, + ...(pushResult.upstreamBranch ? { upstreamBranch: pushResult.upstreamBranch } : {}), + status: "pushed" as const, + }; + }, + ); + + return SourceControlRepositoryService.of({ + lookupRepository: (input) => + lookupRepository(input).pipe(mapRepositoryError("lookupRepository", input.provider)), + cloneRepository: (input) => + cloneRepository(input).pipe( + mapRepositoryError("cloneRepository", input.provider ?? 
"unknown"), + ), + publishRepository: (input) => + publishRepository(input).pipe(mapRepositoryError("publishRepository", input.provider)), + }); +}); + +export const layer = Layer.effect(SourceControlRepositoryService, make()); diff --git a/apps/server/src/sourceControl/azureDevOpsPullRequests.ts b/apps/server/src/sourceControl/azureDevOpsPullRequests.ts new file mode 100644 index 00000000000..48c7a836110 --- /dev/null +++ b/apps/server/src/sourceControl/azureDevOpsPullRequests.ts @@ -0,0 +1,106 @@ +import { Cause, DateTime, Exit, Option, Result, Schema } from "effect"; +import { PositiveInt, TrimmedNonEmptyString } from "@t3tools/contracts"; +import { decodeJsonResult, formatSchemaError } from "@t3tools/shared/schemaJson"; + +export interface NormalizedAzureDevOpsPullRequestRecord { + readonly number: number; + readonly title: string; + readonly url: string; + readonly baseRefName: string; + readonly headRefName: string; + readonly state: "open" | "closed" | "merged"; + readonly updatedAt: Option.Option; +} + +const AzureDevOpsPullRequestSchema = Schema.Struct({ + pullRequestId: PositiveInt, + title: TrimmedNonEmptyString, + url: Schema.optional(Schema.String), + sourceRefName: TrimmedNonEmptyString, + targetRefName: TrimmedNonEmptyString, + status: Schema.String, + creationDate: Schema.optional(Schema.OptionFromNullOr(Schema.DateTimeUtcFromString)), + closedDate: Schema.optional(Schema.OptionFromNullOr(Schema.DateTimeUtcFromString)), + _links: Schema.optional( + Schema.Struct({ + web: Schema.optional( + Schema.Struct({ + href: Schema.String, + }), + ), + }), + ), +}); + +function trimOptionalString(value: string | null | undefined): string | null { + const trimmed = value?.trim() ?? ""; + return trimmed.length > 0 ? 
trimmed : null; +} + +function normalizeRefName(refName: string): string { + return refName.trim().replace(/^refs\/heads\//, ""); +} + +function normalizeAzureDevOpsPullRequestState(status: string): "open" | "closed" | "merged" { + switch (status.trim().toLowerCase()) { + case "completed": + return "merged"; + case "abandoned": + return "closed"; + default: + return "open"; + } +} + +function normalizeAzureDevOpsPullRequestRecord( + raw: Schema.Schema.Type, +): NormalizedAzureDevOpsPullRequestRecord { + return { + number: raw.pullRequestId, + title: raw.title, + url: trimOptionalString(raw._links?.web?.href) ?? trimOptionalString(raw.url) ?? "", + baseRefName: normalizeRefName(raw.targetRefName), + headRefName: normalizeRefName(raw.sourceRefName), + state: normalizeAzureDevOpsPullRequestState(raw.status), + updatedAt: (raw.closedDate ?? Option.none()).pipe( + Option.orElse(() => raw.creationDate ?? Option.none()), + ), + }; +} + +const decodeAzureDevOpsPullRequestList = decodeJsonResult(Schema.Array(Schema.Unknown)); +const decodeAzureDevOpsPullRequest = decodeJsonResult(AzureDevOpsPullRequestSchema); +const decodeAzureDevOpsPullRequestEntry = Schema.decodeUnknownExit(AzureDevOpsPullRequestSchema); + +export const formatAzureDevOpsJsonDecodeError = formatSchemaError; + +export function decodeAzureDevOpsPullRequestListJson( + raw: string, +): Result.Result< + ReadonlyArray, + Cause.Cause +> { + const result = decodeAzureDevOpsPullRequestList(raw); + if (Result.isSuccess(result)) { + const pullRequests: NormalizedAzureDevOpsPullRequestRecord[] = []; + for (const entry of result.success) { + const decodedEntry = decodeAzureDevOpsPullRequestEntry(entry); + if (Exit.isFailure(decodedEntry)) { + continue; + } + pullRequests.push(normalizeAzureDevOpsPullRequestRecord(decodedEntry.value)); + } + return Result.succeed(pullRequests); + } + return Result.fail(result.failure); +} + +export function decodeAzureDevOpsPullRequestJson( + raw: string, +): Result.Result> { + const 
result = decodeAzureDevOpsPullRequest(raw); + if (Result.isSuccess(result)) { + return Result.succeed(normalizeAzureDevOpsPullRequestRecord(result.success)); + } + return Result.fail(result.failure); +} diff --git a/apps/server/src/sourceControl/bitbucketPullRequests.ts b/apps/server/src/sourceControl/bitbucketPullRequests.ts new file mode 100644 index 00000000000..5313eaba974 --- /dev/null +++ b/apps/server/src/sourceControl/bitbucketPullRequests.ts @@ -0,0 +1,104 @@ +import { DateTime, Option, Schema } from "effect"; +import { PositiveInt, TrimmedNonEmptyString } from "@t3tools/contracts"; + +export interface NormalizedBitbucketPullRequestRecord { + readonly number: number; + readonly title: string; + readonly url: string; + readonly baseRefName: string; + readonly headRefName: string; + readonly state: "open" | "closed" | "merged"; + readonly updatedAt: Option.Option; + readonly isCrossRepository?: boolean; + readonly headRepositoryNameWithOwner?: string | null; + readonly headRepositoryOwnerLogin?: string | null; +} + +export const BitbucketRepositoryRefSchema = Schema.Struct({ + full_name: Schema.optional(Schema.NullOr(TrimmedNonEmptyString)), + workspace: Schema.optional( + Schema.NullOr( + Schema.Struct({ + slug: Schema.optional(Schema.NullOr(TrimmedNonEmptyString)), + }), + ), + ), +}); + +export const BitbucketPullRequestBranchSchema = Schema.Struct({ + repository: Schema.optional(Schema.NullOr(BitbucketRepositoryRefSchema)), + branch: Schema.Struct({ + name: TrimmedNonEmptyString, + }), +}); + +export const BitbucketPullRequestSchema = Schema.Struct({ + id: PositiveInt, + title: TrimmedNonEmptyString, + state: Schema.optional(Schema.NullOr(Schema.String)), + updated_on: Schema.optional(Schema.OptionFromNullOr(Schema.DateTimeUtcFromString)), + links: Schema.Struct({ + html: Schema.Struct({ + href: TrimmedNonEmptyString, + }), + }), + source: BitbucketPullRequestBranchSchema, + destination: BitbucketPullRequestBranchSchema, +}); + +export const 
BitbucketPullRequestListSchema = Schema.Struct({ + values: Schema.Array(BitbucketPullRequestSchema), + next: Schema.optional(TrimmedNonEmptyString), +}); + +function trimOptionalString(value: string | null | undefined): string | null { + const trimmed = value?.trim() ?? ""; + return trimmed.length > 0 ? trimmed : null; +} + +function repositoryOwner(repository: Schema.Schema.Type) { + return ( + trimOptionalString(repository.workspace?.slug) ?? + (repository.full_name?.includes("/") ? (repository.full_name.split("/")[0] ?? null) : null) + ); +} + +function normalizeBitbucketPullRequestState(state: string | null | undefined) { + switch (state?.trim().toUpperCase()) { + case "MERGED": + return "merged" as const; + case "DECLINED": + case "SUPERSEDED": + return "closed" as const; + case "OPEN": + default: + return "open" as const; + } +} + +export function normalizeBitbucketPullRequestRecord( + raw: Schema.Schema.Type, +): NormalizedBitbucketPullRequestRecord { + const headRepositoryNameWithOwner = trimOptionalString(raw.source.repository?.full_name); + const baseRepositoryNameWithOwner = trimOptionalString(raw.destination.repository?.full_name); + const headRepositoryOwnerLogin = raw.source.repository + ? repositoryOwner(raw.source.repository) + : null; + const isCrossRepository = + headRepositoryNameWithOwner !== null && + baseRepositoryNameWithOwner !== null && + headRepositoryNameWithOwner !== baseRepositoryNameWithOwner; + + return { + number: raw.id, + title: raw.title, + url: raw.links.html.href, + baseRefName: raw.destination.branch.name, + headRefName: raw.source.branch.name, + state: normalizeBitbucketPullRequestState(raw.state), + updatedAt: raw.updated_on ?? Option.none(), + ...(isCrossRepository ? { isCrossRepository: true } : {}), + ...(headRepositoryNameWithOwner ? { headRepositoryNameWithOwner } : {}), + ...(headRepositoryOwnerLogin ? 
{ headRepositoryOwnerLogin } : {}), + }; +} diff --git a/apps/server/src/git/githubPullRequests.ts b/apps/server/src/sourceControl/gitHubPullRequests.ts similarity index 93% rename from apps/server/src/git/githubPullRequests.ts rename to apps/server/src/sourceControl/gitHubPullRequests.ts index d137a46d6fa..f0804dda8c6 100644 --- a/apps/server/src/git/githubPullRequests.ts +++ b/apps/server/src/sourceControl/gitHubPullRequests.ts @@ -1,4 +1,4 @@ -import { Cause, Exit, Result, Schema } from "effect"; +import { Cause, DateTime, Exit, Option, Result, Schema } from "effect"; import { PositiveInt, TrimmedNonEmptyString } from "@t3tools/contracts"; import { decodeJsonResult, formatSchemaError } from "@t3tools/shared/schemaJson"; @@ -9,7 +9,7 @@ export interface NormalizedGitHubPullRequestRecord { readonly baseRefName: string; readonly headRefName: string; readonly state: "open" | "closed" | "merged"; - readonly updatedAt: string | null; + readonly updatedAt: Option.Option; readonly isCrossRepository?: boolean; readonly headRepositoryNameWithOwner?: string | null; readonly headRepositoryOwnerLogin?: string | null; @@ -23,7 +23,7 @@ const GitHubPullRequestSchema = Schema.Struct({ headRefName: TrimmedNonEmptyString, state: Schema.optional(Schema.NullOr(Schema.String)), mergedAt: Schema.optional(Schema.NullOr(Schema.String)), - updatedAt: Schema.optional(Schema.NullOr(Schema.String)), + updatedAt: Schema.optional(Schema.OptionFromNullOr(Schema.DateTimeUtcFromString)), isCrossRepository: Schema.optional(Schema.Boolean), headRepository: Schema.optional( Schema.NullOr( @@ -80,8 +80,7 @@ function normalizeGitHubPullRequestRecord( baseRefName: raw.baseRefName, headRefName: raw.headRefName, state: normalizeGitHubPullRequestState(raw), - updatedAt: - typeof raw.updatedAt === "string" && raw.updatedAt.trim().length > 0 ? raw.updatedAt : null, + updatedAt: raw.updatedAt ?? Option.none(), ...(typeof raw.isCrossRepository === "boolean" ? 
{ isCrossRepository: raw.isCrossRepository } : {}), diff --git a/apps/server/src/sourceControl/gitLabMergeRequests.ts b/apps/server/src/sourceControl/gitLabMergeRequests.ts new file mode 100644 index 00000000000..d8245d3249a --- /dev/null +++ b/apps/server/src/sourceControl/gitLabMergeRequests.ts @@ -0,0 +1,148 @@ +import { Cause, DateTime, Exit, Option, Result, Schema } from "effect"; +import { PositiveInt, TrimmedNonEmptyString } from "@t3tools/contracts"; +import { decodeJsonResult, formatSchemaError } from "@t3tools/shared/schemaJson"; + +export interface NormalizedGitLabMergeRequestRecord { + readonly number: number; + readonly title: string; + readonly url: string; + readonly baseRefName: string; + readonly headRefName: string; + readonly state: "open" | "closed" | "merged"; + readonly updatedAt: Option.Option; + readonly isCrossRepository?: boolean; + readonly headRepositoryNameWithOwner?: string | null; + readonly headRepositoryOwnerLogin?: string | null; +} + +const GitLabProjectReferenceSchema = Schema.Struct({ + path_with_namespace: Schema.optional(Schema.String), + pathWithNamespace: Schema.optional(Schema.String), + namespace: Schema.optional( + Schema.NullOr( + Schema.Struct({ + path: Schema.optional(Schema.String), + full_path: Schema.optional(Schema.String), + fullPath: Schema.optional(Schema.String), + }), + ), + ), +}); + +const GitLabMergeRequestSchema = Schema.Struct({ + iid: PositiveInt, + title: TrimmedNonEmptyString, + web_url: TrimmedNonEmptyString, + source_branch: TrimmedNonEmptyString, + target_branch: TrimmedNonEmptyString, + state: Schema.optional(Schema.NullOr(Schema.String)), + updated_at: Schema.optional(Schema.OptionFromNullOr(Schema.DateTimeUtcFromString)), + source_project_id: Schema.optional(Schema.NullOr(Schema.Number)), + target_project_id: Schema.optional(Schema.NullOr(Schema.Number)), + source_project: Schema.optional(Schema.NullOr(GitLabProjectReferenceSchema)), + target_project: 
Schema.optional(Schema.NullOr(GitLabProjectReferenceSchema)), +}); + +function trimOptionalString(value: string | null | undefined): string | null { + const trimmed = value?.trim() ?? ""; + return trimmed.length > 0 ? trimmed : null; +} + +function normalizeGitLabMergeRequestState( + state: string | null | undefined, +): "open" | "closed" | "merged" { + const normalized = state?.trim().toLowerCase(); + if (normalized === "merged") { + return "merged"; + } + if (normalized === "closed") { + return "closed"; + } + return "open"; +} + +function projectPathWithNamespace( + project: Schema.Schema.Type | null | undefined, +): string | null { + const explicit = + trimOptionalString(project?.path_with_namespace) ?? + trimOptionalString(project?.pathWithNamespace); + if (explicit) { + return explicit; + } + + const namespacePath = + trimOptionalString(project?.namespace?.full_path) ?? + trimOptionalString(project?.namespace?.fullPath) ?? + trimOptionalString(project?.namespace?.path); + return namespacePath; +} + +function ownerLoginFromPathWithNamespace(pathWithNamespace: string | null): string | null { + const [owner] = pathWithNamespace?.split("/") ?? []; + return trimOptionalString(owner); +} + +function normalizeGitLabMergeRequestRecord( + raw: Schema.Schema.Type, +): NormalizedGitLabMergeRequestRecord { + const sourceProjectPath = projectPathWithNamespace(raw.source_project); + const targetProjectPath = projectPathWithNamespace(raw.target_project); + const isCrossRepository = + typeof raw.source_project_id === "number" && typeof raw.target_project_id === "number" + ? raw.source_project_id !== raw.target_project_id + : sourceProjectPath !== null && targetProjectPath !== null + ? 
sourceProjectPath.toLowerCase() !== targetProjectPath.toLowerCase() + : undefined; + const headRepositoryOwnerLogin = ownerLoginFromPathWithNamespace(sourceProjectPath); + + return { + number: raw.iid, + title: raw.title, + url: raw.web_url, + baseRefName: raw.target_branch, + headRefName: raw.source_branch, + state: normalizeGitLabMergeRequestState(raw.state), + updatedAt: raw.updated_at ?? Option.none(), + ...(typeof isCrossRepository === "boolean" ? { isCrossRepository } : {}), + ...(sourceProjectPath ? { headRepositoryNameWithOwner: sourceProjectPath } : {}), + ...(headRepositoryOwnerLogin ? { headRepositoryOwnerLogin } : {}), + }; +} + +const decodeGitLabMergeRequestList = decodeJsonResult(Schema.Array(Schema.Unknown)); +const decodeGitLabMergeRequest = decodeJsonResult(GitLabMergeRequestSchema); +const decodeGitLabMergeRequestEntry = Schema.decodeUnknownExit(GitLabMergeRequestSchema); + +export const formatGitLabJsonDecodeError = formatSchemaError; + +export function decodeGitLabMergeRequestListJson( + raw: string, +): Result.Result< + ReadonlyArray, + Cause.Cause +> { + const result = decodeGitLabMergeRequestList(raw); + if (Result.isSuccess(result)) { + const mergeRequests: NormalizedGitLabMergeRequestRecord[] = []; + for (const entry of result.success) { + const decodedEntry = decodeGitLabMergeRequestEntry(entry); + if (Exit.isFailure(decodedEntry)) { + continue; + } + mergeRequests.push(normalizeGitLabMergeRequestRecord(decodedEntry.value)); + } + return Result.succeed(mergeRequests); + } + return Result.fail(result.failure); +} + +export function decodeGitLabMergeRequestJson( + raw: string, +): Result.Result> { + const result = decodeGitLabMergeRequest(raw); + if (Result.isSuccess(result)) { + return Result.succeed(normalizeGitLabMergeRequestRecord(result.success)); + } + return Result.fail(result.failure); +} diff --git a/apps/server/src/textGeneration/AmpTextGeneration.ts b/apps/server/src/textGeneration/AmpTextGeneration.ts new file mode 100644 index 
00000000000..f21bfac0052 --- /dev/null +++ b/apps/server/src/textGeneration/AmpTextGeneration.ts @@ -0,0 +1,54 @@ +/** + * AmpTextGeneration — Graceful "not supported" text-generation shape for Amp. + * + * Amp's CLI does not currently expose a structured-output mode that maps + * cleanly onto our `TextGenerationShape` contract (commit messages, PR + * titles, branch names, thread titles). Rather than block the driver from + * registering, this factory returns an implementation that fails every + * operation with a clear `TextGenerationError`. The user can still pick a + * different instance (Codex, Claude, OpenCode, …) for text generation + * features, and the Amp instance remains usable for chat-style sessions. + * + * @module AmpTextGeneration + */ +import { Effect } from "effect"; + +import type { GenericProviderSettings } from "@t3tools/contracts"; +import { TextGenerationError } from "@t3tools/contracts"; + +import { type TextGenerationShape } from "./TextGeneration.ts"; + +const NOT_SUPPORTED_DETAIL = + "Amp does not expose a structured text-generation API. 
Use a different provider instance for commit/PR/branch/title generation."; + +const fail = ( + operation: + | "generateCommitMessage" + | "generatePrContent" + | "generateBranchName" + | "generateThreadTitle", +) => Effect.fail(new TextGenerationError({ operation, detail: NOT_SUPPORTED_DETAIL })); + +export const makeAmpTextGeneration = Effect.fn("makeAmpTextGeneration")(function* ( + _ampSettings: GenericProviderSettings, + _environment: NodeJS.ProcessEnv = process.env, +) { + const generateCommitMessage: TextGenerationShape["generateCommitMessage"] = () => + fail("generateCommitMessage"); + + const generatePrContent: TextGenerationShape["generatePrContent"] = () => + fail("generatePrContent"); + + const generateBranchName: TextGenerationShape["generateBranchName"] = () => + fail("generateBranchName"); + + const generateThreadTitle: TextGenerationShape["generateThreadTitle"] = () => + fail("generateThreadTitle"); + + return { + generateCommitMessage, + generatePrContent, + generateBranchName, + generateThreadTitle, + } satisfies TextGenerationShape; +}); diff --git a/apps/server/src/textGeneration/ClaudeTextGeneration.test.ts b/apps/server/src/textGeneration/ClaudeTextGeneration.test.ts new file mode 100644 index 00000000000..19cff6f0344 --- /dev/null +++ b/apps/server/src/textGeneration/ClaudeTextGeneration.test.ts @@ -0,0 +1,339 @@ +import { ClaudeSettings, ProviderInstanceId } from "@t3tools/contracts"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { it } from "@effect/vitest"; +import { Effect, FileSystem, Layer, Path, Schema } from "effect"; +import { createModelSelection } from "@t3tools/shared/model"; +import { expect } from "vitest"; + +import { ServerConfig } from "../config.ts"; +import { type TextGenerationShape } from "./TextGeneration.ts"; +import { sanitizeThreadTitle } from "./TextGenerationUtils.ts"; +import { makeClaudeTextGeneration } from "./ClaudeTextGeneration.ts"; + +const ClaudeTextGenerationTestLayer = 
ServerConfig.layerTest(process.cwd(), { + prefix: "t3code-claude-text-generation-test-", +}).pipe(Layer.provideMerge(NodeServices.layer)); + +function makeFakeClaudeBinary(dir: string) { + return Effect.gen(function* () { + const fs = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const binDir = path.join(dir, "bin"); + const claudePath = path.join(binDir, "claude"); + yield* fs.makeDirectory(binDir, { recursive: true }); + + yield* fs.writeFileString( + claudePath, + [ + "#!/bin/sh", + 'args="$*"', + 'stdin_content="$(cat)"', + 'if [ -n "$T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN" ]; then', + ' printf "%s" "$args" | grep -F -- "$T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN" >/dev/null || {', + ' printf "%s\\n" "args missing expected content" >&2', + " exit 2", + " }", + "fi", + 'if [ -n "$T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN" ]; then', + ' if printf "%s" "$args" | grep -F -- "$T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN" >/dev/null; then', + ' printf "%s\\n" "args contained forbidden content" >&2', + " exit 3", + " fi", + "fi", + 'if [ -n "$T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN" ]; then', + ' printf "%s" "$stdin_content" | grep -F -- "$T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN" >/dev/null || {', + ' printf "%s\\n" "stdin missing expected content" >&2', + " exit 4", + " }", + "fi", + 'if [ -n "$T3_FAKE_CLAUDE_HOME_MUST_BE" ] && [ "$HOME" != "$T3_FAKE_CLAUDE_HOME_MUST_BE" ]; then', + ' printf "%s\\n" "HOME was $HOME" >&2', + " exit 5", + "fi", + 'if [ -n "$T3_FAKE_CLAUDE_STDERR" ]; then', + ' printf "%s\\n" "$T3_FAKE_CLAUDE_STDERR" >&2', + "fi", + 'printf "%s" "$T3_FAKE_CLAUDE_OUTPUT"', + 'exit "${T3_FAKE_CLAUDE_EXIT_CODE:-0}"', + "", + ].join("\n"), + ); + yield* fs.chmod(claudePath, 0o755); + return binDir; + }); +} + +function withFakeClaudeEnv( + input: { + output: string; + exitCode?: number; + stderr?: string; + argsMustContain?: string; + argsMustNotContain?: string; + stdinMustContain?: string; + homeMustBe?: string; + claudeConfig?: Partial; + }, + effectFn: (textGeneration: 
TextGenerationShape) => Effect.Effect, +) { + return Effect.gen(function* () { + const fs = yield* FileSystem.FileSystem; + const tempDir = yield* fs.makeTempDirectoryScoped({ prefix: "t3code-claude-text-" }); + const binDir = yield* makeFakeClaudeBinary(tempDir); + const previousPath = process.env.PATH; + const previousOutput = process.env.T3_FAKE_CLAUDE_OUTPUT; + const previousExitCode = process.env.T3_FAKE_CLAUDE_EXIT_CODE; + const previousStderr = process.env.T3_FAKE_CLAUDE_STDERR; + const previousArgsMustContain = process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN; + const previousArgsMustNotContain = process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN; + const previousStdinMustContain = process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN; + const previousHomeMustBe = process.env.T3_FAKE_CLAUDE_HOME_MUST_BE; + + yield* Effect.acquireRelease( + Effect.sync(() => { + process.env.PATH = `${binDir}:${previousPath ?? ""}`; + process.env.T3_FAKE_CLAUDE_OUTPUT = input.output; + + if (input.exitCode !== undefined) { + process.env.T3_FAKE_CLAUDE_EXIT_CODE = String(input.exitCode); + } else { + delete process.env.T3_FAKE_CLAUDE_EXIT_CODE; + } + + if (input.stderr !== undefined) { + process.env.T3_FAKE_CLAUDE_STDERR = input.stderr; + } else { + delete process.env.T3_FAKE_CLAUDE_STDERR; + } + + if (input.argsMustContain !== undefined) { + process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN = input.argsMustContain; + } else { + delete process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN; + } + + if (input.argsMustNotContain !== undefined) { + process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN = input.argsMustNotContain; + } else { + delete process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN; + } + + if (input.stdinMustContain !== undefined) { + process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN = input.stdinMustContain; + } else { + delete process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN; + } + + if (input.homeMustBe !== undefined) { + process.env.T3_FAKE_CLAUDE_HOME_MUST_BE = input.homeMustBe; + } else { + delete 
process.env.T3_FAKE_CLAUDE_HOME_MUST_BE; + } + }), + () => + Effect.sync(() => { + process.env.PATH = previousPath; + + if (previousOutput === undefined) { + delete process.env.T3_FAKE_CLAUDE_OUTPUT; + } else { + process.env.T3_FAKE_CLAUDE_OUTPUT = previousOutput; + } + + if (previousExitCode === undefined) { + delete process.env.T3_FAKE_CLAUDE_EXIT_CODE; + } else { + process.env.T3_FAKE_CLAUDE_EXIT_CODE = previousExitCode; + } + + if (previousStderr === undefined) { + delete process.env.T3_FAKE_CLAUDE_STDERR; + } else { + process.env.T3_FAKE_CLAUDE_STDERR = previousStderr; + } + + if (previousArgsMustContain === undefined) { + delete process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN; + } else { + process.env.T3_FAKE_CLAUDE_ARGS_MUST_CONTAIN = previousArgsMustContain; + } + + if (previousArgsMustNotContain === undefined) { + delete process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN; + } else { + process.env.T3_FAKE_CLAUDE_ARGS_MUST_NOT_CONTAIN = previousArgsMustNotContain; + } + + if (previousStdinMustContain === undefined) { + delete process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN; + } else { + process.env.T3_FAKE_CLAUDE_STDIN_MUST_CONTAIN = previousStdinMustContain; + } + + if (previousHomeMustBe === undefined) { + delete process.env.T3_FAKE_CLAUDE_HOME_MUST_BE; + } else { + process.env.T3_FAKE_CLAUDE_HOME_MUST_BE = previousHomeMustBe; + } + }), + ); + + const config = Schema.decodeSync(ClaudeSettings)(input.claudeConfig ?? 
{}); + const textGeneration = yield* makeClaudeTextGeneration(config); + return yield* effectFn(textGeneration); + }).pipe(Effect.scoped); +} + +it.layer(ClaudeTextGenerationTestLayer)("ClaudeTextGeneration", (it) => { + it.effect("forwards Claude thinking settings for Haiku without passing effort", () => + withFakeClaudeEnv( + { + output: JSON.stringify({ + structured_output: { + subject: "Add important change", + body: "", + }, + }), + argsMustContain: '--settings {"alwaysThinkingEnabled":false}', + argsMustNotContain: "--effort", + }, + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/claude-effect", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: { + ...createModelSelection(ProviderInstanceId.make("claudeAgent"), "claude-haiku-4-5", [ + { id: "thinking", value: false }, + { id: "effort", value: "high" }, + ]), + }, + }); + + expect(generated.subject).toBe("Add important change"); + }), + ), + ); + + it.effect("forwards Claude fast mode and supported effort", () => + withFakeClaudeEnv( + { + output: JSON.stringify({ + structured_output: { + title: "Improve orchestration flow", + body: "Body", + }, + }), + argsMustContain: '--effort max --settings {"fastMode":true}', + }, + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generatePrContent({ + cwd: process.cwd(), + baseBranch: "main", + headBranch: "feature/claude-effect", + commitSummary: "Improve orchestration", + diffSummary: "1 file changed", + diffPatch: "diff --git a/README.md b/README.md", + modelSelection: { + ...createModelSelection(ProviderInstanceId.make("claudeAgent"), "claude-opus-4-6", [ + { id: "effort", value: "max" }, + { id: "fastMode", value: true }, + ]), + }, + }); + + expect(generated.title).toBe("Improve orchestration flow"); + }), + ), + ); + + it.effect("generates thread titles through the 
Claude provider", () => + withFakeClaudeEnv( + { + output: JSON.stringify({ + structured_output: { + title: + ' "Reconnect failures after restart because the session state does not recover" ', + }, + }), + stdinMustContain: "You write concise thread titles for coding conversations.", + }, + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateThreadTitle({ + cwd: process.cwd(), + message: "Please investigate reconnect failures after restarting the session.", + modelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-sonnet-4-6", + }, + }); + + expect(generated.title).toBe( + sanitizeThreadTitle( + '"Reconnect failures after restart because the session state does not recover"', + ), + ); + }), + ), + ); + + it.effect("runs Claude text generation with the configured Claude HOME", () => + Effect.gen(function* () { + const path = yield* Path.Path; + const claudeHome = path.join(process.cwd(), ".claude-work-test"); + return yield* withFakeClaudeEnv( + { + output: JSON.stringify({ + structured_output: { + title: "Use Claude home", + }, + }), + homeMustBe: claudeHome, + claudeConfig: { homePath: claudeHome }, + }, + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateThreadTitle({ + cwd: process.cwd(), + message: "thread title", + modelSelection: { + instanceId: ProviderInstanceId.make("claudeAgent"), + model: "claude-sonnet-4-6", + }, + }); + + expect(generated.title).toBe(sanitizeThreadTitle("Use Claude home")); + }), + ); + }), + ); + + it.effect("falls back when Claude thread title normalization becomes whitespace-only", () => + withFakeClaudeEnv( + { + output: JSON.stringify({ + structured_output: { + title: ' """ """ ', + }, + }), + }, + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateThreadTitle({ + cwd: process.cwd(), + message: "Name this thread.", + modelSelection: { + instanceId: 
ProviderInstanceId.make("claudeAgent"), + model: "claude-sonnet-4-6", + }, + }); + + expect(generated.title).toBe("New thread"); + }), + ), + ); +}); diff --git a/apps/server/src/git/Layers/ClaudeTextGeneration.ts b/apps/server/src/textGeneration/ClaudeTextGeneration.ts similarity index 77% rename from apps/server/src/git/Layers/ClaudeTextGeneration.ts rename to apps/server/src/textGeneration/ClaudeTextGeneration.ts index 97e18c3e789..2f0cbc509b6 100644 --- a/apps/server/src/git/Layers/ClaudeTextGeneration.ts +++ b/apps/server/src/textGeneration/ClaudeTextGeneration.ts @@ -7,31 +7,38 @@ * * @module ClaudeTextGeneration */ -import { Effect, Layer, Option, Schema, Stream } from "effect"; +import { Effect, Option, Schema, Stream } from "effect"; import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; -import { ClaudeModelSelection } from "@t3tools/contracts"; +import { type ClaudeSettings, type ModelSelection } from "@t3tools/contracts"; import { sanitizeBranchFragment, sanitizeFeatureBranchName } from "@t3tools/shared/git"; import { TextGenerationError } from "@t3tools/contracts"; -import { type TextGenerationShape, TextGeneration } from "../Services/TextGeneration.ts"; +import { type TextGenerationShape } from "./TextGeneration.ts"; import { buildBranchNamePrompt, buildCommitMessagePrompt, buildPrContentPrompt, buildThreadTitlePrompt, -} from "../Prompts.ts"; +} from "./TextGenerationPrompts.ts"; import { normalizeCliError, sanitizeCommitSubject, sanitizePrTitle, sanitizeThreadTitle, toJsonSchemaObject, -} from "../Utils.ts"; -import { normalizeClaudeModelOptionsWithCapabilities } from "@t3tools/shared/model"; -import { resolveClaudeApiModelId } from "../../provider/Layers/ClaudeProvider.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; -import { getClaudeModelCapabilities } from "../../provider/Layers/ClaudeProvider.ts"; +} from "./TextGenerationUtils.ts"; +import { + getModelSelectionStringOptionValue, + 
getProviderOptionDescriptors, +} from "@t3tools/shared/model"; +import { + getClaudeModelCapabilities, + normalizeClaudeCliEffort, + resolveClaudeApiModelId, + resolveClaudeEffort, +} from "../provider/Layers/ClaudeProvider.ts"; +import { makeClaudeEnvironment } from "../provider/Drivers/ClaudeHome.ts"; const CLAUDE_TIMEOUT_MS = 180_000; @@ -43,9 +50,12 @@ const ClaudeOutputEnvelope = Schema.Struct({ structured_output: Schema.Unknown, }); -const makeClaudeTextGeneration = Effect.gen(function* () { +export const makeClaudeTextGeneration = Effect.fn("makeClaudeTextGeneration")(function* ( + claudeSettings: ClaudeSettings, + environment: NodeJS.ProcessEnv = process.env, +) { const commandSpawner = yield* ChildProcessSpawner.ChildProcessSpawner; - const serverSettingsService = yield* Effect.service(ServerSettingsService); + const claudeEnvironment = yield* makeClaudeEnvironment(claudeSettings, environment); const readStreamAsString = ( operation: string, @@ -81,28 +91,32 @@ const makeClaudeTextGeneration = Effect.gen(function* () { cwd: string; prompt: string; outputSchemaJson: S; - modelSelection: ClaudeModelSelection; + modelSelection: ModelSelection; }): Effect.fn.Return { const jsonSchemaStr = JSON.stringify(toJsonSchemaObject(outputSchemaJson)); - const normalizedOptions = normalizeClaudeModelOptionsWithCapabilities( - getClaudeModelCapabilities(modelSelection.model), - modelSelection.options, - ); + const caps = getClaudeModelCapabilities(modelSelection.model); + const descriptors = getProviderOptionDescriptors({ + caps, + selections: modelSelection.options, + }); + const findDescriptor = (id: string) => descriptors.find((descriptor) => descriptor.id === id); + const rawEffortSelection = getModelSelectionStringOptionValue(modelSelection, "effort"); + const resolvedEffort = resolveClaudeEffort(caps, rawEffortSelection); + const cliEffort = normalizeClaudeCliEffort(resolvedEffort); + const thinkingDescriptor = findDescriptor("thinking"); + const fastModeDescriptor 
= findDescriptor("fastMode"); + const thinking = + thinkingDescriptor?.type === "boolean" ? thinkingDescriptor.currentValue : undefined; + const fastMode = + fastModeDescriptor?.type === "boolean" ? fastModeDescriptor.currentValue : undefined; const settings = { - ...(typeof normalizedOptions?.thinking === "boolean" - ? { alwaysThinkingEnabled: normalizedOptions.thinking } - : {}), - ...(normalizedOptions?.fastMode ? { fastMode: true } : {}), + ...(typeof thinking === "boolean" ? { alwaysThinkingEnabled: thinking } : {}), + ...(fastMode ? { fastMode: true } : {}), }; - const claudeSettings = yield* Effect.map( - serverSettingsService.getSettings, - (settings) => settings.providers.claudeAgent, - ).pipe(Effect.catch(() => Effect.undefined)); - const runClaudeCommand = Effect.fn("runClaudeJson.runClaudeCommand")(function* () { const command = ChildProcess.make( - claudeSettings?.binaryPath || "claude", + claudeSettings.binaryPath || "claude", [ "-p", "--output-format", @@ -111,11 +125,12 @@ const makeClaudeTextGeneration = Effect.gen(function* () { jsonSchemaStr, "--model", resolveClaudeApiModelId(modelSelection), - ...(normalizedOptions?.effort ? ["--effort", normalizedOptions.effort] : []), + ...(cliEffort ? ["--effort", cliEffort] : []), ...(Object.keys(settings).length > 0 ? 
["--settings", JSON.stringify(settings)] : []), "--dangerously-skip-permissions", ], { + env: claudeEnvironment, cwd, shell: process.platform === "win32", stdin: { @@ -216,13 +231,6 @@ const makeClaudeTextGeneration = Effect.gen(function* () { includeBranch: input.includeBranch === true, }); - if (input.modelSelection.provider !== "claudeAgent") { - return yield* new TextGenerationError({ - operation: "generateCommitMessage", - detail: "Invalid model selection.", - }); - } - const generated = yield* runClaudeJson({ operation: "generateCommitMessage", cwd: input.cwd, @@ -251,13 +259,6 @@ const makeClaudeTextGeneration = Effect.gen(function* () { diffPatch: input.diffPatch, }); - if (input.modelSelection.provider !== "claudeAgent") { - return yield* new TextGenerationError({ - operation: "generatePrContent", - detail: "Invalid model selection.", - }); - } - const generated = yield* runClaudeJson({ operation: "generatePrContent", cwd: input.cwd, @@ -280,13 +281,6 @@ const makeClaudeTextGeneration = Effect.gen(function* () { attachments: input.attachments, }); - if (input.modelSelection.provider !== "claudeAgent") { - return yield* new TextGenerationError({ - operation: "generateBranchName", - detail: "Invalid model selection.", - }); - } - const generated = yield* runClaudeJson({ operation: "generateBranchName", cwd: input.cwd, @@ -308,13 +302,6 @@ const makeClaudeTextGeneration = Effect.gen(function* () { attachments: input.attachments, }); - if (input.modelSelection.provider !== "claudeAgent") { - return yield* new TextGenerationError({ - operation: "generateThreadTitle", - detail: "Invalid model selection.", - }); - } - const generated = yield* runClaudeJson({ operation: "generateThreadTitle", cwd: input.cwd, @@ -335,5 +322,3 @@ const makeClaudeTextGeneration = Effect.gen(function* () { generateThreadTitle, } satisfies TextGenerationShape; }); - -export const ClaudeTextGenerationLive = Layer.effect(TextGeneration, makeClaudeTextGeneration); diff --git 
a/apps/server/src/git/Layers/CodexTextGeneration.test.ts b/apps/server/src/textGeneration/CodexTextGeneration.test.ts similarity index 51% rename from apps/server/src/git/Layers/CodexTextGeneration.test.ts rename to apps/server/src/textGeneration/CodexTextGeneration.test.ts index a07505f025c..07123e921b1 100644 --- a/apps/server/src/git/Layers/CodexTextGeneration.test.ts +++ b/apps/server/src/textGeneration/CodexTextGeneration.test.ts @@ -1,29 +1,24 @@ import * as NodeServices from "@effect/platform-node/NodeServices"; import { it } from "@effect/vitest"; -import { Effect, FileSystem, Layer, Path, Result } from "effect"; +import { Effect, FileSystem, Layer, Path, Result, Schema } from "effect"; +import { createModelSelection } from "@t3tools/shared/model"; import { expect } from "vitest"; -import { ServerConfig } from "../../config.ts"; -import { CodexTextGenerationLive } from "./CodexTextGeneration.ts"; -import { TextGenerationError } from "@t3tools/contracts"; -import { TextGeneration } from "../Services/TextGeneration.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; - -const DEFAULT_TEST_MODEL_SELECTION = { - provider: "codex" as const, - model: "gpt-5.4-mini", -}; - -const CodexTextGenerationTestLayer = CodexTextGenerationLive.pipe( - Layer.provideMerge(ServerSettingsService.layerTest()), - Layer.provideMerge( - ServerConfig.layerTest(process.cwd(), { - prefix: "t3code-codex-text-generation-test-", - }), - ), - Layer.provideMerge(NodeServices.layer), +import { CodexSettings, ProviderInstanceId, TextGenerationError } from "@t3tools/contracts"; + +import { ServerConfig } from "../config.ts"; +import { type TextGenerationShape } from "./TextGeneration.ts"; +import { makeCodexTextGeneration } from "./CodexTextGeneration.ts"; + +const DEFAULT_TEST_MODEL_SELECTION = createModelSelection( + ProviderInstanceId.make("codex"), + "gpt-5.4-mini", ); +const CodexTextGenerationTestLayer = ServerConfig.layerTest(process.cwd(), { + prefix: 
"t3code-codex-text-generation-test-", +}).pipe(Layer.provideMerge(NodeServices.layer)); + function makeFakeCodexBinary( dir: string, input: { @@ -161,39 +156,19 @@ function withFakeCodexEnv( stdinMustContain?: string; stdinMustNotContain?: string; }, - effect: Effect.Effect, + effectFn: (textGeneration: TextGenerationShape) => Effect.Effect, ) { - return Effect.acquireUseRelease( - Effect.gen(function* () { - const fs = yield* FileSystem.FileSystem; - const tempDir = yield* fs.makeTempDirectoryScoped({ prefix: "t3code-codex-text-" }); - const codexPath = yield* makeFakeCodexBinary(tempDir, input); - const serverSettings = yield* ServerSettingsService; - const previousSettings = yield* serverSettings.getSettings; - yield* serverSettings.updateSettings({ - providers: { - codex: { - binaryPath: codexPath, - }, - }, - }); - return { serverSettings, previousBinaryPath: previousSettings.providers.codex.binaryPath }; - }), - () => effect, - ({ serverSettings, previousBinaryPath }) => - serverSettings - .updateSettings({ - providers: { - codex: { - binaryPath: previousBinaryPath, - }, - }, - }) - .pipe(Effect.asVoid), - ); + return Effect.gen(function* () { + const fs = yield* FileSystem.FileSystem; + const tempDir = yield* fs.makeTempDirectoryScoped({ prefix: "t3code-codex-text-" }); + const codexPath = yield* makeFakeCodexBinary(tempDir, input); + const config = Schema.decodeSync(CodexSettings)({ binaryPath: codexPath }); + const textGeneration = yield* makeCodexTextGeneration(config); + return yield* effectFn(textGeneration); + }).pipe(Effect.scoped); } -it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { +it.layer(CodexTextGenerationTestLayer)("CodexTextGeneration", (it) => { it.effect("generates and sanitizes commit messages without branch by default", () => withFakeCodexEnv( { @@ -204,22 +179,21 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { }), stdinMustNotContain: "branch must be a short semantic git branch 
fragment", }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/codex-effect", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/codex-effect", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); - expect(generated.subject.length).toBeLessThanOrEqual(72); - expect(generated.subject.endsWith(".")).toBe(false); - expect(generated.body).toBe("- added migration\n- updated tests"); - expect(generated.branch).toBeUndefined(); - }), + expect(generated.subject.length).toBeLessThanOrEqual(72); + expect(generated.subject.endsWith(".")).toBe(false); + expect(generated.body).toBe("- added migration\n- updated tests"); + expect(generated.branch).toBeUndefined(); + }), ), ); @@ -236,24 +210,17 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { requireReasoningEffort: "xhigh", stdinMustNotContain: "branch must be a short semantic git branch fragment", }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - yield* textGeneration.generateCommitMessage({ + (textGeneration) => + textGeneration.generateCommitMessage({ cwd: process.cwd(), branch: "feature/codex-effect", stagedSummary: "M README.md", stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: { - provider: "codex", - model: "gpt-5.4", - options: { - reasoningEffort: "xhigh", - fastMode: true, - }, - }, - }); - }), + modelSelection: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.4", [ + { id: "reasoningEffort", value: "xhigh" }, + { id: "fastMode", value: true }, + ]), + }), ), ); @@ 
-266,17 +233,14 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { }), requireReasoningEffort: "low", }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - yield* textGeneration.generateCommitMessage({ + (textGeneration) => + textGeneration.generateCommitMessage({ cwd: process.cwd(), branch: "feature/codex-effect", stagedSummary: "M README.md", stagedPatch: "diff --git a/README.md b/README.md", modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); - }), + }), ), ); @@ -290,21 +254,20 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { }), stdinMustContain: "branch must be a short semantic git branch fragment", }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/codex-effect", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - includeBranch: true, - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/codex-effect", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + includeBranch: true, + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); - expect(generated.subject).toBe("Add important change"); - expect(generated.branch).toBe("feature/fix/important-system-change"); - }), + expect(generated.subject).toBe("Add important change"); + expect(generated.branch).toBe("feature/fix/important-system-change"); + }), ), ); @@ -316,23 +279,22 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { body: "\n## Summary\n- improve flow\n\n## Testing\n- bun test\n\n", }), }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generatePrContent({ - cwd: 
process.cwd(), - baseBranch: "main", - headBranch: "feature/codex-effect", - commitSummary: "feat: improve orchestration flow", - diffSummary: "2 files changed", - diffPatch: "diff --git a/a.ts b/a.ts", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generatePrContent({ + cwd: process.cwd(), + baseBranch: "main", + headBranch: "feature/codex-effect", + commitSummary: "feat: improve orchestration flow", + diffSummary: "2 files changed", + diffPatch: "diff --git a/a.ts b/a.ts", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); - expect(generated.title).toBe("Improve orchestration flow"); - expect(generated.body.startsWith("## Summary")).toBe(true); - expect(generated.body.endsWith("\n\n")).toBe(false); - }), + expect(generated.title).toBe("Improve orchestration flow"); + expect(generated.body.startsWith("## Summary")).toBe(true); + expect(generated.body.endsWith("\n\n")).toBe(false); + }), ), ); @@ -344,17 +306,16 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { }), stdinMustNotContain: "Image attachments supplied to the model", }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateBranchName({ - cwd: process.cwd(), - message: "Please update session handling.", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateBranchName({ + cwd: process.cwd(), + message: "Please update session handling.", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); - expect(generated.branch).toBe("feat/session"); - }), + expect(generated.branch).toBe("feat/session"); + }), ), ); @@ -366,17 +327,16 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { ' "Investigate websocket reconnect regressions after worktree restore" \nignored line', }), }, - 
Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateThreadTitle({ - cwd: process.cwd(), - message: "Please investigate websocket reconnect regressions after a worktree restore.", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateThreadTitle({ + cwd: process.cwd(), + message: "Please investigate websocket reconnect regressions after a worktree restore.", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); - expect(generated.title).toBe("Investigate websocket reconnect regressions aft..."); - }), + expect(generated.title).toBe("Investigate websocket reconnect regressions aft..."); + }), ), ); @@ -387,17 +347,16 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { title: ' """ """ ', }), }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateThreadTitle({ - cwd: process.cwd(), - message: "Name this thread.", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateThreadTitle({ + cwd: process.cwd(), + message: "Name this thread.", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); - expect(generated.title).toBe("New thread"); - }), + expect(generated.title).toBe("New thread"); + }), ), ); @@ -408,17 +367,16 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { title: ` "' hello world '" `, }), }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateThreadTitle({ - cwd: process.cwd(), - message: "Name this thread.", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateThreadTitle({ + cwd: 
process.cwd(), + message: "Name this thread.", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); - expect(generated.title).toBe("hello world"); - }), + expect(generated.title).toBe("hello world"); + }), ), ); @@ -430,17 +388,16 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { }), stdinMustNotContain: "Attachment metadata:", }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; - - const generated = yield* textGeneration.generateBranchName({ - cwd: process.cwd(), - message: "Fix timeout behavior.", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }); + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateBranchName({ + cwd: process.cwd(), + message: "Fix timeout behavior.", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); - expect(generated.branch).toBe("fix/session-timeout"); - }), + expect(generated.branch).toBe("fix/session-timeout"); + }), ), ); @@ -453,56 +410,17 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { requireImage: true, stdinMustContain: "Attachment metadata:", }, - Effect.gen(function* () { - const fs = yield* FileSystem.FileSystem; - const path = yield* Path.Path; - const { attachmentsDir } = yield* ServerConfig; - const attachmentId = `thread-branch-image-${Date.now()}-${Math.random().toString(36).slice(2, 10)}`; - const attachmentPath = path.join(attachmentsDir, `${attachmentId}.png`); - yield* fs.makeDirectory(attachmentsDir, { recursive: true }); - yield* fs.writeFile(attachmentPath, Buffer.from("hello")); - - const textGeneration = yield* TextGeneration; - const generated = yield* textGeneration.generateBranchName({ - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - cwd: process.cwd(), - message: "Fix layout bug from screenshot.", - attachments: [ - { - type: "image", - id: attachmentId, - name: "bug.png", - mimeType: "image/png", - sizeBytes: 5, - }, - ], - }); - - 
expect(generated.branch).toBe("fix/ui-regression"); - }), - ), - ); - - it.effect("resolves persisted attachment ids to files for codex image inputs", () => - withFakeCodexEnv( - { - output: JSON.stringify({ - branch: "fix/ui-regression", - }), - requireImage: true, - }, - Effect.gen(function* () { - const fs = yield* FileSystem.FileSystem; - const path = yield* Path.Path; - const { attachmentsDir } = yield* ServerConfig; - const attachmentId = `thread-1-${Date.now()}-${Math.random().toString(36).slice(2, 10)}`; - const imagePath = path.join(attachmentsDir, `${attachmentId}.png`); - yield* fs.makeDirectory(attachmentsDir, { recursive: true }); - yield* fs.writeFile(imagePath, Buffer.from("hello")); - - const textGeneration = yield* TextGeneration; - const generated = yield* textGeneration - .generateBranchName({ + (textGeneration) => + Effect.gen(function* () { + const fs = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const { attachmentsDir } = yield* ServerConfig; + const attachmentId = `thread-branch-image-${Date.now()}-${Math.random().toString(36).slice(2, 10)}`; + const attachmentPath = path.join(attachmentsDir, `${attachmentId}.png`); + yield* fs.makeDirectory(attachmentsDir, { recursive: true }); + yield* fs.writeFile(attachmentPath, Buffer.from("hello")); + + const generated = yield* textGeneration.generateBranchName({ modelSelection: DEFAULT_TEST_MODEL_SELECTION, cwd: process.cwd(), message: "Fix layout bug from screenshot.", @@ -515,24 +433,14 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { sizeBytes: 5, }, ], - }) - .pipe( - Effect.tap(() => - fs.stat(imagePath).pipe( - Effect.map((fileInfo) => { - expect(fileInfo.type).toBe("File"); - }), - ), - ), - Effect.ensuring(fs.remove(imagePath).pipe(Effect.catch(() => Effect.void))), - ); + }); - expect(generated.branch).toBe("fix/ui-regression"); - }), + expect(generated.branch).toBe("fix/ui-regression"); + }), ), ); - it.effect("ignores missing attachment ids 
for codex image inputs", () => + it.effect("resolves persisted attachment ids to files for codex image inputs", () => withFakeCodexEnv( { output: JSON.stringify({ @@ -540,67 +448,115 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { }), requireImage: true, }, - Effect.gen(function* () { - const fs = yield* FileSystem.FileSystem; - const path = yield* Path.Path; - const { attachmentsDir } = yield* ServerConfig; - const missingAttachmentId = `thread-missing-${Date.now()}-${Math.random().toString(36).slice(2, 10)}`; - const missingPath = path.join(attachmentsDir, `${missingAttachmentId}.png`); - yield* fs.remove(missingPath).pipe(Effect.catch(() => Effect.void)); - - const textGeneration = yield* TextGeneration; - const result = yield* textGeneration - .generateBranchName({ - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - cwd: process.cwd(), - message: "Fix layout bug from screenshot.", - attachments: [ - { - type: "image", - id: missingAttachmentId, - name: "outside.png", - mimeType: "image/png", - sizeBytes: 5, - }, - ], - }) - .pipe(Effect.result); - - expect(Result.isFailure(result)).toBe(true); - if (Result.isFailure(result)) { - expect(result.failure).toBeInstanceOf(TextGenerationError); - expect(result.failure.message).toContain("missing --image input"); - } - }), + (textGeneration) => + Effect.gen(function* () { + const fs = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const { attachmentsDir } = yield* ServerConfig; + const attachmentId = `thread-1-${Date.now()}-${Math.random().toString(36).slice(2, 10)}`; + const imagePath = path.join(attachmentsDir, `${attachmentId}.png`); + yield* fs.makeDirectory(attachmentsDir, { recursive: true }); + yield* fs.writeFile(imagePath, Buffer.from("hello")); + + const generated = yield* textGeneration + .generateBranchName({ + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + cwd: process.cwd(), + message: "Fix layout bug from screenshot.", + attachments: [ + { + type: "image", 
+ id: attachmentId, + name: "bug.png", + mimeType: "image/png", + sizeBytes: 5, + }, + ], + }) + .pipe( + Effect.tap(() => + fs.stat(imagePath).pipe( + Effect.map((fileInfo) => { + expect(fileInfo.type).toBe("File"); + }), + ), + ), + Effect.ensuring(fs.remove(imagePath).pipe(Effect.catch(() => Effect.void))), + ); + + expect(generated.branch).toBe("fix/ui-regression"); + }), ), ); - it.effect( - "fails with typed TextGenerationError when codex returns wrong branch payload shape", - () => - withFakeCodexEnv( - { - output: JSON.stringify({ - title: "This is not a branch payload", - }), - }, + it.effect("ignores missing attachment ids for codex image inputs", () => + withFakeCodexEnv( + { + output: JSON.stringify({ + branch: "fix/ui-regression", + }), + requireImage: true, + }, + (textGeneration) => Effect.gen(function* () { - const textGeneration = yield* TextGeneration; + const fs = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const { attachmentsDir } = yield* ServerConfig; + const missingAttachmentId = `thread-missing-${Date.now()}-${Math.random().toString(36).slice(2, 10)}`; + const missingPath = path.join(attachmentsDir, `${missingAttachmentId}.png`); + yield* fs.remove(missingPath).pipe(Effect.catch(() => Effect.void)); const result = yield* textGeneration .generateBranchName({ - cwd: process.cwd(), - message: "Fix websocket reconnect flake", modelSelection: DEFAULT_TEST_MODEL_SELECTION, + cwd: process.cwd(), + message: "Fix layout bug from screenshot.", + attachments: [ + { + type: "image", + id: missingAttachmentId, + name: "outside.png", + mimeType: "image/png", + sizeBytes: 5, + }, + ], }) .pipe(Effect.result); expect(Result.isFailure(result)).toBe(true); if (Result.isFailure(result)) { expect(result.failure).toBeInstanceOf(TextGenerationError); - expect(result.failure.message).toContain("Codex returned invalid structured output"); + expect(result.failure.message).toContain("missing --image input"); } }), + ), + ); + + it.effect( + 
"fails with typed TextGenerationError when codex returns wrong branch payload shape", + () => + withFakeCodexEnv( + { + output: JSON.stringify({ + title: "This is not a branch payload", + }), + }, + (textGeneration) => + Effect.gen(function* () { + const result = yield* textGeneration + .generateBranchName({ + cwd: process.cwd(), + message: "Fix websocket reconnect flake", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }) + .pipe(Effect.result); + + expect(Result.isFailure(result)).toBe(true); + if (Result.isFailure(result)) { + expect(result.failure).toBeInstanceOf(TextGenerationError); + expect(result.failure.message).toContain("Codex returned invalid structured output"); + } + }), ), ); @@ -611,27 +567,26 @@ it.layer(CodexTextGenerationTestLayer)("CodexTextGenerationLive", (it) => { exitCode: 1, stderr: "codex execution failed", }, - Effect.gen(function* () { - const textGeneration = yield* TextGeneration; + (textGeneration) => + Effect.gen(function* () { + const result = yield* textGeneration + .generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/codex-error", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }) + .pipe(Effect.result); - const result = yield* textGeneration - .generateCommitMessage({ - cwd: process.cwd(), - branch: "feature/codex-error", - stagedSummary: "M README.md", - stagedPatch: "diff --git a/README.md b/README.md", - modelSelection: DEFAULT_TEST_MODEL_SELECTION, - }) - .pipe(Effect.result); - - expect(Result.isFailure(result)).toBe(true); - if (Result.isFailure(result)) { - expect(result.failure).toBeInstanceOf(TextGenerationError); - expect(result.failure.message).toContain( - "Codex CLI command failed: codex execution failed", - ); - } - }), + expect(Result.isFailure(result)).toBe(true); + if (Result.isFailure(result)) { + expect(result.failure).toBeInstanceOf(TextGenerationError); + expect(result.failure.message).toContain( + "Codex CLI 
command failed: codex execution failed", + ); + } + }), ), ); }); diff --git a/apps/server/src/git/Layers/CodexTextGeneration.ts b/apps/server/src/textGeneration/CodexTextGeneration.ts similarity index 79% rename from apps/server/src/git/Layers/CodexTextGeneration.ts rename to apps/server/src/textGeneration/CodexTextGeneration.ts index 8f15bfa1868..786a0be4c49 100644 --- a/apps/server/src/git/Layers/CodexTextGeneration.ts +++ b/apps/server/src/textGeneration/CodexTextGeneration.ts @@ -1,44 +1,50 @@ -import { randomUUID } from "node:crypto"; - -import { Effect, FileSystem, Layer, Option, Path, Schema, Scope, Stream } from "effect"; +import { Effect, FileSystem, Option, Path, Random, Schema, Scope, Stream } from "effect"; import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; -import { CodexModelSelection } from "@t3tools/contracts"; +import { type CodexSettings, type ModelSelection } from "@t3tools/contracts"; import { sanitizeBranchFragment, sanitizeFeatureBranchName } from "@t3tools/shared/git"; -import { resolveAttachmentPath } from "../../attachmentStore.ts"; -import { ServerConfig } from "../../config.ts"; -import { expandHomePath } from "../../pathExpansion.ts"; +import { resolveAttachmentPath } from "../attachmentStore.ts"; +import { ServerConfig } from "../config.ts"; +import { expandHomePath } from "../pathExpansion.ts"; import { TextGenerationError } from "@t3tools/contracts"; import { type BranchNameGenerationInput, type ThreadTitleGenerationResult, type TextGenerationShape, - TextGeneration, -} from "../Services/TextGeneration.ts"; +} from "./TextGeneration.ts"; import { buildBranchNamePrompt, buildCommitMessagePrompt, buildPrContentPrompt, buildThreadTitlePrompt, -} from "../Prompts.ts"; +} from "./TextGenerationPrompts.ts"; import { normalizeCliError, sanitizeCommitSubject, sanitizePrTitle, sanitizeThreadTitle, toJsonSchemaObject, -} from "../Utils.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +} from 
"./TextGenerationUtils.ts"; +import { + getModelSelectionBooleanOptionValue, + getModelSelectionStringOptionValue, +} from "@t3tools/shared/model"; const CODEX_GIT_TEXT_GENERATION_REASONING_EFFORT = "low"; const CODEX_TIMEOUT_MS = 180_000; -const makeCodexTextGeneration = Effect.gen(function* () { +/** + * Build a Codex text-generation closure bound to a specific `CodexSettings` + * payload. See `makeCodexAdapter` for the overall per-instance rationale. + */ +export const makeCodexTextGeneration = Effect.fn("makeCodexTextGeneration")(function* ( + codexConfig: CodexSettings, + environment: NodeJS.ProcessEnv = process.env, +) { const fileSystem = yield* FileSystem.FileSystem; const path = yield* Path.Path; const commandSpawner = yield* ChildProcessSpawner.ChildProcessSpawner; const serverConfig = yield* Effect.service(ServerConfig); - const serverSettingsService = yield* Effect.service(ServerSettingsService); type MaterializedImageAttachments = { readonly imagePaths: ReadonlyArray; @@ -64,21 +70,23 @@ const makeCodexTextGeneration = Effect.gen(function* () { prefix: string, content: string, ): Effect.Effect => { - return fileSystem - .makeTempFileScoped({ - prefix: `t3code-${prefix}-${process.pid}-${randomUUID()}.tmp`, - }) - .pipe( - Effect.tap((filePath) => fileSystem.writeFileString(filePath, content)), - Effect.mapError( - (cause) => - new TextGenerationError({ - operation, - detail: `Failed to write temp file`, - cause, - }), - ), - ); + return Effect.gen(function* () { + const tempFileId = yield* Random.nextUUIDv4; + return yield* fileSystem + .makeTempFileScoped({ + prefix: `t3code-${prefix}-${process.pid}-${tempFileId}.tmp`, + }) + .pipe(Effect.tap((filePath) => fileSystem.writeFileString(filePath, content))); + }).pipe( + Effect.mapError( + (cause) => + new TextGenerationError({ + operation, + detail: `Failed to write temp file`, + cause, + }), + ), + ); }; const safeUnlink = (filePath: string): Effect.Effect => @@ -139,7 +147,7 @@ const 
makeCodexTextGeneration = Effect.gen(function* () { outputSchemaJson: S; imagePaths?: ReadonlyArray; cleanupPaths?: ReadonlyArray; - modelSelection: CodexModelSelection; + modelSelection: ModelSelection; }): Effect.fn.Return { const schemaPath = yield* writeTempFile( operation, @@ -148,16 +156,12 @@ const makeCodexTextGeneration = Effect.gen(function* () { ); const outputPath = yield* writeTempFile(operation, "codex-output", ""); - const codexSettings = yield* Effect.map( - serverSettingsService.getSettings, - (settings) => settings.providers.codex, - ).pipe(Effect.catch(() => Effect.undefined)); - const runCodexCommand = Effect.fn("runCodexJson.runCodexCommand")(function* () { const reasoningEffort = - modelSelection.options?.reasoningEffort ?? CODEX_GIT_TEXT_GENERATION_REASONING_EFFORT; + getModelSelectionStringOptionValue(modelSelection, "reasoningEffort") ?? + CODEX_GIT_TEXT_GENERATION_REASONING_EFFORT; const command = ChildProcess.make( - codexSettings?.binaryPath || "codex", + codexConfig.binaryPath || "codex", [ "exec", "--ephemeral", @@ -168,7 +172,9 @@ const makeCodexTextGeneration = Effect.gen(function* () { modelSelection.model, "--config", `model_reasoning_effort="${reasoningEffort}"`, - ...(modelSelection.options?.fastMode ? ["--config", `service_tier="fast"`] : []), + ...(getModelSelectionBooleanOptionValue(modelSelection, "fastMode") === true + ? ["--config", `service_tier="fast"`] + : []), "--output-schema", schemaPath, "--output-last-message", @@ -178,10 +184,8 @@ const makeCodexTextGeneration = Effect.gen(function* () { ], { env: { - ...process.env, - ...(codexSettings?.homePath - ? { CODEX_HOME: expandHomePath(codexSettings.homePath) } - : {}), + ...environment, + ...(codexConfig.homePath ? 
{ CODEX_HOME: expandHomePath(codexConfig.homePath) } : {}), }, cwd, shell: process.platform === "win32", @@ -281,13 +285,6 @@ const makeCodexTextGeneration = Effect.gen(function* () { includeBranch: input.includeBranch === true, }); - if (input.modelSelection.provider !== "codex") { - return yield* new TextGenerationError({ - operation: "generateCommitMessage", - detail: "Invalid model selection.", - }); - } - const generated = yield* runCodexJson({ operation: "generateCommitMessage", cwd: input.cwd, @@ -316,13 +313,6 @@ const makeCodexTextGeneration = Effect.gen(function* () { diffPatch: input.diffPatch, }); - if (input.modelSelection.provider !== "codex") { - return yield* new TextGenerationError({ - operation: "generatePrContent", - detail: "Invalid model selection.", - }); - } - const generated = yield* runCodexJson({ operation: "generatePrContent", cwd: input.cwd, @@ -349,13 +339,6 @@ const makeCodexTextGeneration = Effect.gen(function* () { attachments: input.attachments, }); - if (input.modelSelection.provider !== "codex") { - return yield* new TextGenerationError({ - operation: "generateBranchName", - detail: "Invalid model selection.", - }); - } - const generated = yield* runCodexJson({ operation: "generateBranchName", cwd: input.cwd, @@ -382,13 +365,6 @@ const makeCodexTextGeneration = Effect.gen(function* () { attachments: input.attachments, }); - if (input.modelSelection.provider !== "codex") { - return yield* new TextGenerationError({ - operation: "generateThreadTitle", - detail: "Invalid model selection.", - }); - } - const generated = yield* runCodexJson({ operation: "generateThreadTitle", cwd: input.cwd, @@ -410,5 +386,3 @@ const makeCodexTextGeneration = Effect.gen(function* () { generateThreadTitle, } satisfies TextGenerationShape; }); - -export const CodexTextGenerationLive = Layer.effect(TextGeneration, makeCodexTextGeneration); diff --git a/apps/server/src/textGeneration/CopilotTextGeneration.ts 
b/apps/server/src/textGeneration/CopilotTextGeneration.ts new file mode 100644 index 00000000000..30911721431 --- /dev/null +++ b/apps/server/src/textGeneration/CopilotTextGeneration.ts @@ -0,0 +1,60 @@ +/** + * CopilotTextGeneration — `TextGenerationShape` factory for the GitHub + * Copilot provider. + * + * The Copilot SDK does not expose a straightforward "one-shot prompt with + * structured JSON output" command analogous to `claude -p --output-format + * json` or `codex exec`. Spinning up a full session per text-generation + * call (commit messages, PR titles, etc.) would be both expensive and a + * poor product experience because every invocation would run agentic tool + * approvals, slash-command discovery, etc. + * + * Until/unless the SDK ships a dedicated structured-prompt entrypoint, + * this factory exposes a `TextGenerationShape` that fails gracefully on + * every operation with a stable, user-actionable error message. Callers + * (`SessionTextGeneration` etc.) already fall back to other providers + * when one fails, so this keeps Copilot a valid `ProviderInstance` member + * without claiming a capability it cannot honour. + * + * @module CopilotTextGeneration + */ +import { Effect } from "effect"; + +import { TextGenerationError } from "@t3tools/contracts"; + +import { type TextGenerationShape } from "./TextGeneration.ts"; +import type { CopilotSettings } from "../provider/Drivers/CopilotSettings.ts"; + +const UNSUPPORTED_DETAIL = + "GitHub Copilot does not support headless text generation. 
Pick a different provider for commit / PR / branch / thread title generation."; + +export const makeCopilotTextGeneration = Effect.fn("makeCopilotTextGeneration")(function* ( + _copilotSettings: CopilotSettings, + _environment: NodeJS.ProcessEnv = process.env, +) { + const fail = ( + operation: Op, + ) => + Effect.fail( + new TextGenerationError({ + operation, + detail: UNSUPPORTED_DETAIL, + }), + ); + + const generateCommitMessage: TextGenerationShape["generateCommitMessage"] = () => + fail("generateCommitMessage"); + const generatePrContent: TextGenerationShape["generatePrContent"] = () => + fail("generatePrContent"); + const generateBranchName: TextGenerationShape["generateBranchName"] = () => + fail("generateBranchName"); + const generateThreadTitle: TextGenerationShape["generateThreadTitle"] = () => + fail("generateThreadTitle"); + + return { + generateCommitMessage, + generatePrContent, + generateBranchName, + generateThreadTitle, + } satisfies TextGenerationShape; +}); diff --git a/apps/server/src/textGeneration/CursorTextGeneration.test.ts b/apps/server/src/textGeneration/CursorTextGeneration.test.ts new file mode 100644 index 00000000000..0de135c8465 --- /dev/null +++ b/apps/server/src/textGeneration/CursorTextGeneration.test.ts @@ -0,0 +1,264 @@ +import * as path from "node:path"; +import * as os from "node:os"; +import { fileURLToPath } from "node:url"; +import { chmodSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from "node:fs"; + +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { it } from "@effect/vitest"; +import { Effect, Layer, Schema } from "effect"; +import { createModelSelection } from "@t3tools/shared/model"; +import { expect } from "vitest"; + +import { CursorSettings, ProviderInstanceId } from "@t3tools/contracts"; + +import { ServerConfig } from "../config.ts"; +import { type TextGenerationShape } from "./TextGeneration.ts"; +import { makeCursorTextGeneration } from 
"./CursorTextGeneration.ts"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const mockAgentPath = path.join(__dirname, "../../scripts/acp-mock-agent.ts"); + +function shellSingleQuote(value: string): string { + return `'${value.replaceAll("'", `'"'"'`)}'`; +} + +const CursorTextGenerationTestLayer = ServerConfig.layerTest(process.cwd(), { + prefix: "t3code-cursor-text-generation-test-", +}).pipe(Layer.provideMerge(NodeServices.layer)); + +function makeAcpAgentWrapper(dir: string, env: Record): string { + const binDir = path.join(dir, "bin"); + const agentPath = path.join(binDir, "agent"); + mkdirSync(binDir, { recursive: true }); + writeFileSync( + agentPath, + [ + "#!/bin/sh", + ...Object.entries(env).map(([key, value]) => `export ${key}=${shellSingleQuote(value)}`), + 'if [ "$1" != "acp" ]; then', + ' printf "%s\\n" "unexpected args: $*" >&2', + " exit 11", + "fi", + `exec bun ${JSON.stringify(mockAgentPath)}`, + "", + ].join("\n"), + "utf8", + ); + chmodSync(agentPath, 0o755); + return agentPath; +} + +function withFakeAcpAgent( + env: Record, + effectFn: (textGeneration: TextGenerationShape) => Effect.Effect, +) { + return Effect.gen(function* () { + const tempDir = mkdtempSync(path.join(os.tmpdir(), "t3code-cursor-text-acp-")); + yield* Effect.addFinalizer(() => + Effect.sync(() => { + rmSync(tempDir, { recursive: true, force: true }); + }), + ); + const agentPath = makeAcpAgentWrapper(tempDir, env); + const config = Schema.decodeSync(CursorSettings)({ binaryPath: agentPath }); + const textGeneration = yield* makeCursorTextGeneration(config); + return yield* effectFn(textGeneration); + }).pipe(Effect.scoped); +} + +function waitForFileContent(path: string): Effect.Effect { + return Effect.promise(async () => { + const deadline = Date.now() + 5_000; + for (;;) { + try { + return readFileSync(path, "utf8"); + } catch (error) { + if (Date.now() >= deadline) { + throw error instanceof Error ? 
error : new Error(String(error)); + } + } + await new Promise((resolve) => setTimeout(resolve, 25)); + } + }); +} + +it.layer(CursorTextGenerationTestLayer)("CursorTextGeneration", (it) => { + it.effect("uses ACP model config options instead of raw CLI model ids", () => { + const requestLogDir = mkdtempSync(path.join(os.tmpdir(), "t3code-cursor-text-log-")); + const requestLogPath = path.join(requestLogDir, "requests.ndjson"); + + return withFakeAcpAgent( + { + T3_ACP_REQUEST_LOG_PATH: requestLogPath, + T3_ACP_PROMPT_RESPONSE_TEXT: JSON.stringify({ + subject: "Add generated commit message", + body: "- verify cursor acp model config path", + }), + }, + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/cursor-text-generation", + stagedSummary: "M apps/server/src/textGeneration/CursorTextGeneration.ts", + stagedPatch: + "diff --git a/apps/server/src/textGeneration/CursorTextGeneration.ts b/apps/server/src/textGeneration/CursorTextGeneration.ts", + modelSelection: { + ...createModelSelection(ProviderInstanceId.make("cursor"), "gpt-5.4", [ + { id: "reasoning", value: "xhigh" }, + { id: "fastMode", value: true }, + { id: "contextWindow", value: "1m" }, + ]), + }, + }); + + expect(generated.subject).toBe("Add generated commit message"); + expect(generated.body).toBe("- verify cursor acp model config path"); + + const requests = readFileSync(requestLogPath, "utf8") + .trim() + .split("\n") + .filter((line) => line.length > 0) + .map( + (line) => JSON.parse(line) as { method?: string; params?: Record }, + ); + + expect( + requests.find((request) => request.method === "initialize")?.params?.clientCapabilities, + ).toMatchObject({ + _meta: { + parameterizedModelPicker: true, + }, + }); + expect( + requests.some( + (request) => + request.method === "session/set_config_option" && + request.params?.configId === "model" && + request.params?.value === "gpt-5.4", + ), + 
).toBe(true); + expect( + requests.some( + (request) => + request.method === "session/set_config_option" && + request.params?.configId === "reasoning" && + request.params?.value === "extra-high", + ), + ).toBe(true); + expect( + requests.some( + (request) => + request.method === "session/set_config_option" && + request.params?.configId === "context" && + request.params?.value === "1m", + ), + ).toBe(true); + expect( + requests.some( + (request) => + request.method === "session/set_config_option" && + request.params?.configId === "fast" && + request.params?.value === "true", + ), + ).toBe(true); + expect( + requests.find((request) => request.method === "session/prompt")?.params?.prompt, + ).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + type: "text", + text: expect.stringContaining("Staged patch:"), + }), + ]), + ); + + rmSync(requestLogDir, { recursive: true, force: true }); + }), + ); + }); + + it.effect("accepts json objects with extra assistant text around them", () => + withFakeAcpAgent( + { + T3_ACP_PROMPT_RESPONSE_TEXT: + 'Sure, here is the JSON:\n```json\n{\n "subject": "Update README dummy comment with attribution and date",\n "body": ""\n}\n```\nDone.', + }, + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/cursor-noisy-json", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: { + instanceId: ProviderInstanceId.make("cursor"), + model: "composer-2", + }, + }); + + expect(generated.subject).toBe("Update README dummy comment with attribution and date"); + expect(generated.body).toBe(""); + }), + ), + ); + + it.effect("generates thread titles through Cursor ACP text generation", () => + withFakeAcpAgent( + { + T3_ACP_PROMPT_RESPONSE_TEXT: JSON.stringify({ + title: '"Trim reconnect spinner status after resume."', + }), + }, + (textGeneration) => + Effect.gen(function* () { + const 
generated = yield* textGeneration.generateThreadTitle({ + cwd: process.cwd(), + message: "Fix the reconnect spinner after a resumed session.", + modelSelection: { + instanceId: ProviderInstanceId.make("cursor"), + model: "composer-2", + }, + }); + + expect(generated.title).toBe("Trim reconnect spinner status after resume."); + }), + ), + ); + + it.effect("closes the ACP child process after text generation completes", () => { + const exitLogDir = mkdtempSync(path.join(os.tmpdir(), "t3code-cursor-text-exit-log-")); + const exitLogPath = path.join(exitLogDir, "exit.log"); + + return withFakeAcpAgent( + { + T3_ACP_EXIT_LOG_PATH: exitLogPath, + T3_ACP_PROMPT_RESPONSE_TEXT: JSON.stringify({ + subject: "Close runtime after generation", + body: "", + }), + }, + (textGeneration) => + Effect.gen(function* () { + const generated = yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/cursor-runtime-close", + stagedSummary: "M apps/server/src/textGeneration/CursorTextGeneration.ts", + stagedPatch: + "diff --git a/apps/server/src/textGeneration/CursorTextGeneration.ts b/apps/server/src/textGeneration/CursorTextGeneration.ts", + modelSelection: { + instanceId: ProviderInstanceId.make("cursor"), + model: "composer-2", + }, + }); + + expect(generated.subject).toBe("Close runtime after generation"); + + const exitLog = yield* waitForFileContent(exitLogPath); + expect(exitLog).toContain("exit:0"); + + rmSync(exitLogDir, { recursive: true, force: true }); + }), + ); + }); +}); diff --git a/apps/server/src/git/Layers/CursorTextGeneration.ts b/apps/server/src/textGeneration/CursorTextGeneration.ts similarity index 81% rename from apps/server/src/git/Layers/CursorTextGeneration.ts rename to apps/server/src/textGeneration/CursorTextGeneration.ts index 24f066059c7..1cde82d61b6 100644 --- a/apps/server/src/git/Layers/CursorTextGeneration.ts +++ b/apps/server/src/textGeneration/CursorTextGeneration.ts @@ -1,32 +1,27 @@ -import { Effect, Layer, Option, Ref, 
Schema } from "effect"; +import { Effect, Option, Ref, Schema } from "effect"; import { ChildProcessSpawner } from "effect/unstable/process"; -import { CursorModelSelection } from "@t3tools/contracts"; +import { type CursorSettings, type ModelSelection } from "@t3tools/contracts"; import { sanitizeBranchFragment, sanitizeFeatureBranchName } from "@t3tools/shared/git"; import { TextGenerationError } from "@t3tools/contracts"; -import { - type ThreadTitleGenerationResult, - type TextGenerationShape, - TextGeneration, -} from "../Services/TextGeneration.ts"; +import { type ThreadTitleGenerationResult, type TextGenerationShape } from "./TextGeneration.ts"; import { buildBranchNamePrompt, buildCommitMessagePrompt, buildPrContentPrompt, buildThreadTitlePrompt, -} from "../Prompts.ts"; +} from "./TextGenerationPrompts.ts"; import { extractJsonObject, sanitizeCommitSubject, sanitizePrTitle, sanitizeThreadTitle, -} from "../Utils.ts"; +} from "./TextGenerationUtils.ts"; import { applyCursorAcpModelSelection, makeCursorAcpRuntime, -} from "../../provider/acp/CursorAcpSupport.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +} from "../provider/acp/CursorAcpSupport.ts"; const CURSOR_TIMEOUT_MS = 180_000; @@ -55,9 +50,15 @@ function isTextGenerationError(error: unknown): error is TextGenerationError { ); } -const makeCursorTextGeneration = Effect.gen(function* () { +/** + * Build a Cursor text-generation closure bound to a specific `CursorSettings` + * payload. See `makeCodexAdapter` for the overall per-instance rationale. 
+ */ +export const makeCursorTextGeneration = Effect.fn("makeCursorTextGeneration")(function* ( + cursorSettings: CursorSettings, + environment: NodeJS.ProcessEnv = process.env, +) { const commandSpawner = yield* ChildProcessSpawner.ChildProcessSpawner; - const serverSettingsService = yield* Effect.service(ServerSettingsService); const runCursorJson = ({ operation, @@ -74,17 +75,13 @@ const makeCursorTextGeneration = Effect.gen(function* () { cwd: string; prompt: string; outputSchemaJson: S; - modelSelection: CursorModelSelection; + modelSelection: ModelSelection; }): Effect.Effect => Effect.gen(function* () { - const cursorSettings = yield* Effect.map( - serverSettingsService.getSettings, - (settings) => settings.providers.cursor, - ).pipe(Effect.catch(() => Effect.undefined)); - const outputRef = yield* Ref.make(""); const runtime = yield* makeCursorAcpRuntime({ cursorSettings, + environment, childProcessSpawner: commandSpawner, cwd, clientInfo: { name: "t3-code-git-text", version: "0.0.0" }, @@ -108,7 +105,7 @@ const makeCursorTextGeneration = Effect.gen(function* () { yield* applyCursorAcpModelSelection({ runtime, model: modelSelection.model, - modelOptions: modelSelection.options, + selections: modelSelection.options, mapError: ({ cause, configId, step }) => mapCursorAcpError( operation, @@ -186,13 +183,6 @@ const makeCursorTextGeneration = Effect.gen(function* () { includeBranch: input.includeBranch === true, }); - if (input.modelSelection.provider !== "cursor") { - return yield* new TextGenerationError({ - operation: "generateCommitMessage", - detail: "Invalid model selection.", - }); - } - const generated = yield* runCursorJson({ operation: "generateCommitMessage", cwd: input.cwd, @@ -221,13 +211,6 @@ const makeCursorTextGeneration = Effect.gen(function* () { diffPatch: input.diffPatch, }); - if (input.modelSelection.provider !== "cursor") { - return yield* new TextGenerationError({ - operation: "generatePrContent", - detail: "Invalid model selection.", - 
}); - } - const generated = yield* runCursorJson({ operation: "generatePrContent", cwd: input.cwd, @@ -250,13 +233,6 @@ const makeCursorTextGeneration = Effect.gen(function* () { attachments: input.attachments, }); - if (input.modelSelection.provider !== "cursor") { - return yield* new TextGenerationError({ - operation: "generateBranchName", - detail: "Invalid model selection.", - }); - } - const generated = yield* runCursorJson({ operation: "generateBranchName", cwd: input.cwd, @@ -278,13 +254,6 @@ const makeCursorTextGeneration = Effect.gen(function* () { attachments: input.attachments, }); - if (input.modelSelection.provider !== "cursor") { - return yield* new TextGenerationError({ - operation: "generateThreadTitle", - detail: "Invalid model selection.", - }); - } - const generated = yield* runCursorJson({ operation: "generateThreadTitle", cwd: input.cwd, @@ -305,5 +274,3 @@ const makeCursorTextGeneration = Effect.gen(function* () { generateThreadTitle, } satisfies TextGenerationShape; }); - -export const CursorTextGenerationLive = Layer.effect(TextGeneration, makeCursorTextGeneration); diff --git a/apps/server/src/textGeneration/GeminiCliTextGeneration.ts b/apps/server/src/textGeneration/GeminiCliTextGeneration.ts new file mode 100644 index 00000000000..aa009414e29 --- /dev/null +++ b/apps/server/src/textGeneration/GeminiCliTextGeneration.ts @@ -0,0 +1,66 @@ +/** + * GeminiCliTextGeneration — `TextGenerationShape` factory for the Gemini CLI + * provider. + * + * Earlier revisions invoked `gemini -p --approval-mode yolo` inside + * the user's repository to coerce structured JSON output. That gave a + * commit/PR/branch/title generation request the same trust posture as a + * full agentic session — capable of executing arbitrary tool calls in the + * workspace without user approval. 
The Gemini CLI does not expose a + * non-interactive mode that disables tool execution outright (the only + * non-interactive approval mode is `yolo`), so the safe option is to not + * run it at all from text-generation paths. + * + * This factory keeps Gemini CLI a valid `ProviderInstance` member by + * returning a graceful "not supported" failure on every operation. Callers + * (`SessionTextGeneration` etc.) already fall back to other providers when + * one fails, so picking Gemini for chat/sessions still works — only the + * automated text-generation helpers redirect users to a different provider. + * + * @module GeminiCliTextGeneration + */ +import { Effect } from "effect"; + +import { type GenericProviderSettings, TextGenerationError } from "@t3tools/contracts"; + +import { type TextGenerationShape } from "./TextGeneration.ts"; + +const UNSUPPORTED_DETAIL = + "Gemini CLI is not supported for headless text generation (the only non-interactive Gemini mode auto-approves tool calls in the workspace). 
Pick a different provider for commit / PR / branch / thread title generation."; + +export const makeGeminiCliTextGeneration = Effect.fn("makeGeminiCliTextGeneration")(function* ( + _config: GenericProviderSettings, + _environment: NodeJS.ProcessEnv = process.env, +) { + const fail = < + Op extends + | "generateCommitMessage" + | "generatePrContent" + | "generateBranchName" + | "generateThreadTitle", + >( + operation: Op, + ) => + Effect.fail( + new TextGenerationError({ + operation, + detail: UNSUPPORTED_DETAIL, + }), + ); + + const generateCommitMessage: TextGenerationShape["generateCommitMessage"] = () => + fail("generateCommitMessage"); + const generatePrContent: TextGenerationShape["generatePrContent"] = () => + fail("generatePrContent"); + const generateBranchName: TextGenerationShape["generateBranchName"] = () => + fail("generateBranchName"); + const generateThreadTitle: TextGenerationShape["generateThreadTitle"] = () => + fail("generateThreadTitle"); + + return { + generateCommitMessage, + generatePrContent, + generateBranchName, + generateThreadTitle, + } satisfies TextGenerationShape; +}); diff --git a/apps/server/src/textGeneration/KiloTextGeneration.ts b/apps/server/src/textGeneration/KiloTextGeneration.ts new file mode 100644 index 00000000000..fef1da33142 --- /dev/null +++ b/apps/server/src/textGeneration/KiloTextGeneration.ts @@ -0,0 +1,302 @@ +/** + * KiloTextGeneration — Text generation layer using a per-instance Kilo + * server for branch/commit/PR/thread-title generation. + * + * Kilo's API mirrors OpenCode's, so this driver delegates to a private + * `KiloServerManager` (no shared state with the adapter's manager) and + * runs `session.prompt`-style requests via the SDK client. 
Unlike the + * OpenCode text-generation driver, Kilo does not yet expose a long-lived + * shared server pool: each request currently creates a transient session + * on the manager-owned server and the server is released when the + * factory's scope finalizer fires (registry shutdown). + * + * @module KiloTextGeneration + */ +import { Effect, Schema } from "effect"; + +import { + TextGenerationError, + type ChatAttachment, + type ModelSelection, +} from "@t3tools/contracts"; +import { sanitizeBranchFragment, sanitizeFeatureBranchName } from "@t3tools/shared/git"; + +import { + KiloServerManager, +} from "../kiloServerManager.ts"; +import { parseKiloModel, readJsonData } from "../kilo/utils.ts"; +import { createClient } from "../kilo/serverLifecycle.ts"; +import type { KiloProviderOptions, SharedServerState } from "../kilo/types.ts"; +import { type TextGenerationShape } from "./TextGeneration.ts"; +import { + buildBranchNamePrompt, + buildCommitMessagePrompt, + buildPrContentPrompt, + buildThreadTitlePrompt, +} from "./TextGenerationPrompts.ts"; +import { + extractJsonObject, + sanitizeCommitSubject, + sanitizePrTitle, + sanitizeThreadTitle, +} from "./TextGenerationUtils.ts"; +import type { KiloSettings } from "../provider/Layers/KiloProvider.ts"; + +type TextGenerationOperation = + | "generateCommitMessage" + | "generatePrContent" + | "generateBranchName" + | "generateThreadTitle"; + +function getKiloPromptErrorMessage(error: unknown): string | null { + if (!error || typeof error !== "object") { + return null; + } + const message = + "data" in error && + error.data && + typeof error.data === "object" && + "message" in error.data && + typeof error.data.message === "string" + ? error.data.message.trim() + : ""; + if (message.length > 0) { + return message; + } + if ("name" in error && typeof error.name === "string") { + const name = error.name.trim(); + return name.length > 0 ? 
name : null; + } + return null; +} + +function getKiloTextResponse(parts: ReadonlyArray | undefined): string { + return (parts ?? []) + .flatMap((part) => { + if (!part || typeof part !== "object") { + return []; + } + if (!("type" in part) || part.type !== "text") { + return []; + } + if (!("text" in part) || typeof part.text !== "string") { + return []; + } + return [part.text]; + }) + .join("") + .trim(); +} + +export const makeKiloTextGeneration = Effect.fn("makeKiloTextGeneration")(function* ( + kiloSettings: KiloSettings, + _environment: NodeJS.ProcessEnv = process.env, +) { + // Per-instance manager: one server process bound to this text-generation + // factory's scope. The manager owns the spawned child via its internal + // `ensureServer`/`getOrStartServer`, so `stopAll()` actually kills it on + // scope close. Concurrent generation calls share the same pending start + // because `getOrStartServer` is internally serialized. + const manager = new KiloServerManager(); + + yield* Effect.acquireRelease(Effect.sync(() => manager), (m) => Effect.sync(() => m.stopAll())); + + const resolveBinaryPath = (): string => kiloSettings.binaryPath.trim() || "kilo"; + + const ensureKiloServer = (): Promise => + manager.getOrStartServer({ binaryPath: resolveBinaryPath() } as KiloProviderOptions); + + const runKiloJson = Effect.fn("runKiloJson")(function* (input: { + readonly operation: TextGenerationOperation; + readonly cwd: string; + readonly prompt: string; + readonly outputSchemaJson: S; + readonly modelSelection: ModelSelection; + readonly attachments?: ReadonlyArray | undefined; + }) { + const parsed = parseKiloModel(input.modelSelection.model); + if (!parsed) { + return yield* new TextGenerationError({ + operation: input.operation, + detail: "Kilo model selection must use the 'provider/model' format.", + }); + } + + const rawText = yield* Effect.tryPromise({ + try: async () => { + const shared = await ensureKiloServer(); + const client = await createClient({ + 
baseUrl: shared.baseUrl, + directory: input.cwd, + responseStyle: "data", + throwOnError: true, + ...(shared.authHeader + ? { headers: { Authorization: shared.authHeader } } + : {}), + }); + + const created = (await readJsonData( + client.session.create({ title: `T3 Code ${input.operation}` }), + )) as { readonly id?: string } | { readonly data?: { readonly id?: string } }; + const sessionId = + ("id" in created && typeof created.id === "string" && created.id) || + ("data" in created && + created.data && + typeof created.data.id === "string" && + created.data.id) || + ""; + if (!sessionId) { + throw new Error("Kilo session.create returned no session id."); + } + + const result = (await readJsonData( + client.session.promptAsync({ + sessionID: sessionId, + model: { providerID: parsed.providerId, modelID: parsed.modelId }, + ...(parsed.variant ? { variant: parsed.variant } : {}), + parts: [{ type: "text", text: input.prompt }], + }), + )) as + | { + readonly data?: { + readonly info?: { readonly error?: unknown }; + readonly parts?: ReadonlyArray; + }; + readonly info?: { readonly error?: unknown }; + readonly parts?: ReadonlyArray; + } + | undefined; + + const data = result && "data" in result && result.data ? result.data : result; + const errorMessage = getKiloPromptErrorMessage(data?.info?.error); + if (errorMessage) { + throw new Error(errorMessage); + } + const text = getKiloTextResponse(data?.parts); + if (text.length === 0) { + throw new Error("Kilo returned empty output."); + } + return text; + }, + catch: (cause) => + new TextGenerationError({ + operation: input.operation, + detail: cause instanceof Error ? 
cause.message : "Kilo text generation failed.", + cause, + }), + }); + + return yield* Schema.decodeEffect(Schema.fromJsonString(input.outputSchemaJson))( + extractJsonObject(rawText), + ).pipe( + Effect.catchTag("SchemaError", (cause) => + Effect.fail( + new TextGenerationError({ + operation: input.operation, + detail: "Kilo returned invalid structured output.", + cause, + }), + ), + ), + ); + }); + + const generateCommitMessage: TextGenerationShape["generateCommitMessage"] = Effect.fn( + "KiloTextGeneration.generateCommitMessage", + )(function* (input) { + const { prompt, outputSchema } = buildCommitMessagePrompt({ + branch: input.branch, + stagedSummary: input.stagedSummary, + stagedPatch: input.stagedPatch, + includeBranch: input.includeBranch === true, + }); + const generated = yield* runKiloJson({ + operation: "generateCommitMessage", + cwd: input.cwd, + prompt, + outputSchemaJson: outputSchema, + modelSelection: input.modelSelection, + }); + + return { + subject: sanitizeCommitSubject(generated.subject), + body: generated.body.trim(), + ...("branch" in generated && typeof generated.branch === "string" + ? 
{ branch: sanitizeFeatureBranchName(generated.branch) } + : {}), + }; + }); + + const generatePrContent: TextGenerationShape["generatePrContent"] = Effect.fn( + "KiloTextGeneration.generatePrContent", + )(function* (input) { + const { prompt, outputSchema } = buildPrContentPrompt({ + baseBranch: input.baseBranch, + headBranch: input.headBranch, + commitSummary: input.commitSummary, + diffSummary: input.diffSummary, + diffPatch: input.diffPatch, + }); + const generated = yield* runKiloJson({ + operation: "generatePrContent", + cwd: input.cwd, + prompt, + outputSchemaJson: outputSchema, + modelSelection: input.modelSelection, + }); + + return { + title: sanitizePrTitle(generated.title), + body: generated.body.trim(), + }; + }); + + const generateBranchName: TextGenerationShape["generateBranchName"] = Effect.fn( + "KiloTextGeneration.generateBranchName", + )(function* (input) { + const { prompt, outputSchema } = buildBranchNamePrompt({ + message: input.message, + attachments: input.attachments, + }); + const generated = yield* runKiloJson({ + operation: "generateBranchName", + cwd: input.cwd, + prompt, + outputSchemaJson: outputSchema, + modelSelection: input.modelSelection, + attachments: input.attachments, + }); + + return { + branch: sanitizeBranchFragment(generated.branch), + }; + }); + + const generateThreadTitle: TextGenerationShape["generateThreadTitle"] = Effect.fn( + "KiloTextGeneration.generateThreadTitle", + )(function* (input) { + const { prompt, outputSchema } = buildThreadTitlePrompt({ + message: input.message, + attachments: input.attachments, + }); + const generated = yield* runKiloJson({ + operation: "generateThreadTitle", + cwd: input.cwd, + prompt, + outputSchemaJson: outputSchema, + modelSelection: input.modelSelection, + attachments: input.attachments, + }); + + return { + title: sanitizeThreadTitle(generated.title), + }; + }); + + return { + generateCommitMessage, + generatePrContent, + generateBranchName, + generateThreadTitle, + } satisfies 
TextGenerationShape; +}); diff --git a/apps/server/src/textGeneration/OpenCodeTextGeneration.test.ts b/apps/server/src/textGeneration/OpenCodeTextGeneration.test.ts new file mode 100644 index 00000000000..907c749355f --- /dev/null +++ b/apps/server/src/textGeneration/OpenCodeTextGeneration.test.ts @@ -0,0 +1,346 @@ +import { OpenCodeSettings, ProviderInstanceId } from "@t3tools/contracts"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { it } from "@effect/vitest"; +import { Duration, Effect, Layer, Schema } from "effect"; +import { TestClock } from "effect/testing"; +import { NetService } from "@t3tools/shared/Net"; +import { beforeEach, expect } from "vitest"; + +import { ServerConfig } from "../config.ts"; +import { + OpenCodeRuntime, + OpenCodeRuntimeError, + type OpenCodeRuntimeShape, +} from "../provider/opencodeRuntime.ts"; +import { type TextGenerationShape } from "./TextGeneration.ts"; +import { makeOpenCodeTextGeneration } from "./OpenCodeTextGeneration.ts"; + +const runtimeMock = { + state: { + startCalls: [] as string[], + promptUrls: [] as string[], + authHeaders: [] as Array, + closeCalls: [] as string[], + promptResult: undefined as + | { data?: { info?: { error?: unknown }; parts?: Array<{ type: string; text?: string }> } } + | undefined, + }, + reset() { + this.state.startCalls.length = 0; + this.state.promptUrls.length = 0; + this.state.authHeaders.length = 0; + this.state.closeCalls.length = 0; + this.state.promptResult = undefined; + }, +}; + +const OpenCodeRuntimeTestDouble: OpenCodeRuntimeShape = { + startOpenCodeServerProcess: ({ binaryPath }) => + Effect.gen(function* () { + const index = runtimeMock.state.startCalls.length + 1; + const url = `http://127.0.0.1:${4_300 + index}`; + runtimeMock.state.startCalls.push(binaryPath); + // The production runtime binds server lifetime to the caller's scope. + // Mirror that here so the closeCalls probe observes scope close. 
+ yield* Effect.addFinalizer(() => + Effect.sync(() => { + runtimeMock.state.closeCalls.push(url); + }), + ); + return { + url, + exitCode: Effect.never, + }; + }), + connectToOpenCodeServer: ({ serverUrl }) => + Effect.succeed({ + url: serverUrl ?? "http://127.0.0.1:4301", + exitCode: null, + external: Boolean(serverUrl), + }), + runOpenCodeCommand: () => Effect.succeed({ stdout: "", stderr: "", code: 0 }), + createOpenCodeSdkClient: ({ baseUrl, serverPassword }) => + ({ + session: { + create: async () => ({ data: { id: `${baseUrl}/session` } }), + prompt: async () => { + runtimeMock.state.promptUrls.push(baseUrl); + runtimeMock.state.authHeaders.push( + serverPassword ? `Basic ${btoa(`opencode:${serverPassword}`)}` : null, + ); + return ( + runtimeMock.state.promptResult ?? { + data: { + parts: [ + { + type: "text", + text: JSON.stringify({ + subject: "Improve OpenCode reuse", + body: "Reuse one server for the full action.", + }), + }, + ], + }, + } + ); + }, + }, + }) as unknown as ReturnType, + loadOpenCodeInventory: () => + Effect.fail( + new OpenCodeRuntimeError({ + operation: "loadOpenCodeInventory", + detail: "OpenCodeRuntimeTestDouble.loadOpenCodeInventory not used in this test", + cause: null, + }), + ), +}; + +const DEFAULT_TEST_MODEL_SELECTION = { + instanceId: ProviderInstanceId.make("opencode"), + model: "openai/gpt-5", +}; + +const OPENCODE_TEXT_GENERATION_IDLE_TTL_MS = 30_000; + +const OpenCodeTextGenerationTestLayer = Layer.succeed( + OpenCodeRuntime, + OpenCodeRuntimeTestDouble, +).pipe( + Layer.provideMerge( + ServerConfig.layerTest(process.cwd(), { + prefix: "t3code-opencode-text-generation-test-", + }), + ), + Layer.provideMerge(NetService.layer), + Layer.provideMerge(NodeServices.layer), +); + +const OpenCodeTextGenerationExistingServerTestLayer = Layer.succeed( + OpenCodeRuntime, + OpenCodeRuntimeTestDouble, +).pipe( + Layer.provideMerge( + ServerConfig.layerTest(process.cwd(), { + prefix: 
"t3code-opencode-text-generation-existing-server-test-", + }), + ), + Layer.provideMerge(NetService.layer), + Layer.provideMerge(NodeServices.layer), +); + +const DEFAULT_OPENCODE_SETTINGS = Schema.decodeSync(OpenCodeSettings)({ + binaryPath: "fake-opencode", +}); +const EXISTING_SERVER_OPENCODE_SETTINGS = Schema.decodeSync(OpenCodeSettings)({ + binaryPath: "fake-opencode", + serverUrl: "http://127.0.0.1:9999", + serverPassword: "secret-password", +}); + +function withOpenCodeTextGeneration( + settings: OpenCodeSettings, + effectFn: (textGeneration: TextGenerationShape) => Effect.Effect, +) { + return Effect.gen(function* () { + const textGeneration = yield* makeOpenCodeTextGeneration(settings); + return yield* effectFn(textGeneration); + }).pipe(Effect.scoped); +} + +beforeEach(() => { + runtimeMock.reset(); +}); + +const advanceIdleClock = Effect.gen(function* () { + yield* Effect.yieldNow; + yield* TestClock.adjust(Duration.millis(OPENCODE_TEXT_GENERATION_IDLE_TTL_MS + 1)); + yield* Effect.yieldNow; +}); + +it.layer(OpenCodeTextGenerationTestLayer)("OpenCodeTextGeneration", (it) => { + it.effect("reuses a warm server across back-to-back requests and closes it after idling", () => + withOpenCodeTextGeneration(DEFAULT_OPENCODE_SETTINGS, (textGeneration) => + Effect.gen(function* () { + yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/opencode-reuse", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); + yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/opencode-reuse", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); + + expect(runtimeMock.state.startCalls).toEqual(["fake-opencode"]); + expect(runtimeMock.state.promptUrls).toEqual([ + "http://127.0.0.1:4301", + "http://127.0.0.1:4301", + ]); + 
expect(runtimeMock.state.closeCalls).toEqual([]); + + yield* advanceIdleClock; + + expect(runtimeMock.state.closeCalls).toEqual(["http://127.0.0.1:4301"]); + }), + ).pipe(Effect.provide(TestClock.layer())), + ); + + it.effect("starts a new server after the warm server idles out", () => + withOpenCodeTextGeneration(DEFAULT_OPENCODE_SETTINGS, (textGeneration) => + Effect.gen(function* () { + yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/opencode-reuse", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); + + yield* advanceIdleClock; + + yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/opencode-reuse", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); + + expect(runtimeMock.state.startCalls).toEqual(["fake-opencode", "fake-opencode"]); + expect(runtimeMock.state.promptUrls).toEqual([ + "http://127.0.0.1:4301", + "http://127.0.0.1:4302", + ]); + expect(runtimeMock.state.closeCalls).toEqual(["http://127.0.0.1:4301"]); + }), + ).pipe(Effect.provide(TestClock.layer())), + ); + + it.effect("returns a typed empty-output error when OpenCode returns no text parts", () => + withOpenCodeTextGeneration(DEFAULT_OPENCODE_SETTINGS, (textGeneration) => + Effect.gen(function* () { + runtimeMock.state.promptResult = { data: {} }; + + const error = yield* textGeneration + .generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/opencode-reuse", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }) + .pipe(Effect.flip); + + expect(error.message).toContain("OpenCode returned empty output."); + }), + ), + ); + + it.effect("parses JSON returned as plain text output", () => + withOpenCodeTextGeneration(DEFAULT_OPENCODE_SETTINGS, (textGeneration) => 
+ Effect.gen(function* () { + runtimeMock.state.promptResult = { + data: { + parts: [ + { + type: "text", + text: 'Here is the result:\n{"subject":"Tighten OpenCode parsing","body":"Handle JSON text output locally."}', + }, + ], + }, + }; + + const result = yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/opencode-reuse", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); + + expect(result).toEqual({ + subject: "Tighten OpenCode parsing", + body: "Handle JSON text output locally.", + }); + }), + ), + ); + + it.effect("surfaces the upstream OpenCode structured-output error message", () => + withOpenCodeTextGeneration(DEFAULT_OPENCODE_SETTINGS, (textGeneration) => + Effect.gen(function* () { + runtimeMock.state.promptResult = { + data: { + info: { + error: { + name: "StructuredOutputError", + data: { + message: "Model did not produce structured output", + retries: 2, + }, + }, + }, + }, + }; + + const error = yield* textGeneration + .generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/opencode-reuse", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }) + .pipe(Effect.flip); + + expect(error.message).toContain("Model did not produce structured output"); + }), + ), + ); +}); + +it.layer(OpenCodeTextGenerationExistingServerTestLayer)( + "OpenCodeTextGeneration with configured server URL", + (it) => { + it.effect("reuses a configured OpenCode server URL without spawning or applying idle TTL", () => + withOpenCodeTextGeneration(EXISTING_SERVER_OPENCODE_SETTINGS, (textGeneration) => + Effect.gen(function* () { + yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/opencode-reuse", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); + 
yield* textGeneration.generateCommitMessage({ + cwd: process.cwd(), + branch: "feature/opencode-reuse", + stagedSummary: "M README.md", + stagedPatch: "diff --git a/README.md b/README.md", + modelSelection: DEFAULT_TEST_MODEL_SELECTION, + }); + + expect(runtimeMock.state.startCalls).toEqual([]); + expect(runtimeMock.state.promptUrls).toEqual([ + "http://127.0.0.1:9999", + "http://127.0.0.1:9999", + ]); + expect(runtimeMock.state.authHeaders).toEqual([ + `Basic ${btoa("opencode:secret-password")}`, + `Basic ${btoa("opencode:secret-password")}`, + ]); + + yield* advanceIdleClock; + + expect(runtimeMock.state.closeCalls).toEqual([]); + }), + ).pipe(Effect.provide(TestClock.layer())), + ); + }, +); diff --git a/apps/server/src/git/Layers/OpenCodeTextGeneration.ts b/apps/server/src/textGeneration/OpenCodeTextGeneration.ts similarity index 84% rename from apps/server/src/git/Layers/OpenCodeTextGeneration.ts rename to apps/server/src/textGeneration/OpenCodeTextGeneration.ts index fd28188d600..d646e4f2e5a 100644 --- a/apps/server/src/git/Layers/OpenCodeTextGeneration.ts +++ b/apps/server/src/textGeneration/OpenCodeTextGeneration.ts @@ -1,29 +1,30 @@ -import { Effect, Exit, Fiber, Layer, Schema, Scope } from "effect"; +import { Effect, Exit, Fiber, Schema, Scope } from "effect"; import * as Semaphore from "effect/Semaphore"; import { TextGenerationError, type ChatAttachment, - type OpenCodeModelSelection, + type ModelSelection, + type OpenCodeSettings, } from "@t3tools/contracts"; import { sanitizeBranchFragment, sanitizeFeatureBranchName } from "@t3tools/shared/git"; +import { getModelSelectionStringOptionValue } from "@t3tools/shared/model"; -import { ServerConfig } from "../../config.ts"; -import { resolveAttachmentPath } from "../../attachmentStore.ts"; -import { ServerSettingsService } from "../../serverSettings.ts"; +import { ServerConfig } from "../config.ts"; +import { resolveAttachmentPath } from "../attachmentStore.ts"; import { buildBranchNamePrompt, 
buildCommitMessagePrompt, buildPrContentPrompt, buildThreadTitlePrompt, -} from "../Prompts.ts"; -import { type TextGenerationShape, TextGeneration } from "../Services/TextGeneration.ts"; +} from "./TextGenerationPrompts.ts"; +import { type TextGenerationShape } from "./TextGeneration.ts"; import { extractJsonObject, sanitizeCommitSubject, sanitizePrTitle, sanitizeThreadTitle, -} from "../Utils.ts"; +} from "./TextGenerationUtils.ts"; import { OpenCodeRuntime, type OpenCodeServerConnection, @@ -31,7 +32,7 @@ import { openCodeRuntimeErrorDetail, parseOpenCodeModelSlug, toOpenCodeFileParts, -} from "../../provider/opencodeRuntime.ts"; +} from "../provider/opencodeRuntime.ts"; const OPENCODE_TEXT_GENERATION_IDLE_TTL = "30 seconds"; @@ -92,9 +93,11 @@ interface SharedOpenCodeTextGenerationServerState { idleCloseFiber: Fiber.Fiber | null; } -const makeOpenCodeTextGeneration = Effect.gen(function* () { +export const makeOpenCodeTextGeneration = Effect.fn("makeOpenCodeTextGeneration")(function* ( + openCodeSettings: OpenCodeSettings, + environment: NodeJS.ProcessEnv = process.env, +) { const serverConfig = yield* ServerConfig; - const serverSettingsService = yield* ServerSettingsService; const openCodeRuntime = yield* OpenCodeRuntime; const idleFiberScope = yield* Effect.acquireRelease(Scope.make(), (scope) => Scope.close(scope, Exit.void), @@ -201,6 +204,7 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { openCodeRuntime .startOpenCodeServerProcess({ binaryPath: input.binaryPath, + environment, }) .pipe( Effect.provideService(Scope.Scope, serverScope), @@ -266,7 +270,7 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { readonly cwd: string; readonly prompt: string; readonly outputSchemaJson: S; - readonly modelSelection: OpenCodeModelSelection; + readonly modelSelection: ModelSelection; readonly attachments?: ReadonlyArray | undefined; }) { const parsedModel = parseOpenCodeModelSlug(input.modelSelection.model); @@ -277,26 +281,6 @@ const 
makeOpenCodeTextGeneration = Effect.gen(function* () { }); } - const settings = yield* serverSettingsService.getSettings.pipe( - Effect.map( - (value) => - value.providers?.opencode ?? { - enabled: true, - binaryPath: "opencode", - serverUrl: "", - serverPassword: "", - customModels: [], - }, - ), - Effect.orElseSucceed(() => ({ - enabled: true, - binaryPath: "opencode", - serverUrl: "", - serverPassword: "", - customModels: [], - })), - ); - const fileParts = toOpenCodeFileParts({ attachments: input.attachments, resolveAttachmentPath: (attachment) => @@ -309,8 +293,8 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { const client = openCodeRuntime.createOpenCodeSdkClient({ baseUrl: server.url, directory: input.cwd, - ...(settings.serverUrl.length > 0 && settings.serverPassword - ? { serverPassword: settings.serverPassword } + ...(openCodeSettings.serverUrl.length > 0 && openCodeSettings.serverPassword + ? { serverPassword: openCodeSettings.serverPassword } : {}), }); const session = await client.session.create({ @@ -320,16 +304,17 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { if (!session.data) { throw new Error("OpenCode session.create returned no session payload."); } + const selectedAgent = getModelSelectionStringOptionValue(input.modelSelection, "agent"); + const selectedVariant = getModelSelectionStringOptionValue( + input.modelSelection, + "variant", + ); const result = await client.session.prompt({ sessionID: session.data.id, model: parsedModel, - ...(input.modelSelection.options?.agent - ? { agent: input.modelSelection.options.agent } - : {}), - ...(input.modelSelection.options?.variant - ? { variant: input.modelSelection.options.variant } - : {}), + ...(selectedAgent ? { agent: selectedAgent } : {}), + ...(selectedVariant ? 
{ variant: selectedVariant } : {}), parts: [{ type: "text", text: input.prompt }, ...fileParts], }); const info = result.data?.info; @@ -352,11 +337,11 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { }); const rawOutput = - settings.serverUrl.length > 0 - ? yield* runAgainstServer({ url: settings.serverUrl }) + openCodeSettings.serverUrl.length > 0 + ? yield* runAgainstServer({ url: openCodeSettings.serverUrl }) : yield* Effect.acquireUseRelease( acquireSharedServer({ - binaryPath: settings.binaryPath, + binaryPath: openCodeSettings.binaryPath, operation: input.operation, }), runAgainstServer, @@ -381,13 +366,6 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { const generateCommitMessage: TextGenerationShape["generateCommitMessage"] = Effect.fn( "OpenCodeTextGeneration.generateCommitMessage", )(function* (input) { - if (input.modelSelection.provider !== "opencode") { - return yield* new TextGenerationError({ - operation: "generateCommitMessage", - detail: "Invalid model selection.", - }); - } - const { prompt, outputSchema } = buildCommitMessagePrompt({ branch: input.branch, stagedSummary: input.stagedSummary, @@ -414,13 +392,6 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { const generatePrContent: TextGenerationShape["generatePrContent"] = Effect.fn( "OpenCodeTextGeneration.generatePrContent", )(function* (input) { - if (input.modelSelection.provider !== "opencode") { - return yield* new TextGenerationError({ - operation: "generatePrContent", - detail: "Invalid model selection.", - }); - } - const { prompt, outputSchema } = buildPrContentPrompt({ baseBranch: input.baseBranch, headBranch: input.headBranch, @@ -445,13 +416,6 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { const generateBranchName: TextGenerationShape["generateBranchName"] = Effect.fn( "OpenCodeTextGeneration.generateBranchName", )(function* (input) { - if (input.modelSelection.provider !== "opencode") { - return yield* new 
TextGenerationError({ - operation: "generateBranchName", - detail: "Invalid model selection.", - }); - } - const { prompt, outputSchema } = buildBranchNamePrompt({ message: input.message, attachments: input.attachments, @@ -473,13 +437,6 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { const generateThreadTitle: TextGenerationShape["generateThreadTitle"] = Effect.fn( "OpenCodeTextGeneration.generateThreadTitle", )(function* (input) { - if (input.modelSelection.provider !== "opencode") { - return yield* new TextGenerationError({ - operation: "generateThreadTitle", - detail: "Invalid model selection.", - }); - } - const { prompt, outputSchema } = buildThreadTitlePrompt({ message: input.message, attachments: input.attachments, @@ -505,5 +462,3 @@ const makeOpenCodeTextGeneration = Effect.gen(function* () { generateThreadTitle, } satisfies TextGenerationShape; }); - -export const OpenCodeTextGenerationLive = Layer.effect(TextGeneration, makeOpenCodeTextGeneration); diff --git a/apps/server/src/textGeneration/TextGeneration.test.ts b/apps/server/src/textGeneration/TextGeneration.test.ts new file mode 100644 index 00000000000..4f3d44f9258 --- /dev/null +++ b/apps/server/src/textGeneration/TextGeneration.test.ts @@ -0,0 +1,117 @@ +import { it } from "@effect/vitest"; +import { Effect, PubSub, Result, Stream } from "effect"; +import { describe, expect } from "vitest"; + +import { ProviderInstanceId } from "@t3tools/contracts"; +import { createModelSelection } from "@t3tools/shared/model"; + +import type { ProviderInstance } from "../provider/ProviderDriver.ts"; +import type { ProviderInstanceRegistryShape } from "../provider/Services/ProviderInstanceRegistry.ts"; +import type { TextGenerationShape } from "./TextGeneration.ts"; + +import { makeTextGenerationFromRegistry } from "./TextGeneration.ts"; + +const makeStubTextGeneration = (overrides: Partial): TextGenerationShape => ({ + generateCommitMessage: () => + Effect.die("generateCommitMessage stub not 
configured for this test"), + generatePrContent: () => Effect.die("generatePrContent stub not configured for this test"), + generateBranchName: () => Effect.die("generateBranchName stub not configured for this test"), + generateThreadTitle: () => Effect.die("generateThreadTitle stub not configured for this test"), + ...overrides, +}); + +const makeStubInstance = ( + instanceId: ProviderInstanceId, + textGeneration: TextGenerationShape, +): ProviderInstance => + ({ + instanceId, + driverKind: instanceId as unknown as ProviderInstance["driverKind"], + continuationIdentity: { + driverKind: instanceId as unknown as ProviderInstance["driverKind"], + continuationKey: `${instanceId}:test`, + }, + displayName: undefined, + enabled: true, + snapshot: {} as ProviderInstance["snapshot"], + adapter: {} as ProviderInstance["adapter"], + textGeneration, + }) satisfies ProviderInstance; + +const makeStubRegistry = ( + instances: ReadonlyArray, +): ProviderInstanceRegistryShape => { + const byId = new Map(instances.map((instance) => [instance.instanceId, instance] as const)); + return { + getInstance: (id) => Effect.succeed(byId.get(id)), + listInstances: Effect.succeed(instances), + listUnavailable: Effect.succeed([]), + streamChanges: Stream.empty, + // Tests never drive changes through this stub; acquire a throwaway + // subscription on an unused PubSub so the shape is satisfied. 
+ subscribeChanges: Effect.flatMap(PubSub.unbounded(), (pubsub) => + PubSub.subscribe(pubsub), + ), + }; +}; + +describe("makeTextGenerationFromRegistry", () => { + it.effect("delegates to the matching instance's textGeneration closure", () => + Effect.gen(function* () { + const personalId = ProviderInstanceId.make("codex_personal"); + const personalCalls: string[] = []; + const personal = makeStubInstance( + personalId, + makeStubTextGeneration({ + generateBranchName: (input) => { + personalCalls.push(input.message); + return Effect.succeed({ branch: "personal-branch" }); + }, + }), + ); + + const workId = ProviderInstanceId.make("codex_work"); + const work = makeStubInstance( + workId, + makeStubTextGeneration({ + generateBranchName: () => Effect.succeed({ branch: "work-branch" }), + }), + ); + + const tg = makeTextGenerationFromRegistry(makeStubRegistry([personal, work])); + + const result = yield* tg.generateBranchName({ + cwd: process.cwd(), + message: "Refactor the routing layer", + modelSelection: createModelSelection(ProviderInstanceId.make("codex_personal"), "gpt-5"), + }); + + expect(result.branch).toBe("personal-branch"); + expect(personalCalls).toEqual(["Refactor the routing layer"]); + }), + ); + + it.effect("fails with TextGenerationError when the instance is unknown", () => + Effect.gen(function* () { + const tg = makeTextGenerationFromRegistry(makeStubRegistry([])); + + const result = yield* tg + .generateBranchName({ + cwd: process.cwd(), + message: "anything", + modelSelection: createModelSelection( + ProviderInstanceId.make("missing_instance"), + "gpt-5", + ), + }) + .pipe(Effect.result); + + expect(Result.isFailure(result)).toBe(true); + if (Result.isFailure(result)) { + expect(result.failure._tag).toBe("TextGenerationError"); + expect(result.failure.operation).toBe("generateBranchName"); + expect(result.failure.detail).toContain("missing_instance"); + } + }), + ); +}); diff --git a/apps/server/src/git/Services/TextGeneration.ts 
b/apps/server/src/textGeneration/TextGeneration.ts similarity index 57% rename from apps/server/src/git/Services/TextGeneration.ts rename to apps/server/src/textGeneration/TextGeneration.ts index c0356438a29..51796faf8a7 100644 --- a/apps/server/src/git/Services/TextGeneration.ts +++ b/apps/server/src/textGeneration/TextGeneration.ts @@ -1,18 +1,13 @@ -/** - * TextGeneration - Effect service contract for AI-generated Git content. - * - * Generates commit messages and pull request titles/bodies from repository - * context prepared by Git services. - * - * @module TextGeneration - */ -import { Context } from "effect"; -import type { Effect } from "effect"; -import type { ChatAttachment, ModelSelection, ProviderKind } from "@t3tools/contracts"; +import { Context, Effect, Layer } from "effect"; +import type { ChatAttachment, ModelSelection, ProviderInstanceId } from "@t3tools/contracts"; +import { TextGenerationError } from "@t3tools/contracts"; -import type { TextGenerationError } from "@t3tools/contracts"; +import { + ProviderInstanceRegistry, + type ProviderInstanceRegistryShape, +} from "../provider/Services/ProviderInstanceRegistry.ts"; +import type { ProviderInstance } from "../provider/ProviderDriver.ts"; -/** Providers that support git text generation (commit messages, PR content, branch names). */ export type TextGenerationProvider = "codex" | "claudeAgent" | "cursor" | "opencode"; export interface CommitMessageGenerationInput { @@ -20,8 +15,6 @@ export interface CommitMessageGenerationInput { branch: string | null; stagedSummary: string; stagedPatch: string; - provider?: ProviderKind | undefined; - model?: string | undefined; /** When true, the model also returns a semantic branch name for the change. */ includeBranch?: boolean; /** What model and provider to use for generation. 
*/ @@ -42,8 +35,6 @@ export interface PrContentGenerationInput { commitSummary: string; diffSummary: string; diffPatch: string; - provider?: ProviderKind | undefined; - model?: string | undefined; /** What model and provider to use for generation. */ modelSelection: ModelSelection; } @@ -56,8 +47,6 @@ export interface PrContentGenerationResult { export interface BranchNameGenerationInput { cwd: string; message: string; - provider?: ProviderKind | undefined; - model?: string | undefined; attachments?: ReadonlyArray | undefined; /** What model and provider to use for generation. */ modelSelection: ModelSelection; @@ -125,5 +114,58 @@ export interface TextGenerationShape { * TextGeneration - Service tag for commit and PR text generation. */ export class TextGeneration extends Context.Service()( - "t3/git/Services/TextGeneration", + "t3/text-generation/TextGeneration", ) {} + +type TextGenerationOp = + | "generateCommitMessage" + | "generatePrContent" + | "generateBranchName" + | "generateThreadTitle"; + +const resolveInstance = ( + registry: ProviderInstanceRegistryShape, + operation: TextGenerationOp, + instanceId: ProviderInstanceId, +): Effect.Effect => + registry.getInstance(instanceId).pipe( + Effect.flatMap((instance) => + instance + ? 
Effect.succeed(instance.textGeneration) + : Effect.fail( + new TextGenerationError({ + operation, + detail: `No provider instance registered for id '${instanceId}'.`, + }), + ), + ), + ); + +export const makeTextGenerationFromRegistry = ( + registry: ProviderInstanceRegistryShape, +): TextGenerationShape => ({ + generateCommitMessage: (input) => + resolveInstance(registry, "generateCommitMessage", input.modelSelection.instanceId).pipe( + Effect.flatMap((textGeneration) => textGeneration.generateCommitMessage(input)), + ), + generatePrContent: (input) => + resolveInstance(registry, "generatePrContent", input.modelSelection.instanceId).pipe( + Effect.flatMap((textGeneration) => textGeneration.generatePrContent(input)), + ), + generateBranchName: (input) => + resolveInstance(registry, "generateBranchName", input.modelSelection.instanceId).pipe( + Effect.flatMap((textGeneration) => textGeneration.generateBranchName(input)), + ), + generateThreadTitle: (input) => + resolveInstance(registry, "generateThreadTitle", input.modelSelection.instanceId).pipe( + Effect.flatMap((textGeneration) => textGeneration.generateThreadTitle(input)), + ), +}); + +export const layer = Layer.effect( + TextGeneration, + Effect.gen(function* () { + const registry = yield* ProviderInstanceRegistry; + return makeTextGenerationFromRegistry(registry); + }), +); diff --git a/apps/server/src/textGeneration/TextGenerationPolicy.ts b/apps/server/src/textGeneration/TextGenerationPolicy.ts new file mode 100644 index 00000000000..b0e020fa4fc --- /dev/null +++ b/apps/server/src/textGeneration/TextGenerationPolicy.ts @@ -0,0 +1,19 @@ +import { Schema } from "effect"; + +export const TextGenerationPolicyKind = Schema.Literals([ + "default", + "conventional_commits", + "repo_conventions", + "custom", +]); +export type TextGenerationPolicyKind = typeof TextGenerationPolicyKind.Type; + +export const TextGenerationPolicy = Schema.Struct({ + kind: TextGenerationPolicyKind, + commitInstructions: 
Schema.optional(Schema.String), + changeRequestInstructions: Schema.optional(Schema.String), + branchInstructions: Schema.optional(Schema.String), + threadTitleInstructions: Schema.optional(Schema.String), + inferRepositoryConventions: Schema.Boolean, +}); +export type TextGenerationPolicy = typeof TextGenerationPolicy.Type; diff --git a/apps/server/src/textGeneration/TextGenerationPresets.ts b/apps/server/src/textGeneration/TextGenerationPresets.ts new file mode 100644 index 00000000000..70955742148 --- /dev/null +++ b/apps/server/src/textGeneration/TextGenerationPresets.ts @@ -0,0 +1,41 @@ +import type { TextGenerationPolicy, TextGenerationPolicyKind } from "./TextGenerationPolicy.ts"; + +export const defaultTextGenerationPolicy: TextGenerationPolicy = { + kind: "default", + inferRepositoryConventions: false, +}; + +export const conventionalCommitsTextGenerationPolicy: TextGenerationPolicy = { + kind: "conventional_commits", + commitInstructions: + "Use Conventional Commits when generating commit subjects. Prefer the narrowest accurate type and include a scope only when it is obvious from the diff.", + changeRequestInstructions: + "Keep the change request title concise. 
Do not force Conventional Commit syntax into the title unless the repository already uses it.", + inferRepositoryConventions: false, +}; + +export const repositoryConventionsTextGenerationPolicy: TextGenerationPolicy = { + kind: "repo_conventions", + commitInstructions: + "Follow the repository's established commit message style when examples are available.", + changeRequestInstructions: + "Follow the repository's established change request title and body style when examples are available.", + inferRepositoryConventions: true, +}; + +export const customTextGenerationPolicy = ( + overrides: Omit, "kind">, +): TextGenerationPolicy => ({ + kind: "custom", + inferRepositoryConventions: false, + ...overrides, +}); + +export const textGenerationPresets: Record< + Exclude, + TextGenerationPolicy +> = { + default: defaultTextGenerationPolicy, + conventional_commits: conventionalCommitsTextGenerationPolicy, + repo_conventions: repositoryConventionsTextGenerationPolicy, +}; diff --git a/apps/server/src/git/Prompts.test.ts b/apps/server/src/textGeneration/TextGenerationPrompts.test.ts similarity index 98% rename from apps/server/src/git/Prompts.test.ts rename to apps/server/src/textGeneration/TextGenerationPrompts.test.ts index d8d079c0cf3..25fed642270 100644 --- a/apps/server/src/git/Prompts.test.ts +++ b/apps/server/src/textGeneration/TextGenerationPrompts.test.ts @@ -5,8 +5,8 @@ import { buildCommitMessagePrompt, buildPrContentPrompt, buildThreadTitlePrompt, -} from "./Prompts.ts"; -import { normalizeCliError, sanitizeThreadTitle } from "./Utils.ts"; +} from "./TextGenerationPrompts.ts"; +import { normalizeCliError, sanitizeThreadTitle } from "./TextGenerationUtils.ts"; import { TextGenerationError } from "@t3tools/contracts"; describe("buildCommitMessagePrompt", () => { diff --git a/apps/server/src/git/Prompts.ts b/apps/server/src/textGeneration/TextGenerationPrompts.ts similarity index 87% rename from apps/server/src/git/Prompts.ts rename to 
apps/server/src/textGeneration/TextGenerationPrompts.ts index 4092358825c..43ae62047b9 100644 --- a/apps/server/src/git/Prompts.ts +++ b/apps/server/src/textGeneration/TextGenerationPrompts.ts @@ -9,7 +9,13 @@ import { Schema } from "effect"; import type { ChatAttachment } from "@t3tools/contracts"; -import { limitSection } from "./Utils.ts"; +import { limitSection } from "./TextGenerationUtils.ts"; +import type { TextGenerationPolicy } from "./TextGenerationPolicy.ts"; + +function policyInstruction(instruction: string | undefined): ReadonlyArray { + const trimmed = instruction?.trim(); + return trimmed ? ["", "Additional instructions:", limitSection(trimmed, 4_000)] : []; +} // --------------------------------------------------------------------------- // Commit message @@ -20,6 +26,7 @@ export interface CommitMessagePromptInput { stagedSummary: string; stagedPatch: string; includeBranch: boolean; + policy?: TextGenerationPolicy | undefined; } export function buildCommitMessagePrompt(input: CommitMessagePromptInput) { @@ -37,6 +44,7 @@ export function buildCommitMessagePrompt(input: CommitMessagePromptInput) { ? ["- branch must be a short semantic git branch fragment for this change"] : []), "- capture the primary user-visible or developer-visible change", + ...policyInstruction(input.policy?.commitInstructions), "", `Branch: ${input.branch ?? 
"(detached)"}`, "", @@ -77,6 +85,7 @@ export interface PrContentPromptInput { commitSummary: string; diffSummary: string; diffPatch: string; + policy?: TextGenerationPolicy | undefined; } export function buildPrContentPrompt(input: PrContentPromptInput) { @@ -88,6 +97,7 @@ export function buildPrContentPrompt(input: PrContentPromptInput) { "- body must be markdown and include headings '## Summary' and '## Testing'", "- under Summary, provide short bullet points", "- under Testing, include bullet points with concrete checks or 'Not run' where appropriate", + ...policyInstruction(input.policy?.changeRequestInstructions), "", `Base branch: ${input.baseBranch}`, `Head branch: ${input.headBranch}`, @@ -117,6 +127,7 @@ export function buildPrContentPrompt(input: PrContentPromptInput) { export interface BranchNamePromptInput { message: string; attachments?: ReadonlyArray | undefined; + policy?: TextGenerationPolicy | undefined; } interface PromptFromMessageInput { @@ -125,6 +136,7 @@ interface PromptFromMessageInput { rules: ReadonlyArray; message: string; attachments?: ReadonlyArray | undefined; + additionalInstructions?: string | undefined; } function buildPromptFromMessage(input: PromptFromMessageInput): string { @@ -140,6 +152,7 @@ function buildPromptFromMessage(input: PromptFromMessageInput): string { "", "User message:", limitSection(input.message, 8_000), + ...policyInstruction(input.additionalInstructions), ]; if (attachmentLines.length > 0) { promptSections.push( @@ -164,6 +177,7 @@ export function buildBranchNamePrompt(input: BranchNamePromptInput) { ], message: input.message, attachments: input.attachments, + additionalInstructions: input.policy?.branchInstructions, }); const outputSchema = Schema.Struct({ branch: Schema.String, @@ -179,6 +193,7 @@ export function buildBranchNamePrompt(input: BranchNamePromptInput) { export interface ThreadTitlePromptInput { message: string; attachments?: ReadonlyArray | undefined; + policy?: TextGenerationPolicy | undefined; 
} export function buildThreadTitlePrompt(input: ThreadTitlePromptInput) { @@ -193,6 +208,7 @@ export function buildThreadTitlePrompt(input: ThreadTitlePromptInput) { ], message: input.message, attachments: input.attachments, + additionalInstructions: input.policy?.threadTitleInstructions, }); const outputSchema = Schema.Struct({ title: Schema.String, diff --git a/apps/server/src/textGeneration/TextGenerationUtils.ts b/apps/server/src/textGeneration/TextGenerationUtils.ts new file mode 100644 index 00000000000..fcd8cc8b689 --- /dev/null +++ b/apps/server/src/textGeneration/TextGenerationUtils.ts @@ -0,0 +1,159 @@ +import { Schema } from "effect"; + +import { TextGenerationError } from "@t3tools/contracts"; + +/** Convert an Effect Schema to a flat JSON Schema object, inlining `$defs` when present. */ +export function toJsonSchemaObject(schema: Schema.Top): unknown { + const document = Schema.toJsonSchemaDocument(schema); + if (document.definitions && Object.keys(document.definitions).length > 0) { + return { ...document.schema, $defs: document.definitions }; + } + return document.schema; +} + +/** Truncate a text section to `maxChars`, appending a `[truncated]` marker when needed. 
*/ +export function limitSection(value: string, maxChars: number): string { + if (value.length <= maxChars) return value; + const truncated = value.slice(0, maxChars); + return `${truncated}\n\n[truncated]`; +} + +export function extractJsonObject(raw: string): string { + const trimmed = raw.trim(); + if (trimmed.length === 0) { + return trimmed; + } + + const start = trimmed.indexOf("{"); + if (start < 0) { + return trimmed; + } + + let depth = 0; + let inString = false; + let escaping = false; + for (let index = start; index < trimmed.length; index += 1) { + const char = trimmed[index]; + if (inString) { + if (escaping) { + escaping = false; + } else if (char === "\\") { + escaping = true; + } else if (char === '"') { + inString = false; + } + continue; + } + + if (char === '"') { + inString = true; + continue; + } + + if (char === "{") { + depth += 1; + continue; + } + + if (char === "}") { + depth -= 1; + if (depth === 0) { + return trimmed.slice(start, index + 1); + } + } + } + + return trimmed.slice(start); +} + +/** Normalise a raw commit subject to imperative-mood, ≤72 chars, no trailing period. */ +export function sanitizeCommitSubject(raw: string): string { + const singleLine = raw.trim().split(/\r?\n/g)[0]?.trim() ?? ""; + const withoutTrailingPeriod = singleLine.replace(/[.]+$/g, "").trim(); + if (withoutTrailingPeriod.length === 0) { + return "Update project files"; + } + + if (withoutTrailingPeriod.length <= 72) { + return withoutTrailingPeriod; + } + return withoutTrailingPeriod.slice(0, 72).trimEnd(); +} + +/** Normalise a raw PR title to a single line with a sensible fallback. */ +export function sanitizePrTitle(raw: string): string { + const singleLine = raw.trim().split(/\r?\n/g)[0]?.trim() ?? ""; + if (singleLine.length > 0) { + return singleLine; + } + return "Update project changes"; +} + +/** Normalise a raw thread title to a compact single-line sidebar-safe label. 
*/ +export function sanitizeThreadTitle(raw: string): string { + const normalized = raw + .trim() + .split(/\r?\n/g)[0] + ?.trim() + .replace(/^['"`]+|['"`]+$/g, "") + .trim() + .replace(/\s+/g, " "); + + if (!normalized || normalized.trim().length === 0) { + return "New thread"; + } + + if (normalized.length <= 50) { + return normalized; + } + + return `${normalized.slice(0, 47).trimEnd()}...`; +} + +/** CLI name to human-readable label, e.g. "codex" → "Codex CLI (`codex`)" */ +function cliLabel(cliName: string): string { + const capitalized = cliName.charAt(0).toUpperCase() + cliName.slice(1); + return `${capitalized} CLI (\`${cliName}\`)`; +} + +/** + * Normalize an unknown error from a CLI text generation process into a + * typed `TextGenerationError`. Parameterized by CLI name so both Codex + * and Claude (and future providers) can share the same logic. + */ +export function normalizeCliError( + cliName: string, + operation: string, + error: unknown, + fallback: string, +): TextGenerationError { + if (Schema.is(TextGenerationError)(error)) { + return error; + } + + if (error instanceof Error) { + const lower = error.message.toLowerCase(); + if ( + error.message.includes(`Command not found: ${cliName}`) || + lower.includes(`spawn ${cliName}`) || + lower.includes("enoent") + ) { + return new TextGenerationError({ + operation, + detail: `${cliLabel(cliName)} is required but not available on PATH.`, + cause: error, + }); + } + return new TextGenerationError({ + operation, + detail: `${fallback}: ${error.message}`, + cause: error, + }); + } + + return new TextGenerationError({ + operation, + detail: fallback, + cause: error, + }); +} diff --git a/apps/server/src/vcs/GitVcsDriver.test.ts b/apps/server/src/vcs/GitVcsDriver.test.ts new file mode 100644 index 00000000000..0e5ba5b82ec --- /dev/null +++ b/apps/server/src/vcs/GitVcsDriver.test.ts @@ -0,0 +1,99 @@ +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { Effect, FileSystem, Layer, 
Path, PlatformError } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; +import { assert, it } from "@effect/vitest"; + +import { GitCommandError } from "@t3tools/contracts"; +import { ServerConfig } from "../config.ts"; +import * as GitVcsDriver from "./GitVcsDriver.ts"; +import * as VcsProcess from "./VcsProcess.ts"; +import { runVcsDriverContractSuite } from "./testing/VcsDriverContractHarness.ts"; + +const ServerConfigLayer = ServerConfig.layerTest(process.cwd(), { + prefix: "t3-git-vcs-contract-", +}); +const GitContractLayer = Layer.mergeAll(GitVcsDriver.vcsLayer, GitVcsDriver.layer).pipe( + Layer.provide(ServerConfigLayer), + Layer.provideMerge(VcsProcess.layer), + Layer.provideMerge(NodeServices.layer), +); + +const runGit = (cwd: string, args: ReadonlyArray) => + Effect.gen(function* () { + const driver = yield* GitVcsDriver.GitVcsDriver; + yield* driver.execute({ + operation: "GitVcsDriver.contract.git", + cwd, + args, + timeoutMs: 10_000, + }); + }); + +type GitContractError = GitCommandError | PlatformError.PlatformError; + +runVcsDriverContractSuite({ + name: "Git", + kind: "git", + layer: GitContractLayer, + fixture: { + createRepo: (cwd) => + Effect.gen(function* () { + yield* runGit(cwd, ["init"]); + yield* runGit(cwd, ["config", "user.email", "test@test.com"]); + yield* runGit(cwd, ["config", "user.name", "Test"]); + }), + writeFile: (cwd, relativePath, contents) => + Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const absolutePath = path.join(cwd, relativePath); + yield* fileSystem.makeDirectory(path.dirname(absolutePath), { recursive: true }); + yield* fileSystem.writeFileString(absolutePath, contents); + }), + trackFile: (cwd, relativePath) => runGit(cwd, ["add", relativePath]), + commit: (cwd, message) => runGit(cwd, ["commit", "-m", message]), + ignorePath: (cwd, pattern) => + Effect.gen(function* () { + const fileSystem = yield* 
FileSystem.FileSystem; + const path = yield* Path.Path; + yield* fileSystem.writeFileString(path.join(cwd, ".gitignore"), `${pattern}\n`); + }), + }, +}); + +it.effect("GitVcsDriver forwards execute env to the VCS process", () => { + let observedEnv: NodeJS.ProcessEnv | undefined; + + return Effect.gen(function* () { + const driver = yield* GitVcsDriver.makeVcsDriverShape(); + + yield* driver.execute({ + operation: "GitVcsDriver.test.env", + cwd: "/repo", + args: ["status"], + env: { + GIT_INDEX_FILE: "/tmp/t3-index", + }, + }); + + assert.deepStrictEqual(observedEnv, { + GIT_INDEX_FILE: "/tmp/t3-index", + }); + }).pipe( + Effect.provide( + Layer.mock(VcsProcess.VcsProcess)({ + run: (input) => + Effect.sync(() => { + observedEnv = input.env; + return { + exitCode: ChildProcessSpawner.ExitCode(0), + stdout: "", + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, + }; + }), + }), + ), + ); +}); diff --git a/apps/server/src/vcs/GitVcsDriver.ts b/apps/server/src/vcs/GitVcsDriver.ts new file mode 100644 index 00000000000..49a135aec4c --- /dev/null +++ b/apps/server/src/vcs/GitVcsDriver.ts @@ -0,0 +1,557 @@ +import { Context, DateTime, Effect, Layer, Option } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import { + GitCommandError, + VcsProcessExitError, + type VcsSwitchRefInput, + type VcsSwitchRefResult, + type VcsCreateRefInput, + type VcsCreateRefResult, + type VcsCreateWorktreeInput, + type VcsCreateWorktreeResult, + type VcsInitInput, + type VcsListRefsInput, + type VcsListRefsResult, + type VcsPullResult, + type VcsRemoveWorktreeInput, + type VcsStatusInput, + type VcsStatusResult, +} from "@t3tools/contracts"; +import * as GitVcsDriverCore from "./GitVcsDriverCore.ts"; +import * as VcsDriver from "./VcsDriver.ts"; +import * as VcsProcess from "./VcsProcess.ts"; + +export interface ExecuteGitInput { + readonly operation: string; + readonly cwd: string; + readonly args: ReadonlyArray; + readonly stdin?: string; + 
readonly env?: NodeJS.ProcessEnv; + readonly allowNonZeroExit?: boolean; + readonly timeoutMs?: number; + readonly maxOutputBytes?: number; + readonly truncateOutputAtMaxBytes?: boolean; + readonly progress?: ExecuteGitProgress; +} + +export interface ExecuteGitResult { + readonly exitCode: ChildProcessSpawner.ExitCode; + readonly stdout: string; + readonly stderr: string; + readonly stdoutTruncated: boolean; + readonly stderrTruncated: boolean; +} + +export interface GitStatusDetails { + isRepo: boolean; + sourceControlProvider?: VcsStatusResult["sourceControlProvider"]; + hasOriginRemote: boolean; + isDefaultBranch: boolean; + branch: string | null; + upstreamRef: string | null; + hasWorkingTreeChanges: boolean; + workingTree: VcsStatusResult["workingTree"]; + hasUpstream: boolean; + aheadCount: number; + behindCount: number; + aheadOfDefaultCount: number; +} + +export interface GitPreparedCommitContext { + stagedSummary: string; + stagedPatch: string; +} + +export interface ExecuteGitProgress { + readonly onStdoutLine?: (line: string) => Effect.Effect; + readonly onStderrLine?: (line: string) => Effect.Effect; + readonly onHookStarted?: (hookName: string) => Effect.Effect; + readonly onHookFinished?: (input: { + hookName: string; + exitCode: number | null; + durationMs: number | null; + }) => Effect.Effect; +} + +export interface GitCommitProgress { + readonly onOutputLine?: (input: { + stream: "stdout" | "stderr"; + text: string; + }) => Effect.Effect; + readonly onHookStarted?: (hookName: string) => Effect.Effect; + readonly onHookFinished?: (input: { + hookName: string; + exitCode: number | null; + durationMs: number | null; + }) => Effect.Effect; +} + +export interface GitCommitOptions { + readonly timeoutMs?: number; + readonly progress?: GitCommitProgress; +} + +export interface GitPushResult { + status: "pushed" | "skipped_up_to_date"; + branch: string; + upstreamBranch?: string | undefined; + setUpstream?: boolean | undefined; +} + +export interface 
GitRangeContext { + commitSummary: string; + diffSummary: string; + diffPatch: string; +} + +export interface GitRenameBranchInput { + cwd: string; + oldBranch: string; + newBranch: string; +} + +export interface GitRenameBranchResult { + branch: string; +} + +export interface GitFetchPullRequestBranchInput { + cwd: string; + prNumber: number; + branch: string; +} + +export interface GitEnsureRemoteInput { + cwd: string; + preferredName: string; + url: string; +} + +export interface GitFetchRemoteBranchInput { + cwd: string; + remoteName: string; + remoteBranch: string; + localBranch: string; +} + +export interface GitFetchRemoteTrackingBranchInput { + cwd: string; + remoteName: string; + remoteBranch: string; +} + +export interface GitSetBranchUpstreamInput { + cwd: string; + branch: string; + remoteName: string; + remoteBranch: string; +} + +export interface GitVcsDriverShape { + readonly execute: (input: ExecuteGitInput) => Effect.Effect; + readonly status: (input: VcsStatusInput) => Effect.Effect; + readonly statusDetails: (cwd: string) => Effect.Effect; + readonly statusDetailsLocal: (cwd: string) => Effect.Effect; + readonly prepareCommitContext: ( + cwd: string, + filePaths?: readonly string[], + ) => Effect.Effect; + readonly commit: ( + cwd: string, + subject: string, + body: string, + options?: GitCommitOptions, + ) => Effect.Effect<{ commitSha: string }, GitCommandError>; + readonly pushCurrentBranch: ( + cwd: string, + fallbackBranch: string | null, + options?: { readonly remoteName?: string | null }, + ) => Effect.Effect; + readonly readRangeContext: ( + cwd: string, + baseRef: string, + ) => Effect.Effect; + readonly readConfigValue: ( + cwd: string, + key: string, + ) => Effect.Effect; + readonly listRefs: (input: VcsListRefsInput) => Effect.Effect; + readonly pullCurrentBranch: (cwd: string) => Effect.Effect; + readonly createWorktree: ( + input: VcsCreateWorktreeInput, + ) => Effect.Effect; + readonly fetchPullRequestBranch: ( + input: 
GitFetchPullRequestBranchInput, + ) => Effect.Effect; + readonly ensureRemote: (input: GitEnsureRemoteInput) => Effect.Effect; + readonly resolvePrimaryRemoteName: (cwd: string) => Effect.Effect; + readonly fetchRemoteBranch: ( + input: GitFetchRemoteBranchInput, + ) => Effect.Effect; + readonly fetchRemoteTrackingBranch: ( + input: GitFetchRemoteTrackingBranchInput, + ) => Effect.Effect; + readonly setBranchUpstream: ( + input: GitSetBranchUpstreamInput, + ) => Effect.Effect; + readonly removeWorktree: (input: VcsRemoveWorktreeInput) => Effect.Effect; + readonly renameBranch: ( + input: GitRenameBranchInput, + ) => Effect.Effect; + readonly createRef: ( + input: VcsCreateRefInput, + ) => Effect.Effect; + readonly switchRef: ( + input: VcsSwitchRefInput, + ) => Effect.Effect; + readonly initRepo: (input: VcsInitInput) => Effect.Effect; + readonly listLocalBranchNames: (cwd: string) => Effect.Effect; +} + +export class GitVcsDriver extends Context.Service()( + "t3/vcs/GitVcsDriver", +) {} + +const WORKSPACE_FILES_MAX_OUTPUT_BYTES = 16 * 1024 * 1024; +const GIT_CHECK_IGNORE_MAX_STDIN_BYTES = 256 * 1024; +const WORKSPACE_GIT_HARDENED_CONFIG_ARGS = [ + "-c", + "core.fsmonitor=false", + "-c", + "core.untrackedCache=false", +] as const; + +const nowFreshness = Effect.fn("GitVcsDriver.nowFreshness")(function* () { + const now = yield* DateTime.now; + return { + source: "live-local" as const, + observedAt: now, + expiresAt: Option.none(), + }; +}); + +function splitNullSeparatedPaths(input: string, truncated: boolean): string[] { + const parts = input.split("\0"); + if (parts.length === 0) return []; + + if (truncated && parts[parts.length - 1]?.length) { + parts.pop(); + } + + return parts.filter((value) => value.length > 0); +} + +function chunkPathsForGitCheckIgnore(relativePaths: ReadonlyArray): string[][] { + const chunks: string[][] = []; + let chunk: string[] = []; + let chunkBytes = 0; + + for (const relativePath of relativePaths) { + const relativePathBytes = 
Buffer.byteLength(relativePath) + 1; + if (chunk.length > 0 && chunkBytes + relativePathBytes > GIT_CHECK_IGNORE_MAX_STDIN_BYTES) { + chunks.push(chunk); + chunk = []; + chunkBytes = 0; + } + + chunk.push(relativePath); + chunkBytes += relativePathBytes; + + if (chunkBytes >= GIT_CHECK_IGNORE_MAX_STDIN_BYTES) { + chunks.push(chunk); + chunk = []; + chunkBytes = 0; + } + } + + if (chunk.length > 0) { + chunks.push(chunk); + } + + return chunks; +} + +function parseGitRemoteVerboseOutput( + output: string, +): Map { + const remotes = new Map(); + for (const line of output.split("\n")) { + const trimmed = line.trim(); + if (trimmed.length === 0) { + continue; + } + + const match = /^(\S+)\s+(\S+)\s+\((fetch|push)\)$/.exec(trimmed); + if (!match) { + continue; + } + + const name = match[1]; + const url = match[2]; + const direction = match[3]; + if (!name || !url || !direction) { + continue; + } + const remote = remotes.get(name) ?? {}; + if (direction === "fetch") { + remote.url = url; + } else { + remote.pushUrl = url; + } + remotes.set(name, remote); + } + return remotes; +} + +const gitCommand = ( + process: VcsProcess.VcsProcessShape, + operation: string, + cwd: string, + args: ReadonlyArray, + options?: { + readonly stdin?: string; + readonly env?: NodeJS.ProcessEnv; + readonly allowNonZeroExit?: boolean; + readonly timeoutMs?: number; + readonly maxOutputBytes?: number; + readonly truncateOutputAtMaxBytes?: boolean; + }, +) => + process.run({ + operation, + command: "git", + args, + cwd, + ...(options?.stdin !== undefined ? { stdin: options.stdin } : {}), + ...(options?.env !== undefined ? { env: options.env } : {}), + ...(options?.allowNonZeroExit !== undefined + ? { allowNonZeroExit: options.allowNonZeroExit } + : {}), + ...(options?.timeoutMs !== undefined ? { timeoutMs: options.timeoutMs } : {}), + ...(options?.maxOutputBytes !== undefined ? { maxOutputBytes: options.maxOutputBytes } : {}), + ...(options?.truncateOutputAtMaxBytes !== undefined + ? 
{ truncateOutputAtMaxBytes: options.truncateOutputAtMaxBytes } + : {}), + }); + +export const makeVcsDriverShape = Effect.fn("makeGitVcsDriverShape")(function* () { + const process = yield* VcsProcess.VcsProcess; + const capabilities = { + kind: "git" as const, + supportsWorktrees: true, + supportsBookmarks: false, + supportsAtomicSnapshot: false, + supportsPushDefaultRemote: true, + ignoreClassifier: "native" as const, + }; + + const isInsideWorkTree: VcsDriver.VcsDriverShape["isInsideWorkTree"] = (cwd) => + gitCommand( + process, + "GitVcsDriver.isInsideWorkTree", + cwd, + ["rev-parse", "--is-inside-work-tree"], + { + allowNonZeroExit: true, + timeoutMs: 5_000, + maxOutputBytes: 4_096, + }, + ).pipe(Effect.map((result) => result.exitCode === 0 && result.stdout.trim() === "true")); + + const execute: VcsDriver.VcsDriverShape["execute"] = (input) => + gitCommand(process, input.operation, input.cwd, input.args, { + ...(input.stdin !== undefined ? { stdin: input.stdin } : {}), + ...(input.env !== undefined ? { env: input.env } : {}), + ...(input.allowNonZeroExit !== undefined ? { allowNonZeroExit: input.allowNonZeroExit } : {}), + ...(input.timeoutMs !== undefined ? { timeoutMs: input.timeoutMs } : {}), + ...(input.maxOutputBytes !== undefined ? { maxOutputBytes: input.maxOutputBytes } : {}), + ...(input.truncateOutputAtMaxBytes !== undefined + ? 
{ truncateOutputAtMaxBytes: input.truncateOutputAtMaxBytes } + : {}), + }); + + const detectRepository: VcsDriver.VcsDriverShape["detectRepository"] = Effect.fn( + "detectRepository", + )(function* (cwd) { + if (!(yield* isInsideWorkTree(cwd))) { + return null; + } + + const root = yield* gitCommand(process, "GitVcsDriver.detectRepository.root", cwd, [ + "rev-parse", + "--show-toplevel", + ]); + const gitCommonDir = yield* gitCommand( + process, + "GitVcsDriver.detectRepository.commonDir", + cwd, + ["rev-parse", "--git-common-dir"], + ).pipe(Effect.catch(() => Effect.succeed(null))); + + return { + kind: "git" as const, + rootPath: root.stdout.trim(), + metadataPath: gitCommonDir?.stdout.trim() || null, + freshness: yield* nowFreshness(), + }; + }); + + const listWorkspaceFiles: VcsDriver.VcsDriverShape["listWorkspaceFiles"] = (cwd) => + gitCommand( + process, + "GitVcsDriver.listWorkspaceFiles", + cwd, + [ + ...WORKSPACE_GIT_HARDENED_CONFIG_ARGS, + "ls-files", + "--cached", + "--others", + "--exclude-standard", + "-z", + ], + { + allowNonZeroExit: true, + timeoutMs: 20_000, + maxOutputBytes: WORKSPACE_FILES_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ).pipe( + Effect.flatMap((result) => + result.exitCode === 0 + ? 
Effect.gen(function* () { + const freshness = yield* nowFreshness(); + return { + paths: splitNullSeparatedPaths(result.stdout, result.stdoutTruncated), + truncated: result.stdoutTruncated, + freshness, + }; + }) + : Effect.fail( + new VcsProcessExitError({ + operation: "GitVcsDriver.listWorkspaceFiles", + command: "git ls-files", + cwd, + exitCode: result.exitCode, + detail: result.stderr.trim() || "git ls-files failed", + }), + ), + ), + ); + + const listRemotes: VcsDriver.VcsDriverShape["listRemotes"] = Effect.fn("listRemotes")( + function* (cwd) { + const result = yield* gitCommand(process, "GitVcsDriver.listRemotes", cwd, ["remote", "-v"], { + allowNonZeroExit: true, + timeoutMs: 5_000, + maxOutputBytes: 64 * 1024, + }); + + if (result.exitCode !== 0) { + return yield* new VcsProcessExitError({ + operation: "GitVcsDriver.listRemotes", + command: "git remote -v", + cwd, + exitCode: result.exitCode, + detail: result.stderr.trim() || "git remote -v failed", + }); + } + + const parsed = parseGitRemoteVerboseOutput(result.stdout); + const remotes = Array.from(parsed.entries()).flatMap(([name, remote]) => { + if (!remote.url) { + return []; + } + return [ + { + name, + url: remote.url, + pushUrl: remote.pushUrl ? 
Option.some(remote.pushUrl) : Option.none(), + isPrimary: name === "origin", + }, + ]; + }); + + return { + remotes, + freshness: yield* nowFreshness(), + }; + }, + ); + + const filterIgnoredPaths: VcsDriver.VcsDriverShape["filterIgnoredPaths"] = Effect.fn( + "filterIgnoredPaths", + )(function* (cwd, relativePaths) { + if (relativePaths.length === 0) { + return relativePaths; + } + + const ignoredPaths = new Set(); + const chunks = chunkPathsForGitCheckIgnore(relativePaths); + + for (const chunk of chunks) { + const result = yield* gitCommand( + process, + "GitVcsDriver.filterIgnoredPaths", + cwd, + [...WORKSPACE_GIT_HARDENED_CONFIG_ARGS, "check-ignore", "--no-index", "-z", "--stdin"], + { + stdin: `${chunk.join("\0")}\0`, + allowNonZeroExit: true, + timeoutMs: 20_000, + maxOutputBytes: WORKSPACE_FILES_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ); + + if (result.exitCode !== 0 && result.exitCode !== 1) { + return yield* new VcsProcessExitError({ + operation: "GitVcsDriver.filterIgnoredPaths", + command: "git check-ignore", + cwd, + exitCode: result.exitCode, + detail: result.stderr.trim() || "git check-ignore failed", + }); + } + + for (const ignoredPath of splitNullSeparatedPaths(result.stdout, result.stdoutTruncated)) { + ignoredPaths.add(ignoredPath); + } + } + + if (ignoredPaths.size === 0) { + return relativePaths; + } + + return relativePaths.filter((relativePath) => !ignoredPaths.has(relativePath)); + }); + + const initRepository: VcsDriver.VcsDriverShape["initRepository"] = (input) => + gitCommand(process, "GitVcsDriver.initRepository", input.cwd, ["init"], { + timeoutMs: 10_000, + maxOutputBytes: 64 * 1024, + }).pipe(Effect.asVoid); + + return VcsDriver.VcsDriver.of({ + capabilities, + execute, + detectRepository, + isInsideWorkTree, + listWorkspaceFiles, + listRemotes, + filterIgnoredPaths, + initRepository, + }); +}); + +export const makeVcsDriver = Effect.fn("makeGitVcsDriver")(function* () { + const driver = yield* makeVcsDriverShape(); 
+ return VcsDriver.VcsDriver.of(driver); +}); + +export const make = Effect.fn("makeGitVcsDriverService")(function* () { + const git = yield* GitVcsDriverCore.makeGitVcsDriverCore(); + return GitVcsDriver.of(git); +}); + +export const vcsLayer = Layer.effect(VcsDriver.VcsDriver, makeVcsDriver()); +export const layer = Layer.effect(GitVcsDriver, make()); diff --git a/apps/server/src/vcs/GitVcsDriverCore.test.ts b/apps/server/src/vcs/GitVcsDriverCore.test.ts new file mode 100644 index 00000000000..0daf9ab9564 --- /dev/null +++ b/apps/server/src/vcs/GitVcsDriverCore.test.ts @@ -0,0 +1,365 @@ +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { assert, it, describe } from "@effect/vitest"; +import { Effect, FileSystem, Layer, Path, PlatformError, Scope } from "effect"; + +import { GitCommandError } from "@t3tools/contracts"; +import { ServerConfig } from "../config.ts"; +import * as GitVcsDriver from "./GitVcsDriver.ts"; + +const ServerConfigLayer = ServerConfig.layerTest(process.cwd(), { + prefix: "t3-git-vcs-driver-test-", +}); +const TestLayer = GitVcsDriver.layer.pipe( + Layer.provide(ServerConfigLayer), + Layer.provideMerge(NodeServices.layer), +); + +const makeTmpDir = ( + prefix = "git-vcs-driver-test-", +): Effect.Effect => + Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + return yield* fileSystem.makeTempDirectoryScoped({ prefix }); + }); + +const writeTextFile = ( + cwd: string, + relativePath: string, + contents: string, +): Effect.Effect => + Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const pathService = yield* Path.Path; + const filePath = pathService.join(cwd, relativePath); + yield* fileSystem.makeDirectory(pathService.dirname(filePath), { recursive: true }); + yield* fileSystem.writeFileString(filePath, contents); + }); + +const git = ( + cwd: string, + args: ReadonlyArray, + env?: NodeJS.ProcessEnv, +): Effect.Effect => + Effect.gen(function* () { + const 
driver = yield* GitVcsDriver.GitVcsDriver; + const result = yield* driver.execute({ + operation: "GitVcsDriver.test.git", + cwd, + args, + ...(env ? { env } : {}), + timeoutMs: 10_000, + }); + return result.stdout.trim(); + }); + +const initRepoWithCommit = ( + cwd: string, +): Effect.Effect< + { readonly initialBranch: string }, + GitCommandError | PlatformError.PlatformError, + GitVcsDriver.GitVcsDriver | FileSystem.FileSystem | Path.Path +> => + Effect.gen(function* () { + const driver = yield* GitVcsDriver.GitVcsDriver; + yield* driver.initRepo({ cwd }); + yield* git(cwd, ["config", "user.email", "test@test.com"]); + yield* git(cwd, ["config", "user.name", "Test"]); + yield* writeTextFile(cwd, "README.md", "# test\n"); + yield* git(cwd, ["add", "."]); + yield* git(cwd, ["commit", "-m", "initial commit"]); + const initialBranch = yield* git(cwd, ["branch", "--show-current"]); + return { initialBranch }; + }); + +it.layer(TestLayer)("GitVcsDriver core integration", (it) => { + describe("repository status", () => { + it.effect("reports non-repository directories without failing", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const driver = yield* GitVcsDriver.GitVcsDriver; + + const refs = yield* driver.listRefs({ cwd }); + assert.equal(refs.isRepo, false); + assert.deepStrictEqual(refs.refs, []); + }), + ); + + it.effect("reports refName and dirty state for a repository", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const { initialBranch } = yield* initRepoWithCommit(cwd); + yield* writeTextFile(cwd, "feature.ts", "export const value = 1;\n"); + + const status = yield* (yield* GitVcsDriver.GitVcsDriver).statusDetails(cwd); + + assert.equal(status.isRepo, true); + assert.equal(status.branch, initialBranch); + assert.equal(status.hasWorkingTreeChanges, true); + assert.include( + status.workingTree.files.map((file) => file.path), + "feature.ts", + ); + }), + ); + + it.effect("reports default-branch delta separately 
from upstream delta", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const remote = yield* makeTmpDir("git-vcs-driver-remote-"); + const { initialBranch } = yield* initRepoWithCommit(cwd); + yield* git(remote, ["init", "--bare"]); + yield* git(cwd, ["remote", "add", "origin", remote]); + yield* git(cwd, ["push", "-u", "origin", initialBranch]); + yield* git(cwd, ["checkout", "-b", "feature/synced"]); + yield* writeTextFile(cwd, "feature.txt", "feature\n"); + yield* git(cwd, ["add", "feature.txt"]); + yield* git(cwd, ["commit", "-m", "feature commit"]); + yield* git(cwd, ["push", "-u", "origin", "feature/synced"]); + + const status = yield* (yield* GitVcsDriver.GitVcsDriver).statusDetails(cwd); + + assert.equal(status.hasUpstream, true); + assert.equal(status.aheadCount, 0); + assert.equal(status.behindCount, 0); + assert.equal(status.aheadOfDefaultCount, 1); + }), + ); + + it.effect("reuses the no-upstream fallback ahead count for default-branch delta", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const remote = yield* makeTmpDir("git-vcs-driver-remote-"); + const { initialBranch } = yield* initRepoWithCommit(cwd); + yield* git(remote, ["init", "--bare"]); + yield* git(cwd, ["remote", "add", "origin", remote]); + yield* git(cwd, ["push", "-u", "origin", initialBranch]); + yield* git(cwd, ["checkout", "-b", "feature/no-upstream"]); + yield* writeTextFile(cwd, "feature.txt", "feature\n"); + yield* git(cwd, ["add", "feature.txt"]); + yield* git(cwd, ["commit", "-m", "feature commit"]); + + const status = yield* (yield* GitVcsDriver.GitVcsDriver).statusDetails(cwd); + + assert.equal(status.hasUpstream, false); + assert.equal(status.aheadCount, 1); + assert.equal(status.behindCount, 0); + assert.equal(status.aheadOfDefaultCount, 1); + }), + ); + }); + + describe("refName operations", () => { + it.effect("creates, checks out, renames, and lists refs", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + 
yield* initRepoWithCommit(cwd); + const driver = yield* GitVcsDriver.GitVcsDriver; + + yield* driver.createRef({ cwd, refName: "feature/original" }); + const switchRef = yield* driver.switchRef({ cwd, refName: "feature/original" }); + assert.equal(switchRef.refName, "feature/original"); + + const renamed = yield* driver.renameBranch({ + cwd, + oldBranch: "feature/original", + newBranch: "feature/renamed", + }); + assert.equal(renamed.branch, "feature/renamed"); + assert.equal(yield* git(cwd, ["branch", "--show-current"]), "feature/renamed"); + + const refs = yield* driver.listRefs({ cwd }); + assert.equal( + refs.refs.find((refName) => refName.name === "feature/renamed")?.current, + true, + ); + }), + ); + + it.effect("returns the existing refName when rename source and target match", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + yield* initRepoWithCommit(cwd); + const driver = yield* GitVcsDriver.GitVcsDriver; + + const current = yield* git(cwd, ["branch", "--show-current"]); + const result = yield* driver.renameBranch({ + cwd, + oldBranch: current, + newBranch: current, + }); + + assert.equal(result.branch, current); + }), + ); + }); + + describe("worktree operations", () => { + it.effect("creates and removes a worktree for a new refName", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const { initialBranch } = yield* initRepoWithCommit(cwd); + const pathService = yield* Path.Path; + const worktreePath = pathService.join( + yield* makeTmpDir("git-worktrees-"), + "feature-worktree", + ); + const driver = yield* GitVcsDriver.GitVcsDriver; + + const created = yield* driver.createWorktree({ + cwd, + path: worktreePath, + refName: initialBranch, + newRefName: "feature/worktree", + }); + + assert.equal(created.worktree.path, worktreePath); + assert.equal(created.worktree.refName, "feature/worktree"); + assert.equal(yield* git(worktreePath, ["branch", "--show-current"]), "feature/worktree"); + + yield* 
driver.removeWorktree({ cwd, path: worktreePath }); + const fileSystem = yield* FileSystem.FileSystem; + assert.equal(yield* fileSystem.exists(worktreePath), false); + }), + ); + }); + + describe("commit context", () => { + it.effect("stages selected files and commits only those files", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + yield* initRepoWithCommit(cwd); + const driver = yield* GitVcsDriver.GitVcsDriver; + + yield* writeTextFile(cwd, "a.txt", "a\n"); + yield* writeTextFile(cwd, "b.txt", "b\n"); + + const context = yield* driver.prepareCommitContext(cwd, ["a.txt"]); + assert.include(context?.stagedSummary ?? "", "a.txt"); + assert.notInclude(context?.stagedSummary ?? "", "b.txt"); + + const commit = yield* driver.commit(cwd, "Add a", ""); + assert.match(commit.commitSha, /^[a-f0-9]{40}$/); + assert.equal(yield* git(cwd, ["log", "-1", "--pretty=%s"]), "Add a"); + + const status = yield* git(cwd, ["status", "--porcelain"]); + assert.include(status, "?? b.txt"); + assert.notInclude(status, "a.txt"); + }), + ); + }); + + describe("remote operations", () => { + it.effect("pushes with upstream setup and skips when already up to date", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const remote = yield* makeTmpDir("git-remote-"); + yield* initRepoWithCommit(cwd); + yield* git(remote, ["init", "--bare"]); + yield* git(cwd, ["remote", "add", "origin", remote]); + yield* (yield* GitVcsDriver.GitVcsDriver).createRef({ + cwd, + refName: "feature/push", + }); + yield* (yield* GitVcsDriver.GitVcsDriver).switchRef({ + cwd, + refName: "feature/push", + }); + yield* writeTextFile(cwd, "feature.txt", "feature\n"); + yield* (yield* GitVcsDriver.GitVcsDriver).prepareCommitContext(cwd); + yield* (yield* GitVcsDriver.GitVcsDriver).commit(cwd, "Add feature", ""); + + const pushed = yield* (yield* GitVcsDriver.GitVcsDriver).pushCurrentBranch(cwd, null); + assert.deepInclude(pushed, { + status: "pushed", + branch: "feature/push", + 
setUpstream: true, + }); + assert.equal( + yield* git(cwd, ["rev-parse", "--abbrev-ref", "@{upstream}"]), + "origin/feature/push", + ); + + const skipped = yield* (yield* GitVcsDriver.GitVcsDriver).pushCurrentBranch(cwd, null); + assert.deepInclude(skipped, { + status: "skipped_up_to_date", + branch: "feature/push", + }); + }), + ); + + it.effect( + "pushes upstream branches to the remote branch name, not the upstream shorthand", + () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const remote = yield* makeTmpDir("git-remote-"); + yield* initRepoWithCommit(cwd); + const driver = yield* GitVcsDriver.GitVcsDriver; + yield* git(cwd, ["branch", "-M", "main"]); + yield* git(remote, ["init", "--bare"]); + yield* git(cwd, ["remote", "add", "origin", remote]); + yield* git(cwd, ["push", "-u", "origin", "main"]); + yield* writeTextFile(cwd, "upstream.txt", "upstream\n"); + yield* driver.prepareCommitContext(cwd); + yield* driver.commit(cwd, "Add upstream update", ""); + + const pushed = yield* driver.pushCurrentBranch(cwd, null); + + assert.deepInclude(pushed, { + status: "pushed", + branch: "main", + upstreamBranch: "origin/main", + setUpstream: false, + }); + assert.equal( + yield* git(remote, ["log", "-1", "--pretty=%s", "main"]), + "Add upstream update", + ); + const badBranch = yield* driver.execute({ + operation: "GitVcsDriver.test.showBadRemoteBranch", + cwd: remote, + args: ["show-ref", "--verify", "--quiet", "refs/heads/origin/main"], + allowNonZeroExit: true, + timeoutMs: 10_000, + }); + assert.notEqual(badBranch.exitCode, 0); + }), + ); + + it.effect("pushes to the requested remote instead of the primary remote", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const originRemote = yield* makeTmpDir("git-origin-remote-"); + const publishRemote = yield* makeTmpDir("git-publish-remote-"); + yield* initRepoWithCommit(cwd); + const driver = yield* GitVcsDriver.GitVcsDriver; + yield* git(cwd, ["branch", "-M", "main"]); + 
yield* git(originRemote, ["init", "--bare"]); + yield* git(publishRemote, ["init", "--bare"]); + yield* git(cwd, ["remote", "add", "origin", originRemote]); + yield* git(cwd, ["remote", "add", "origin-1", publishRemote]); + + const pushed = yield* driver.pushCurrentBranch(cwd, null, { remoteName: "origin-1" }); + + assert.deepInclude(pushed, { + status: "pushed", + branch: "main", + upstreamBranch: "origin-1/main", + setUpstream: true, + }); + assert.equal( + yield* git(publishRemote, ["log", "-1", "--pretty=%s", "main"]), + "initial commit", + ); + const originMain = yield* driver.execute({ + operation: "GitVcsDriver.test.originMainMissing", + cwd: originRemote, + args: ["show-ref", "--verify", "--quiet", "refs/heads/main"], + allowNonZeroExit: true, + timeoutMs: 10_000, + }); + assert.notEqual(originMain.exitCode, 0); + }), + ); + }); +}); diff --git a/apps/server/src/git/Layers/GitCore.ts b/apps/server/src/vcs/GitVcsDriverCore.ts similarity index 59% rename from apps/server/src/git/Layers/GitCore.ts rename to apps/server/src/vcs/GitVcsDriverCore.ts index 660d1092b40..4425285164d 100644 --- a/apps/server/src/git/Layers/GitCore.ts +++ b/apps/server/src/vcs/GitVcsDriverCore.ts @@ -1,11 +1,11 @@ import { Cache, Data, + DateTime, Duration, Effect, Exit, FileSystem, - Layer, Option, Path, PlatformError, @@ -19,25 +19,17 @@ import { import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; import nodePath from "node:path"; -import { GitCommandError, type GitBranch } from "@t3tools/contracts"; +import { GitCommandError, type VcsRef } from "@t3tools/contracts"; import { dedupeRemoteBranchesWithLocalMatches } from "@t3tools/shared/git"; -import { compactTraceAttributes } from "../../observability/Attributes.ts"; -import { gitCommandDuration, gitCommandsTotal, withMetrics } from "../../observability/Metrics.ts"; -import { - GitCore, - type ExecuteGitProgress, - type GitCommitOptions, - type GitCoreShape, - type GitStatusDetails, - type ExecuteGitInput, - 
type ExecuteGitResult, -} from "../Services/GitCore.ts"; +import { compactTraceAttributes } from "../observability/Attributes.ts"; +import { gitCommandDuration, gitCommandsTotal, withMetrics } from "../observability/Metrics.ts"; +import * as GitVcsDriver from "./GitVcsDriver.ts"; import { parseRemoteNames, parseRemoteNamesInGitOrder, parseRemoteRefWithRemoteNames, -} from "../remoteRefs.ts"; -import { ServerConfig } from "../../config.ts"; +} from "../git/remoteRefs.ts"; +import { ServerConfig } from "../config.ts"; import { decodeJsonResult } from "@t3tools/shared/schemaJson"; const DEFAULT_TIMEOUT_MS = 30_000; @@ -47,21 +39,13 @@ const PREPARED_COMMIT_PATCH_MAX_OUTPUT_BYTES = 49_000; const RANGE_COMMIT_SUMMARY_MAX_OUTPUT_BYTES = 19_000; const RANGE_DIFF_SUMMARY_MAX_OUTPUT_BYTES = 19_000; const RANGE_DIFF_PATCH_MAX_OUTPUT_BYTES = 59_000; -const WORKSPACE_FILES_MAX_OUTPUT_BYTES = 16 * 1024 * 1024; -const GIT_CHECK_IGNORE_MAX_STDIN_BYTES = 256 * 1024; -const WORKSPACE_GIT_HARDENED_CONFIG_ARGS = [ - "-c", - "core.fsmonitor=false", - "-c", - "core.untrackedCache=false", -] as const; const STATUS_UPSTREAM_REFRESH_INTERVAL = Duration.seconds(15); const STATUS_UPSTREAM_REFRESH_TIMEOUT = Duration.seconds(5); const STATUS_UPSTREAM_REFRESH_FAILURE_COOLDOWN = Duration.seconds(5); const STATUS_UPSTREAM_REFRESH_CACHE_CAPACITY = 2_048; const DEFAULT_BASE_BRANCH_CANDIDATES = ["main", "master"] as const; const GIT_LIST_BRANCHES_DEFAULT_LIMIT = 100; -const NON_REPOSITORY_STATUS_DETAILS = Object.freeze({ +const NON_REPOSITORY_STATUS_DETAILS = Object.freeze({ isRepo: false, hasOriginRemote: false, isDefaultBranch: false, @@ -72,6 +56,7 @@ const NON_REPOSITORY_STATUS_DETAILS = Object.freeze({ hasUpstream: false, aheadCount: 0, behindCount: 0, + aheadOfDefaultCount: 0, }); type TraceTailState = { @@ -91,7 +76,7 @@ interface ExecuteGitOptions { fallbackErrorMessage?: string | undefined; maxOutputBytes?: number | undefined; truncateOutputAtMaxBytes?: boolean | undefined; - progress?: 
ExecuteGitProgress | undefined; + progress?: GitVcsDriver.ExecuteGitProgress | undefined; } function parseBranchAb(value: string): { ahead: number; behind: number } { @@ -127,47 +112,6 @@ function parseNumstatEntries( return entries; } -function splitNullSeparatedPaths(input: string, truncated: boolean): string[] { - const parts = input.split("\0"); - if (parts.length === 0) return []; - - if (truncated && parts[parts.length - 1]?.length) { - parts.pop(); - } - - return parts.filter((value) => value.length > 0); -} - -function chunkPathsForGitCheckIgnore(relativePaths: readonly string[]): string[][] { - const chunks: string[][] = []; - let chunk: string[] = []; - let chunkBytes = 0; - - for (const relativePath of relativePaths) { - const relativePathBytes = Buffer.byteLength(relativePath) + 1; - if (chunk.length > 0 && chunkBytes + relativePathBytes > GIT_CHECK_IGNORE_MAX_STDIN_BYTES) { - chunks.push(chunk); - chunk = []; - chunkBytes = 0; - } - - chunk.push(relativePath); - chunkBytes += relativePathBytes; - - if (chunkBytes >= GIT_CHECK_IGNORE_MAX_STDIN_BYTES) { - chunks.push(chunk); - chunk = []; - chunkBytes = 0; - } - } - - if (chunk.length > 0) { - chunks.push(chunk); - } - - return chunks; -} - function parsePorcelainPath(line: string): string | null { if (line.startsWith("? ") || line.startsWith("! 
")) { const simple = line.slice(2).trim(); @@ -206,34 +150,34 @@ function parseBranchLine(line: string): { name: string; current: boolean } | nul } function filterBranchesForListQuery( - branches: ReadonlyArray, + refs: ReadonlyArray, query?: string, -): ReadonlyArray { +): ReadonlyArray { if (!query) { - return branches; + return refs; } const normalizedQuery = query.toLowerCase(); - return branches.filter((branch) => branch.name.toLowerCase().includes(normalizedQuery)); + return refs.filter((refName) => refName.name.toLowerCase().includes(normalizedQuery)); } function paginateBranches(input: { - branches: ReadonlyArray; + refs: ReadonlyArray; cursor?: number | undefined; limit?: number | undefined; }): { - branches: ReadonlyArray; + refs: ReadonlyArray; nextCursor: number | null; totalCount: number; } { const cursor = input.cursor ?? 0; const limit = input.limit ?? GIT_LIST_BRANCHES_DEFAULT_LIMIT; - const totalCount = input.branches.length; - const branches = input.branches.slice(cursor, cursor + limit); - const nextCursor = cursor + branches.length < totalCount ? cursor + branches.length : null; + const totalCount = input.refs.length; + const refs = input.refs.slice(cursor, cursor + limit); + const nextCursor = cursor + refs.length < totalCount ? 
cursor + refs.length : null; return { - branches, + refs, nextCursor, totalCount, }; @@ -274,7 +218,7 @@ function parseRemoteFetchUrls(stdout: string): Map { function parseUpstreamRefWithRemoteNames( upstreamRef: string, remoteNames: ReadonlyArray, -): { upstreamRef: string; remoteName: string; upstreamBranch: string } | null { +): { upstreamRef: string; remoteName: string; branchName: string } | null { const parsed = parseRemoteRefWithRemoteNames(upstreamRef, remoteNames); if (!parsed) { return null; @@ -283,28 +227,28 @@ function parseUpstreamRefWithRemoteNames( return { upstreamRef, remoteName: parsed.remoteName, - upstreamBranch: parsed.branchName, + branchName: parsed.branchName, }; } function parseUpstreamRefByFirstSeparator( upstreamRef: string, -): { upstreamRef: string; remoteName: string; upstreamBranch: string } | null { +): { upstreamRef: string; remoteName: string; branchName: string } | null { const separatorIndex = upstreamRef.indexOf("/"); if (separatorIndex <= 0 || separatorIndex === upstreamRef.length - 1) { return null; } const remoteName = upstreamRef.slice(0, separatorIndex).trim(); - const upstreamBranch = upstreamRef.slice(separatorIndex + 1).trim(); - if (remoteName.length === 0 || upstreamBranch.length === 0) { + const branchName = upstreamRef.slice(separatorIndex + 1).trim(); + if (remoteName.length === 0 || branchName.length === 0) { return null; } return { upstreamRef, remoteName, - upstreamBranch, + branchName, }; } @@ -316,11 +260,11 @@ function parseTrackingBranchByUpstreamRef(stdout: string, upstreamRef: string): } const [branchNameRaw, upstreamBranchRaw = ""] = trimmedLine.split("\t"); const branchName = branchNameRaw?.trim() ?? 
""; - const upstreamBranch = upstreamBranchRaw.trim(); - if (branchName.length === 0 || upstreamBranch.length === 0) { + const candidateUpstreamRef = upstreamBranchRaw.trim(); + if (branchName.length === 0 || candidateUpstreamRef.length === 0) { continue; } - if (upstreamBranch === upstreamRef) { + if (candidateUpstreamRef === upstreamRef) { return branchName; } } @@ -347,8 +291,8 @@ function parseDefaultBranchFromRemoteHeadRef(value: string, remoteName: string): if (!trimmed.startsWith(prefix)) { return null; } - const branch = trimmed.slice(prefix.length).trim(); - return branch.length > 0 ? branch : null; + const refName = trimmed.slice(prefix.length).trim(); + return refName.length > 0 ? refName : null; } function createGitCommandError( @@ -382,7 +326,7 @@ function isMissingGitCwdError(error: GitCommandError): boolean { } function toGitCommandError( - input: Pick, + input: Pick, detail: string, ) { return (cause: unknown) => @@ -402,17 +346,18 @@ interface Trace2Monitor { readonly flush: Effect.Effect; } -const nowUnixNano = (): bigint => BigInt(Date.now()) * 1_000_000n; +const nowUnixNano = DateTime.now.pipe( + Effect.map((now) => BigInt(DateTime.toEpochMillis(now)) * 1_000_000n), +); const addCurrentSpanEvent = (name: string, attributes: Record) => - Effect.currentSpan.pipe( - Effect.tap((span) => - Effect.sync(() => { - span.event(name, nowUnixNano(), compactTraceAttributes(attributes)); - }), - ), - Effect.catch(() => Effect.void), - ); + Effect.gen(function* () { + const span = yield* Effect.currentSpan; + const timestamp = yield* nowUnixNano; + yield* Effect.sync(() => { + span.event(name, timestamp, compactTraceAttributes(attributes)); + }); + }).pipe(Effect.catch(() => Effect.void)); function trace2ChildKey(record: Record): string | null { const childId = record.child_id; @@ -426,8 +371,8 @@ function trace2ChildKey(record: Record): string | null { const Trace2Record = Schema.Record(Schema.String, Schema.Unknown); const createTrace2Monitor = 
Effect.fn("createTrace2Monitor")(function* ( - input: Pick, - progress: ExecuteGitProgress | undefined, + input: Pick, + progress: GitVcsDriver.ExecuteGitProgress | undefined, ): Effect.fn.Return< Trace2Monitor, PlatformError.PlatformError, @@ -461,7 +406,7 @@ const createTrace2Monitor = Effect.fn("createTrace2Monitor")(function* ( const traceRecord = decodeJsonResult(Trace2Record)(trimmedLine); if (Result.isFailure(traceRecord)) { yield* Effect.logDebug( - `GitCore.trace2: failed to parse trace line for ${quoteGitCommand(input.args)} in ${input.cwd}`, + `GitVcsDriver.trace2: failed to parse trace line for ${quoteGitCommand(input.args)} in ${input.cwd}`, traceRecord.failure, ); return; @@ -485,7 +430,8 @@ const createTrace2Monitor = Effect.fn("createTrace2Monitor")(function* ( } if (event === "child_start") { - hookStartByChildKey.set(childKey, { hookName, startedAtMs: Date.now() }); + const now = yield* DateTime.now; + hookStartByChildKey.set(childKey, { hookName, startedAtMs: DateTime.toEpochMillis(now) }); yield* addCurrentSpanEvent("git.hook.started", { hookName, }); @@ -497,9 +443,12 @@ const createTrace2Monitor = Effect.fn("createTrace2Monitor")(function* ( if (event === "child_exit") { hookStartByChildKey.delete(childKey); - const code = traceRecord.success.code; + const code = traceRecord.success.exitCode; const exitCode = typeof code === "number" && Number.isInteger(code) ? code : null; - const durationMs = started ? Math.max(0, Date.now() - started.startedAtMs) : null; + const now = yield* DateTime.now; + const durationMs = started + ? Math.max(0, DateTime.toEpochMillis(now) - started.startedAtMs) + : null; yield* addCurrentSpanEvent("git.hook.finished", { hookName: started?.hookName ?? 
hookName, exitCode, @@ -581,7 +530,7 @@ const createTrace2Monitor = Effect.fn("createTrace2Monitor")(function* ( }); const collectOutput = Effect.fn("collectOutput")(function* ( - input: Pick, + input: Pick, stream: Stream.Stream, maxOutputBytes: number, truncateOutputAtMaxBytes: boolean, @@ -654,14 +603,14 @@ const collectOutput = Effect.fn("collectOutput")(function* ( }; }); -export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { - executeOverride?: GitCoreShape["execute"]; +export const makeGitVcsDriverCore = Effect.fn("makeGitVcsDriverCore")(function* (options?: { + executeOverride?: GitVcsDriver.GitVcsDriverShape["execute"]; }) { const fileSystem = yield* FileSystem.FileSystem; const path = yield* Path.Path; const { worktreesDir } = yield* ServerConfig; - let executeRaw: GitCoreShape["execute"]; + let executeRaw: GitVcsDriver.GitVcsDriverShape["execute"]; if (options?.executeOverride) { executeRaw = options.executeOverride; @@ -712,7 +661,6 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { input.progress?.onStderrLine, ), child.exitCode.pipe( - Effect.map((value) => Number(value)), Effect.mapError(toGitCommandError(commandInput, "failed to report exit code.")), ), input.stdin === undefined @@ -739,12 +687,12 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { } return { - code: exitCode, + exitCode, stdout: stdout.text, stderr: stderr.text, stdoutTruncated: stdout.truncated, stderrTruncated: stderr.truncated, - } satisfies ExecuteGitResult; + } satisfies GitVcsDriver.ExecuteGitResult; }); return yield* runGitCommand().pipe( @@ -768,7 +716,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { }); } - const execute: GitCoreShape["execute"] = (input) => + const execute: GitVcsDriver.GitVcsDriverShape["execute"] = (input) => executeRaw(input).pipe( withMetrics({ counter: gitCommandsTotal, @@ -792,7 +740,7 @@ export const makeGitCore = 
Effect.fn("makeGitCore")(function* (options?: { cwd: string, args: readonly string[], options: ExecuteGitOptions = {}, - ): Effect.Effect => + ): Effect.Effect => execute({ operation, cwd, @@ -807,7 +755,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { ...(options.progress ? { progress: options.progress } : {}), }).pipe( Effect.flatMap((result) => { - if (options.allowNonZeroExit || result.code === 0) { + if (options.allowNonZeroExit || result.exitCode === 0) { return Effect.succeed(result); } const stderr = result.stderr.trim(); @@ -824,7 +772,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { operation, cwd, args, - `${commandLabel(args)} failed: code=${result.code ?? "null"}`, + `${commandLabel(args)} failed: code=${result.exitCode ?? "null"}`, ), ); }), @@ -860,16 +808,16 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { ), ); - const branchExists = (cwd: string, branch: string): Effect.Effect => + const branchExists = (cwd: string, refName: string): Effect.Effect => executeGit( - "GitCore.branchExists", + "GitVcsDriver.branchExists", cwd, - ["show-ref", "--verify", "--quiet", `refs/heads/${branch}`], + ["show-ref", "--verify", "--quiet", `refs/heads/${refName}`], { allowNonZeroExit: true, timeoutMs: 5_000, }, - ).pipe(Effect.map((result) => result.code === 0)); + ).pipe(Effect.map((result) => result.exitCode === 0)); const resolveAvailableBranchName = Effect.fn("resolveAvailableBranchName")(function* ( cwd: string, @@ -889,7 +837,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { } return yield* createGitCommandError( - "GitCore.renameBranch", + "GitVcsDriver.renameBranch", cwd, ["branch", "-m", "--", desiredBranch], `Could not find an available branch name for '${desiredBranch}'.`, @@ -898,7 +846,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { const resolveCurrentUpstream = 
Effect.fn("resolveCurrentUpstream")(function* (cwd: string) { const upstreamRef = yield* runGitStdout( - "GitCore.resolveCurrentUpstream", + "GitVcsDriver.resolveCurrentUpstream", cwd, ["rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{upstream}"], true, @@ -908,7 +856,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { return null; } - const remoteNames = yield* runGitStdout("GitCore.listRemoteNames", cwd, ["remote"]).pipe( + const remoteNames = yield* runGitStdout("GitVcsDriver.listRemoteNames", cwd, ["remote"]).pipe( Effect.map(parseRemoteNames), Effect.catch(() => Effect.succeed>([])), ); @@ -925,7 +873,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { const fetchCwd = path.basename(gitCommonDir) === ".git" ? path.dirname(gitCommonDir) : gitCommonDir; return executeGit( - "GitCore.fetchRemoteForStatus", + "GitVcsDriver.fetchRemoteForStatus", fetchCwd, ["--git-dir", gitCommonDir, "fetch", "--quiet", "--no-tags", remoteName], { @@ -936,7 +884,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { }; const resolveGitCommonDir = Effect.fn("resolveGitCommonDir")(function* (cwd: string) { - const gitCommonDir = yield* runGitStdout("GitCore.resolveGitCommonDir", cwd, [ + const gitCommonDir = yield* runGitStdout("GitVcsDriver.resolveGitCommonDir", cwd, [ "rev-parse", "--git-common-dir", ]).pipe(Effect.map((stdout) => stdout.trim())); @@ -979,13 +927,13 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { remoteName: string, ): Effect.Effect => executeGit( - "GitCore.resolveDefaultBranchName", + "GitVcsDriver.resolveDefaultBranchName", cwd, ["symbolic-ref", `refs/remotes/${remoteName}/HEAD`], { allowNonZeroExit: true }, ).pipe( Effect.map((result) => { - if (result.code !== 0) { + if (result.exitCode !== 0) { return null; } return parseDefaultBranchFromRemoteHeadRef(result.stdout, remoteName); @@ -995,27 +943,36 @@ export const makeGitCore = 
Effect.fn("makeGitCore")(function* (options?: { const remoteBranchExists = ( cwd: string, remoteName: string, - branch: string, + refName: string, ): Effect.Effect => executeGit( - "GitCore.remoteBranchExists", + "GitVcsDriver.remoteBranchExists", cwd, - ["show-ref", "--verify", "--quiet", `refs/remotes/${remoteName}/${branch}`], + ["show-ref", "--verify", "--quiet", `refs/remotes/${remoteName}/${refName}`], { allowNonZeroExit: true, }, - ).pipe(Effect.map((result) => result.code === 0)); + ).pipe(Effect.map((result) => result.exitCode === 0)); const originRemoteExists = (cwd: string): Effect.Effect => - executeGit("GitCore.originRemoteExists", cwd, ["remote", "get-url", "origin"], { + executeGit("GitVcsDriver.originRemoteExists", cwd, ["remote", "get-url", "origin"], { allowNonZeroExit: true, - }).pipe(Effect.map((result) => result.code === 0)); + }).pipe(Effect.map((result) => result.exitCode === 0)); const listRemoteNames = (cwd: string): Effect.Effect, GitCommandError> => - runGitStdout("GitCore.listRemoteNames", cwd, ["remote"]).pipe( + runGitStdout("GitVcsDriver.listRemoteNames", cwd, ["remote"]).pipe( Effect.map(parseRemoteNamesInGitOrder), ); + const resolvePublishBranchName = Effect.fn("resolvePublishBranchName")(function* ( + cwd: string, + branchName: string, + ) { + const remoteNames = yield* listRemoteNames(cwd).pipe(Effect.catch(() => Effect.succeed([]))); + const parsedRemoteRef = parseRemoteRefWithRemoteNames(branchName, remoteNames); + return parsedRemoteRef?.branchName ?? 
branchName; + }); + const resolvePrimaryRemoteName = Effect.fn("resolvePrimaryRemoteName")(function* (cwd: string) { if (yield* originRemoteExists(cwd)) { return "origin"; @@ -1026,7 +983,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { return firstRemote; } return yield* createGitCommandError( - "GitCore.resolvePrimaryRemoteName", + "GitVcsDriver.resolvePrimaryRemoteName", cwd, ["remote"], "No git remote is configured for this repository.", @@ -1035,12 +992,12 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { const resolvePushRemoteName = Effect.fn("resolvePushRemoteName")(function* ( cwd: string, - branch: string, + refName: string, ) { const branchPushRemote = yield* runGitStdout( - "GitCore.resolvePushRemoteName.branchPushRemote", + "GitVcsDriver.resolvePushRemoteName.branchPushRemote", cwd, - ["config", "--get", `branch.${branch}.pushRemote`], + ["config", "--get", `branch.${refName}.pushRemote`], true, ).pipe(Effect.map((stdout) => stdout.trim())); if (branchPushRemote.length > 0) { @@ -1048,7 +1005,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { } const pushDefaultRemote = yield* runGitStdout( - "GitCore.resolvePushRemoteName.remotePushDefault", + "GitVcsDriver.resolvePushRemoteName.remotePushDefault", cwd, ["config", "--get", "remote.pushDefault"], true, @@ -1060,39 +1017,47 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { return yield* resolvePrimaryRemoteName(cwd).pipe(Effect.catch(() => Effect.succeed(null))); }); - const ensureRemote: GitCoreShape["ensureRemote"] = Effect.fn("ensureRemote")(function* (input) { - const preferredName = sanitizeRemoteName(input.preferredName); - const normalizedTargetUrl = normalizeRemoteUrl(input.url); - const remoteFetchUrls = yield* runGitStdout("GitCore.ensureRemote.listRemoteUrls", input.cwd, [ - "remote", - "-v", - ]).pipe(Effect.map((stdout) => parseRemoteFetchUrls(stdout))); - - for (const 
[remoteName, remoteUrl] of remoteFetchUrls.entries()) { - if (normalizeRemoteUrl(remoteUrl) === normalizedTargetUrl) { - return remoteName; + const ensureRemote: GitVcsDriver.GitVcsDriverShape["ensureRemote"] = Effect.fn("ensureRemote")( + function* (input) { + const preferredName = sanitizeRemoteName(input.preferredName); + const normalizedTargetUrl = normalizeRemoteUrl(input.url); + const remoteFetchUrls = yield* runGitStdout( + "GitVcsDriver.ensureRemote.listRemoteUrls", + input.cwd, + ["remote", "-v"], + ).pipe(Effect.map((stdout) => parseRemoteFetchUrls(stdout))); + + for (const [remoteName, remoteUrl] of remoteFetchUrls.entries()) { + if (normalizeRemoteUrl(remoteUrl) === normalizedTargetUrl) { + return remoteName; + } } - } - let remoteName = preferredName; - let suffix = 1; - while (remoteFetchUrls.has(remoteName)) { - remoteName = `${preferredName}-${suffix}`; - suffix += 1; - } + let remoteName = preferredName; + let suffix = 1; + while (remoteFetchUrls.has(remoteName)) { + remoteName = `${preferredName}-${suffix}`; + suffix += 1; + } - yield* runGit("GitCore.ensureRemote.add", input.cwd, ["remote", "add", remoteName, input.url]); - return remoteName; - }); + yield* runGit("GitVcsDriver.ensureRemote.add", input.cwd, [ + "remote", + "add", + remoteName, + input.url, + ]); + return remoteName; + }, + ); const resolveBaseBranchForNoUpstream = Effect.fn("resolveBaseBranchForNoUpstream")(function* ( cwd: string, - branch: string, + refName: string, ) { const configuredBaseBranch = yield* runGitStdout( - "GitCore.resolveBaseBranchForNoUpstream.config", + "GitVcsDriver.resolveBaseBranchForNoUpstream.config", cwd, - ["config", "--get", `branch.${branch}.gh-merge-base`], + ["config", "--get", `branch.${refName}.gh-merge-base`], true, ).pipe(Effect.map((stdout) => stdout.trim())); @@ -1119,7 +1084,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { : remotePrefix && candidate.startsWith(remotePrefix) ? 
candidate.slice(remotePrefix.length) : candidate; - if (normalizedCandidate.length === 0 || normalizedCandidate === branch) { + if (normalizedCandidate.length === 0 || normalizedCandidate === refName) { continue; } @@ -1140,20 +1105,20 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { const computeAheadCountAgainstBase = Effect.fn("computeAheadCountAgainstBase")(function* ( cwd: string, - branch: string, + refName: string, ) { - const baseBranch = yield* resolveBaseBranchForNoUpstream(cwd, branch); - if (!baseBranch) { + const baseRef = yield* resolveBaseBranchForNoUpstream(cwd, refName); + if (!baseRef) { return 0; } const result = yield* executeGit( - "GitCore.computeAheadCountAgainstBase", + "GitVcsDriver.computeAheadCountAgainstBase", cwd, - ["rev-list", "--count", `${baseBranch}..HEAD`], + ["rev-list", "--count", `${baseRef}..HEAD`], { allowNonZeroExit: true }, ); - if (result.code !== 0) { + if (result.exitCode !== 0) { return 0; } @@ -1163,7 +1128,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { const readBranchRecency = Effect.fn("readBranchRecency")(function* (cwd: string) { const branchRecency = yield* executeGit( - "GitCore.readBranchRecency", + "GitVcsDriver.readBranchRecency", cwd, [ "for-each-ref", @@ -1178,7 +1143,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { ); const branchLastCommit = new Map(); - if (branchRecency.code !== 0) { + if (branchRecency.exitCode !== 0) { return branchLastCommit; } @@ -1199,7 +1164,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { const readStatusDetailsLocal = Effect.fn("readStatusDetailsLocal")(function* (cwd: string) { const statusResult = yield* executeGit( - "GitCore.statusDetails.status", + "GitVcsDriver.statusDetails.status", cwd, ["status", "--porcelain=2", "--branch"], { @@ -1211,27 +1176,27 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { return 
NON_REPOSITORY_STATUS_DETAILS; } - if (statusResult.code !== 0) { + if (statusResult.exitCode !== 0) { const stderr = statusResult.stderr.trim(); return yield* createGitCommandError( - "GitCore.statusDetails.status", + "GitVcsDriver.statusDetails.status", cwd, ["status", "--porcelain=2", "--branch"], stderr || "git status failed", ); } - const [unstagedNumstatStdout, stagedNumstatStdout, defaultRefResult, hasOriginRemote] = + const [unstagedNumstatStdout, stagedNumstatStdout, defaultRefResult, hasPrimaryRemote] = yield* Effect.all( [ - runGitStdout("GitCore.statusDetails.unstagedNumstat", cwd, ["diff", "--numstat"]), - runGitStdout("GitCore.statusDetails.stagedNumstat", cwd, [ + runGitStdout("GitVcsDriver.statusDetails.unstagedNumstat", cwd, ["diff", "--numstat"]), + runGitStdout("GitVcsDriver.statusDetails.stagedNumstat", cwd, [ "diff", "--cached", "--numstat", ]), executeGit( - "GitCore.statusDetails.defaultRef", + "GitVcsDriver.statusDetails.defaultRef", cwd, ["symbolic-ref", "refs/remotes/origin/HEAD"], { @@ -1244,21 +1209,22 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { ); const statusStdout = statusResult.stdout; const defaultBranch = - defaultRefResult.code === 0 + defaultRefResult.exitCode === 0 ? defaultRefResult.stdout.trim().replace(/^refs\/remotes\/origin\//, "") : null; - let branch: string | null = null; + let refName: string | null = null; let upstreamRef: string | null = null; let aheadCount = 0; let behindCount = 0; + let aheadOfDefaultCount = 0; let hasWorkingTreeChanges = false; const changedFilesWithoutNumstat = new Set(); for (const line of statusStdout.split(/\r?\n/g)) { if (line.startsWith("# branch.head ")) { const value = line.slice("# branch.head ".length).trim(); - branch = value.startsWith("(") ? null : value; + refName = value.startsWith("(") ? 
null : value; continue; } if (line.startsWith("# branch.upstream ")) { @@ -1280,13 +1246,31 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { } } - if (!upstreamRef && branch) { - aheadCount = yield* computeAheadCountAgainstBase(cwd, branch).pipe( - Effect.catch(() => Effect.succeed(0)), - ); + const fallbackAheadCount = + !upstreamRef && refName + ? yield* computeAheadCountAgainstBase(cwd, refName).pipe( + Effect.catch(() => Effect.succeed(0)), + ) + : null; + + if (fallbackAheadCount !== null) { + aheadCount = fallbackAheadCount; behindCount = 0; } + const isDefaultBranch = + refName !== null && + (refName === defaultBranch || + (defaultBranch === null && (refName === "main" || refName === "master"))); + if (refName && !isDefaultBranch) { + aheadOfDefaultCount = + fallbackAheadCount !== null + ? fallbackAheadCount + : yield* computeAheadCountAgainstBase(cwd, refName).pipe( + Effect.catch(() => Effect.succeed(0)), + ); + } + const stagedEntries = parseNumstatEntries(stagedNumstatStdout); const unstagedEntries = parseNumstatEntries(unstagedNumstatStdout); const fileStatMap = new Map(); @@ -1315,12 +1299,9 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { return { isRepo: true, - hasOriginRemote, - isDefaultBranch: - branch !== null && - (branch === defaultBranch || - (defaultBranch === null && (branch === "main" || branch === "master"))), - branch, + hasOriginRemote: hasPrimaryRemote, + isDefaultBranch, + branch: refName, upstreamRef, hasWorkingTreeChanges, workingTree: { @@ -1331,67 +1312,71 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { hasUpstream: upstreamRef !== null, aheadCount, behindCount, + aheadOfDefaultCount, }; }); - const statusDetailsLocal: GitCoreShape["statusDetailsLocal"] = Effect.fn("statusDetailsLocal")( + const statusDetailsLocal: GitVcsDriver.GitVcsDriverShape["statusDetailsLocal"] = Effect.fn( + "statusDetailsLocal", + )(function* (cwd) { + return yield* 
readStatusDetailsLocal(cwd); + }); + + const statusDetails: GitVcsDriver.GitVcsDriverShape["statusDetails"] = Effect.fn("statusDetails")( function* (cwd) { + yield* refreshStatusUpstreamIfStale(cwd).pipe( + Effect.catchIf(isMissingGitCwdError, () => Effect.void), + Effect.ignoreCause({ log: true }), + ); return yield* readStatusDetailsLocal(cwd); }, ); - const statusDetails: GitCoreShape["statusDetails"] = Effect.fn("statusDetails")(function* (cwd) { - yield* refreshStatusUpstreamIfStale(cwd).pipe( - Effect.catchIf(isMissingGitCwdError, () => Effect.void), - Effect.ignoreCause({ log: true }), - ); - return yield* readStatusDetailsLocal(cwd); - }); - - const status: GitCoreShape["status"] = (input) => + const status: GitVcsDriver.GitVcsDriverShape["status"] = (input) => statusDetails(input.cwd).pipe( Effect.map((details) => ({ isRepo: details.isRepo, - hasOriginRemote: details.hasOriginRemote, - isDefaultBranch: details.isDefaultBranch, - branch: details.branch, + hasPrimaryRemote: details.hasOriginRemote, + isDefaultRef: details.isDefaultBranch, + refName: details.branch, hasWorkingTreeChanges: details.hasWorkingTreeChanges, workingTree: details.workingTree, hasUpstream: details.hasUpstream, aheadCount: details.aheadCount, behindCount: details.behindCount, + aheadOfDefaultCount: details.aheadOfDefaultCount, pr: null, })), ); - const prepareCommitContext: GitCoreShape["prepareCommitContext"] = Effect.fn( + const prepareCommitContext: GitVcsDriver.GitVcsDriverShape["prepareCommitContext"] = Effect.fn( "prepareCommitContext", )(function* (cwd, filePaths) { if (filePaths && filePaths.length > 0) { - yield* runGit("GitCore.prepareCommitContext.reset", cwd, ["reset"]).pipe( + yield* runGit("GitVcsDriver.prepareCommitContext.reset", cwd, ["reset"]).pipe( Effect.catch(() => Effect.void), ); - yield* runGit("GitCore.prepareCommitContext.addSelected", cwd, [ + yield* runGit("GitVcsDriver.prepareCommitContext.addSelected", cwd, [ "add", "-A", "--", ...filePaths, ]); } else { 
- yield* runGit("GitCore.prepareCommitContext.addAll", cwd, ["add", "-A"]); + yield* runGit("GitVcsDriver.prepareCommitContext.addAll", cwd, ["add", "-A"]); } - const stagedSummary = yield* runGitStdout("GitCore.prepareCommitContext.stagedSummary", cwd, [ - "diff", - "--cached", - "--name-status", - ]).pipe(Effect.map((stdout) => stdout.trim())); + const stagedSummary = yield* runGitStdout( + "GitVcsDriver.prepareCommitContext.stagedSummary", + cwd, + ["diff", "--cached", "--name-status"], + ).pipe(Effect.map((stdout) => stdout.trim())); if (stagedSummary.length === 0) { return null; } const stagedPatch = yield* runGitStdoutWithOptions( - "GitCore.prepareCommitContext.stagedPatch", + "GitVcsDriver.prepareCommitContext.stagedPatch", cwd, ["diff", "--cached", "--patch", "--minimal"], { @@ -1406,11 +1391,11 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { }; }); - const commit: GitCoreShape["commit"] = Effect.fn("commit")(function* ( + const commit: GitVcsDriver.GitVcsDriverShape["commit"] = Effect.fn("commit")(function* ( cwd, subject, body, - options?: GitCommitOptions, + options?: GitVcsDriver.GitCommitOptions, ) { const args = ["commit", "-m", subject]; const trimmedBody = body.trim(); @@ -1427,11 +1412,11 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { onStderrLine: (line: string) => options.progress?.onOutputLine?.({ stream: "stderr", text: line }) ?? Effect.void, }; - yield* executeGit("GitCore.commit.commit", cwd, args, { + yield* executeGit("GitVcsDriver.commit.commit", cwd, args, { ...(options?.timeoutMs !== undefined ? { timeoutMs: options.timeoutMs } : {}), ...(progress ? 
{ progress } : {}), }).pipe(Effect.asVoid); - const commitSha = yield* runGitStdout("GitCore.commit.revParseHead", cwd, [ + const commitSha = yield* runGitStdout("GitVcsDriver.commit.revParseHead", cwd, [ "rev-parse", "HEAD", ]).pipe(Effect.map((stdout) => stdout.trim())); @@ -1439,656 +1424,610 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { return { commitSha }; }); - const pushCurrentBranch: GitCoreShape["pushCurrentBranch"] = Effect.fn("pushCurrentBranch")( - function* (cwd, fallbackBranch) { - const details = yield* statusDetails(cwd); - const branch = details.branch ?? fallbackBranch; - if (!branch) { - return yield* createGitCommandError( - "GitCore.pushCurrentBranch", - cwd, - ["push"], - "Cannot push from detached HEAD.", - ); - } - - const hasNoLocalDelta = details.aheadCount === 0 && details.behindCount === 0; - if (hasNoLocalDelta) { - if (details.hasUpstream) { - return { - status: "skipped_up_to_date" as const, - branch, - ...(details.upstreamRef ? { upstreamBranch: details.upstreamRef } : {}), - }; - } - - const comparableBaseBranch = yield* resolveBaseBranchForNoUpstream(cwd, branch).pipe( - Effect.catch(() => Effect.succeed(null)), - ); - if (comparableBaseBranch) { - const publishRemoteName = yield* resolvePushRemoteName(cwd, branch).pipe( - Effect.catch(() => Effect.succeed(null)), - ); - if (!publishRemoteName) { - return { - status: "skipped_up_to_date" as const, - branch, - }; - } + const pushCurrentBranch: GitVcsDriver.GitVcsDriverShape["pushCurrentBranch"] = Effect.fn( + "pushCurrentBranch", + )(function* (cwd, fallbackBranch, options) { + const details = yield* statusDetails(cwd); + const branch = details.branch ?? 
fallbackBranch; + if (!branch) { + return yield* createGitCommandError( + "GitVcsDriver.pushCurrentBranch", + cwd, + ["push"], + "Cannot push from detached HEAD.", + ); + } - const hasRemoteBranch = yield* remoteBranchExists(cwd, publishRemoteName, branch).pipe( - Effect.catch(() => Effect.succeed(false)), - ); - if (hasRemoteBranch) { - return { - status: "skipped_up_to_date" as const, - branch, - }; - } - } - } + const requestedRemoteName = options?.remoteName?.trim() || null; + if (requestedRemoteName) { + const publishBranch = yield* resolvePublishBranchName(cwd, branch); + yield* runGit("GitVcsDriver.pushCurrentBranch.pushWithRequestedRemote", cwd, [ + "push", + "-u", + requestedRemoteName, + `HEAD:refs/heads/${publishBranch}`, + ]); + return { + status: "pushed" as const, + branch, + upstreamBranch: `${requestedRemoteName}/${publishBranch}`, + setUpstream: true, + }; + } - if (!details.hasUpstream) { - const publishRemoteName = yield* resolvePushRemoteName(cwd, branch); - if (!publishRemoteName) { - return yield* createGitCommandError( - "GitCore.pushCurrentBranch", - cwd, - ["push"], - "Cannot push because no git remote is configured for this repository.", - ); - } - yield* runGit("GitCore.pushCurrentBranch.pushWithUpstream", cwd, [ - "push", - "-u", - publishRemoteName, - `HEAD:refs/heads/${branch}`, - ]); + const hasNoLocalDelta = details.aheadCount === 0 && details.behindCount === 0; + if (hasNoLocalDelta) { + if (details.hasUpstream) { return { - status: "pushed" as const, + status: "skipped_up_to_date" as const, branch, - upstreamBranch: `${publishRemoteName}/${branch}`, - setUpstream: true, + ...(details.upstreamRef ? 
{ upstreamBranch: details.upstreamRef } : {}), }; } - const currentUpstream = yield* resolveCurrentUpstream(cwd).pipe( + const comparableBaseBranch = yield* resolveBaseBranchForNoUpstream(cwd, branch).pipe( Effect.catch(() => Effect.succeed(null)), ); - if (currentUpstream) { - yield* runGit("GitCore.pushCurrentBranch.pushUpstream", cwd, [ - "push", - currentUpstream.remoteName, - `HEAD:${currentUpstream.upstreamBranch}`, - ]); - return { - status: "pushed" as const, - branch, - upstreamBranch: currentUpstream.upstreamRef, - setUpstream: false, - }; - } - - yield* runGit("GitCore.pushCurrentBranch.push", cwd, ["push"]); - return { - status: "pushed" as const, - branch, - ...(details.upstreamRef ? { upstreamBranch: details.upstreamRef } : {}), - setUpstream: false, - }; - }, - ); + if (comparableBaseBranch) { + const publishRemoteName = yield* resolvePushRemoteName(cwd, branch).pipe( + Effect.catch(() => Effect.succeed(null)), + ); + if (!publishRemoteName) { + return { + status: "skipped_up_to_date" as const, + branch, + }; + } - const pullCurrentBranch: GitCoreShape["pullCurrentBranch"] = Effect.fn("pullCurrentBranch")( - function* (cwd) { - const details = yield* statusDetails(cwd); - const branch = details.branch; - if (!branch) { - return yield* createGitCommandError( - "GitCore.pullCurrentBranch", - cwd, - ["pull", "--ff-only"], - "Cannot pull from detached HEAD.", + const hasRemoteBranch = yield* remoteBranchExists(cwd, publishRemoteName, branch).pipe( + Effect.catch(() => Effect.succeed(false)), ); + if (hasRemoteBranch) { + return { + status: "skipped_up_to_date" as const, + branch, + }; + } } - if (!details.hasUpstream) { + } + + if (!details.hasUpstream) { + const publishRemoteName = yield* resolvePushRemoteName(cwd, branch); + if (!publishRemoteName) { return yield* createGitCommandError( - "GitCore.pullCurrentBranch", + "GitVcsDriver.pushCurrentBranch", cwd, - ["pull", "--ff-only"], - "Current branch has no upstream configured. 
Push with upstream first.", + ["push"], + "Cannot push because no git remote is configured for this repository.", ); } - const beforeSha = yield* runGitStdout( - "GitCore.pullCurrentBranch.beforeSha", - cwd, - ["rev-parse", "HEAD"], - true, - ).pipe(Effect.map((stdout) => stdout.trim())); - yield* executeGit("GitCore.pullCurrentBranch.pull", cwd, ["pull", "--ff-only"], { - timeoutMs: 30_000, - fallbackErrorMessage: "git pull failed", - }); - const afterSha = yield* runGitStdout( - "GitCore.pullCurrentBranch.afterSha", - cwd, - ["rev-parse", "HEAD"], - true, - ).pipe(Effect.map((stdout) => stdout.trim())); - - const refreshed = yield* statusDetails(cwd); + const publishBranch = yield* resolvePublishBranchName(cwd, branch); + yield* runGit("GitVcsDriver.pushCurrentBranch.pushWithUpstream", cwd, [ + "push", + "-u", + publishRemoteName, + `HEAD:refs/heads/${publishBranch}`, + ]); return { - status: beforeSha.length > 0 && beforeSha === afterSha ? "skipped_up_to_date" : "pulled", + status: "pushed" as const, branch, - upstreamBranch: refreshed.upstreamRef, + upstreamBranch: `${publishRemoteName}/${publishBranch}`, + setUpstream: true, }; - }, - ); - - const readRangeContext: GitCoreShape["readRangeContext"] = Effect.fn("readRangeContext")( - function* (cwd, baseBranch) { - const range = `${baseBranch}..HEAD`; - const [commitSummary, diffSummary, diffPatch] = yield* Effect.all( - [ - runGitStdoutWithOptions( - "GitCore.readRangeContext.log", - cwd, - ["log", "--oneline", range], - { - maxOutputBytes: RANGE_COMMIT_SUMMARY_MAX_OUTPUT_BYTES, - truncateOutputAtMaxBytes: true, - }, - ), - runGitStdoutWithOptions( - "GitCore.readRangeContext.diffStat", - cwd, - ["diff", "--stat", range], - { - maxOutputBytes: RANGE_DIFF_SUMMARY_MAX_OUTPUT_BYTES, - truncateOutputAtMaxBytes: true, - }, - ), - runGitStdoutWithOptions( - "GitCore.readRangeContext.diffPatch", - cwd, - ["diff", "--patch", "--minimal", range], - { - maxOutputBytes: RANGE_DIFF_PATCH_MAX_OUTPUT_BYTES, - 
truncateOutputAtMaxBytes: true, - }, - ), - ], - { concurrency: "unbounded" }, - ); + } + const currentUpstream = yield* resolveCurrentUpstream(cwd).pipe( + Effect.catch(() => Effect.succeed(null)), + ); + if (currentUpstream) { + yield* runGit("GitVcsDriver.pushCurrentBranch.pushUpstream", cwd, [ + "push", + currentUpstream.remoteName, + `HEAD:refs/heads/${currentUpstream.branchName}`, + ]); return { - commitSummary, - diffSummary, - diffPatch, + status: "pushed" as const, + branch, + upstreamBranch: currentUpstream.upstreamRef, + setUpstream: false, }; - }, - ); - - const readConfigValue: GitCoreShape["readConfigValue"] = (cwd, key) => - runGitStdout("GitCore.readConfigValue", cwd, ["config", "--get", key], true).pipe( - Effect.map((stdout) => stdout.trim()), - Effect.map((trimmed) => (trimmed.length > 0 ? trimmed : null)), - ); + } - const isInsideWorkTree: GitCoreShape["isInsideWorkTree"] = (cwd) => - executeGit("GitCore.isInsideWorkTree", cwd, ["rev-parse", "--is-inside-work-tree"], { - allowNonZeroExit: true, - timeoutMs: 5_000, - maxOutputBytes: 4_096, - }).pipe(Effect.map((result) => result.code === 0 && result.stdout.trim() === "true")); + yield* runGit("GitVcsDriver.pushCurrentBranch.push", cwd, ["push"]); + return { + status: "pushed" as const, + branch, + ...(details.upstreamRef ? 
{ upstreamBranch: details.upstreamRef } : {}), + setUpstream: false, + }; + }); - const listWorkspaceFiles: GitCoreShape["listWorkspaceFiles"] = (cwd) => - executeGit( - "GitCore.listWorkspaceFiles", + const pullCurrentBranch: GitVcsDriver.GitVcsDriverShape["pullCurrentBranch"] = Effect.fn( + "pullCurrentBranch", + )(function* (cwd) { + const details = yield* statusDetails(cwd); + const refName = details.branch; + if (!refName) { + return yield* createGitCommandError( + "GitVcsDriver.pullCurrentBranch", + cwd, + ["pull", "--ff-only"], + "Cannot pull from detached HEAD.", + ); + } + if (!details.hasUpstream) { + return yield* createGitCommandError( + "GitVcsDriver.pullCurrentBranch", + cwd, + ["pull", "--ff-only"], + "Current branch has no upstream configured. Push with upstream first.", + ); + } + const beforeSha = yield* runGitStdout( + "GitVcsDriver.pullCurrentBranch.beforeSha", cwd, - [ - ...WORKSPACE_GIT_HARDENED_CONFIG_ARGS, - "ls-files", - "--cached", - "--others", - "--exclude-standard", - "-z", - ], - { - allowNonZeroExit: true, - timeoutMs: 20_000, - maxOutputBytes: WORKSPACE_FILES_MAX_OUTPUT_BYTES, - truncateOutputAtMaxBytes: true, - }, - ).pipe( - Effect.flatMap((result) => - result.code === 0 - ? Effect.succeed({ - paths: splitNullSeparatedPaths(result.stdout, result.stdoutTruncated), - truncated: result.stdoutTruncated, - }) - : Effect.fail( - createGitCommandError( - "GitCore.listWorkspaceFiles", - cwd, - [ - ...WORKSPACE_GIT_HARDENED_CONFIG_ARGS, - "ls-files", - "--cached", - "--others", - "--exclude-standard", - "-z", - ], - result.stderr.trim().length > 0 ? 
result.stderr.trim() : "git ls-files failed", - ), - ), - ), - ); - - const filterIgnoredPaths: GitCoreShape["filterIgnoredPaths"] = (cwd, relativePaths) => - Effect.gen(function* () { - if (relativePaths.length === 0) { - return relativePaths; - } + ["rev-parse", "HEAD"], + true, + ).pipe(Effect.map((stdout) => stdout.trim())); + yield* executeGit("GitVcsDriver.pullCurrentBranch.pull", cwd, ["pull", "--ff-only"], { + timeoutMs: 30_000, + fallbackErrorMessage: "git pull failed", + }); + const afterSha = yield* runGitStdout( + "GitVcsDriver.pullCurrentBranch.afterSha", + cwd, + ["rev-parse", "HEAD"], + true, + ).pipe(Effect.map((stdout) => stdout.trim())); - const ignoredPaths = new Set(); - const chunks = chunkPathsForGitCheckIgnore(relativePaths); + const refreshed = yield* statusDetails(cwd); + return { + status: beforeSha.length > 0 && beforeSha === afterSha ? "skipped_up_to_date" : "pulled", + refName, + upstreamRef: refreshed.upstreamRef, + }; + }); - for (const chunk of chunks) { - const result = yield* executeGit( - "GitCore.filterIgnoredPaths", + const readRangeContext: GitVcsDriver.GitVcsDriverShape["readRangeContext"] = Effect.fn( + "readRangeContext", + )(function* (cwd, baseRef) { + const range = `${baseRef}..HEAD`; + const [commitSummary, diffSummary, diffPatch] = yield* Effect.all( + [ + runGitStdoutWithOptions( + "GitVcsDriver.readRangeContext.log", cwd, - [...WORKSPACE_GIT_HARDENED_CONFIG_ARGS, "check-ignore", "--no-index", "-z", "--stdin"], + ["log", "--oneline", range], { - stdin: `${chunk.join("\0")}\0`, - allowNonZeroExit: true, - timeoutMs: 20_000, - maxOutputBytes: WORKSPACE_FILES_MAX_OUTPUT_BYTES, + maxOutputBytes: RANGE_COMMIT_SUMMARY_MAX_OUTPUT_BYTES, truncateOutputAtMaxBytes: true, }, - ); - - if (result.code !== 0 && result.code !== 1) { - return yield* createGitCommandError( - "GitCore.filterIgnoredPaths", - cwd, - [...WORKSPACE_GIT_HARDENED_CONFIG_ARGS, "check-ignore", "--no-index", "-z", "--stdin"], - result.stderr.trim().length > 0 ? 
result.stderr.trim() : "git check-ignore failed", - ); - } - - for (const ignoredPath of splitNullSeparatedPaths(result.stdout, result.stdoutTruncated)) { - ignoredPaths.add(ignoredPath); - } - } - - if (ignoredPaths.size === 0) { - return relativePaths; - } + ), + runGitStdoutWithOptions( + "GitVcsDriver.readRangeContext.diffStat", + cwd, + ["diff", "--stat", range], + { + maxOutputBytes: RANGE_DIFF_SUMMARY_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ), + runGitStdoutWithOptions( + "GitVcsDriver.readRangeContext.diffPatch", + cwd, + ["diff", "--patch", "--minimal", range], + { + maxOutputBytes: RANGE_DIFF_PATCH_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ), + ], + { concurrency: "unbounded" }, + ); - return relativePaths.filter((relativePath) => !ignoredPaths.has(relativePath)); - }); + return { + commitSummary, + diffSummary, + diffPatch, + }; + }); - const listBranches: GitCoreShape["listBranches"] = Effect.fn("listBranches")(function* (input) { - const branchRecencyPromise = readBranchRecency(input.cwd).pipe( - Effect.catch(() => Effect.succeed(new Map())), - ); - const localBranchResult = yield* executeGit( - "GitCore.listBranches.branchNoColor", - input.cwd, - ["branch", "--no-color", "--no-column"], - { - timeoutMs: 10_000, - allowNonZeroExit: true, - }, - ).pipe( - Effect.catchIf(isMissingGitCwdError, () => - Effect.succeed({ - code: 128, - stdout: "", - stderr: "fatal: not a git repository", - stdoutTruncated: false, - stderrTruncated: false, - }), - ), + const readConfigValue: GitVcsDriver.GitVcsDriverShape["readConfigValue"] = (cwd, key) => + runGitStdout("GitVcsDriver.readConfigValue", cwd, ["config", "--get", key], true).pipe( + Effect.map((stdout) => stdout.trim()), + Effect.map((trimmed) => (trimmed.length > 0 ? 
trimmed : null)), ); - if (localBranchResult.code !== 0) { - const stderr = localBranchResult.stderr.trim(); - if (stderr.toLowerCase().includes("not a git repository")) { - return { - branches: [], - isRepo: false, - hasOriginRemote: false, - nextCursor: null, - totalCount: 0, - }; - } - return yield* createGitCommandError( - "GitCore.listBranches", + const listRefs: GitVcsDriver.GitVcsDriverShape["listRefs"] = Effect.fn("listRefs")( + function* (input) { + const branchRecencyPromise = readBranchRecency(input.cwd).pipe( + Effect.catch(() => Effect.succeed(new Map())), + ); + const localBranchResult = yield* executeGit( + "GitVcsDriver.listRefs.branchNoColor", input.cwd, ["branch", "--no-color", "--no-column"], - stderr || "git branch failed", + { + timeoutMs: 10_000, + allowNonZeroExit: true, + }, + ).pipe( + Effect.catchIf(isMissingGitCwdError, () => + Effect.succeed({ + exitCode: ChildProcessSpawner.ExitCode(128), + stdout: "", + stderr: "fatal: not a git repository", + stdoutTruncated: false, + stderrTruncated: false, + }), + ), ); - } - - const remoteBranchResultEffect = executeGit( - "GitCore.listBranches.remoteBranches", - input.cwd, - ["branch", "--no-color", "--no-column", "--remotes"], - { - timeoutMs: 10_000, - allowNonZeroExit: true, - }, - ).pipe( - Effect.catch((error) => - Effect.logWarning( - `GitCore.listBranches: remote branch lookup failed for ${input.cwd}: ${error.message}. Falling back to an empty remote branch list.`, - ).pipe(Effect.as({ code: 1, stdout: "", stderr: "" })), - ), - ); - const remoteNamesResultEffect = executeGit( - "GitCore.listBranches.remoteNames", - input.cwd, - ["remote"], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, - ).pipe( - Effect.catch((error) => - Effect.logWarning( - `GitCore.listBranches: remote name lookup failed for ${input.cwd}: ${error.message}. 
Falling back to an empty remote name list.`, - ).pipe(Effect.as({ code: 1, stdout: "", stderr: "" })), - ), - ); + if (localBranchResult.exitCode !== 0) { + const stderr = localBranchResult.stderr.trim(); + if (stderr.toLowerCase().includes("not a git repository")) { + return { + refs: [], + isRepo: false, + hasPrimaryRemote: false, + nextCursor: null, + totalCount: 0, + }; + } + return yield* createGitCommandError( + "GitVcsDriver.listRefs", + input.cwd, + ["branch", "--no-color", "--no-column"], + stderr || "git branch failed", + ); + } - const [defaultRef, worktreeList, remoteBranchResult, remoteNamesResult, branchLastCommit] = - yield* Effect.all( - [ - executeGit( - "GitCore.listBranches.defaultRef", - input.cwd, - ["symbolic-ref", "refs/remotes/origin/HEAD"], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, - ), - executeGit( - "GitCore.listBranches.worktreeList", - input.cwd, - ["worktree", "list", "--porcelain"], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, + const remoteBranchResultEffect = executeGit( + "GitVcsDriver.listRefs.remoteBranches", + input.cwd, + ["branch", "--no-color", "--no-column", "--remotes"], + { + timeoutMs: 10_000, + allowNonZeroExit: true, + }, + ).pipe( + Effect.catch((error) => + Effect.logWarning( + `GitVcsDriver.listRefs: remote refName lookup failed for ${input.cwd}: ${error.message}. Falling back to an empty remote refName list.`, + ).pipe( + Effect.as({ + exitCode: ChildProcessSpawner.ExitCode(1), + stdout: "", + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, + } satisfies GitVcsDriver.ExecuteGitResult), ), - remoteBranchResultEffect, - remoteNamesResultEffect, - branchRecencyPromise, - ], - { concurrency: "unbounded" }, + ), ); - const remoteNames = - remoteNamesResult.code === 0 ? 
parseRemoteNames(remoteNamesResult.stdout) : []; - if (remoteBranchResult.code !== 0 && remoteBranchResult.stderr.trim().length > 0) { - yield* Effect.logWarning( - `GitCore.listBranches: remote branch lookup returned code ${remoteBranchResult.code} for ${input.cwd}: ${remoteBranchResult.stderr.trim()}. Falling back to an empty remote branch list.`, - ); - } - if (remoteNamesResult.code !== 0 && remoteNamesResult.stderr.trim().length > 0) { - yield* Effect.logWarning( - `GitCore.listBranches: remote name lookup returned code ${remoteNamesResult.code} for ${input.cwd}: ${remoteNamesResult.stderr.trim()}. Falling back to an empty remote name list.`, + const remoteNamesResultEffect = executeGit( + "GitVcsDriver.listRefs.remoteNames", + input.cwd, + ["remote"], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ).pipe( + Effect.catch((error) => + Effect.logWarning( + `GitVcsDriver.listRefs: remote name lookup failed for ${input.cwd}: ${error.message}. Falling back to an empty remote name list.`, + ).pipe( + Effect.as({ + exitCode: ChildProcessSpawner.ExitCode(1), + stdout: "", + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, + } satisfies GitVcsDriver.ExecuteGitResult), + ), + ), ); - } - const defaultBranch = - defaultRef.code === 0 - ? 
defaultRef.stdout.trim().replace(/^refs\/remotes\/origin\//, "") - : null; + const [defaultRef, worktreeList, remoteBranchResult, remoteNamesResult, branchLastCommit] = + yield* Effect.all( + [ + executeGit( + "GitVcsDriver.listRefs.defaultRef", + input.cwd, + ["symbolic-ref", "refs/remotes/origin/HEAD"], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ), + executeGit( + "GitVcsDriver.listRefs.worktreeList", + input.cwd, + ["worktree", "list", "--porcelain"], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ), + remoteBranchResultEffect, + remoteNamesResultEffect, + branchRecencyPromise, + ], + { concurrency: "unbounded" }, + ); - const worktreeMap = new Map(); - if (worktreeList.code === 0) { - let currentPath: string | null = null; - for (const line of worktreeList.stdout.split("\n")) { - if (line.startsWith("worktree ")) { - const candidatePath = line.slice("worktree ".length); - const exists = yield* fileSystem.stat(candidatePath).pipe( - Effect.map(() => true), - Effect.catch(() => Effect.succeed(false)), - ); - currentPath = exists ? candidatePath : null; - } else if (line.startsWith("branch refs/heads/") && currentPath) { - worktreeMap.set(line.slice("branch refs/heads/".length), currentPath); - } else if (line === "") { - currentPath = null; + const remoteNames = + remoteNamesResult.exitCode === 0 ? parseRemoteNames(remoteNamesResult.stdout) : []; + if (remoteBranchResult.exitCode !== 0 && remoteBranchResult.stderr.trim().length > 0) { + yield* Effect.logWarning( + `GitVcsDriver.listRefs: remote refName lookup returned code ${remoteBranchResult.exitCode} for ${input.cwd}: ${remoteBranchResult.stderr.trim()}. Falling back to an empty remote refName list.`, + ); + } + if (remoteNamesResult.exitCode !== 0 && remoteNamesResult.stderr.trim().length > 0) { + yield* Effect.logWarning( + `GitVcsDriver.listRefs: remote name lookup returned code ${remoteNamesResult.exitCode} for ${input.cwd}: ${remoteNamesResult.stderr.trim()}. 
Falling back to an empty remote name list.`, + ); + } + + const defaultBranch = + defaultRef.exitCode === 0 + ? defaultRef.stdout.trim().replace(/^refs\/remotes\/origin\//, "") + : null; + + const worktreeMap = new Map(); + if (worktreeList.exitCode === 0) { + let currentPath: string | null = null; + for (const line of worktreeList.stdout.split("\n")) { + if (line.startsWith("worktree ")) { + const candidatePath = line.slice("worktree ".length); + const exists = yield* fileSystem.stat(candidatePath).pipe( + Effect.map(() => true), + Effect.catch(() => Effect.succeed(false)), + ); + currentPath = exists ? candidatePath : null; + } else if (line.startsWith("branch refs/heads/") && currentPath) { + worktreeMap.set(line.slice("branch refs/heads/".length), currentPath); + } else if (line === "") { + currentPath = null; + } } } - } - const localBranches = localBranchResult.stdout - .split("\n") - .map(parseBranchLine) - .filter((branch): branch is { name: string; current: boolean } => branch !== null) - .map((branch) => ({ - name: branch.name, - current: branch.current, - isRemote: false, - isDefault: branch.name === defaultBranch, - worktreePath: worktreeMap.get(branch.name) ?? null, - })) - .toSorted((a, b) => { - const aPriority = a.current ? 0 : a.isDefault ? 1 : 2; - const bPriority = b.current ? 0 : b.isDefault ? 1 : 2; - if (aPriority !== bPriority) return aPriority - bPriority; - - const aLastCommit = branchLastCommit.get(a.name) ?? 0; - const bLastCommit = branchLastCommit.get(b.name) ?? 0; - if (aLastCommit !== bLastCommit) return bLastCommit - aLastCommit; - return a.name.localeCompare(b.name); + const localBranches = localBranchResult.stdout + .split("\n") + .map(parseBranchLine) + .filter((refName): refName is { name: string; current: boolean } => refName !== null) + .map((refName) => ({ + name: refName.name, + current: refName.current, + isRemote: false, + isDefault: refName.name === defaultBranch, + worktreePath: worktreeMap.get(refName.name) ?? 
null, + })) + .toSorted((a, b) => { + const aPriority = a.current ? 0 : a.isDefault ? 1 : 2; + const bPriority = b.current ? 0 : b.isDefault ? 1 : 2; + if (aPriority !== bPriority) return aPriority - bPriority; + + const aLastCommit = branchLastCommit.get(a.name) ?? 0; + const bLastCommit = branchLastCommit.get(b.name) ?? 0; + if (aLastCommit !== bLastCommit) return bLastCommit - aLastCommit; + return a.name.localeCompare(b.name); + }); + + const remoteBranches = + remoteBranchResult.exitCode === 0 + ? remoteBranchResult.stdout + .split("\n") + .map(parseBranchLine) + .filter((refName): refName is { name: string; current: boolean } => refName !== null) + .map((refName) => { + const parsedRemoteRef = parseRemoteRefWithRemoteNames(refName.name, remoteNames); + const remoteBranch: { + name: string; + current: boolean; + isRemote: boolean; + remoteName?: string; + isDefault: boolean; + worktreePath: string | null; + } = { + name: refName.name, + current: false, + isRemote: true, + isDefault: false, + worktreePath: null, + }; + if (parsedRemoteRef) { + remoteBranch.remoteName = parsedRemoteRef.remoteName; + } + return remoteBranch; + }) + .toSorted((a, b) => { + const aLastCommit = branchLastCommit.get(a.name) ?? 0; + const bLastCommit = branchLastCommit.get(b.name) ?? 0; + if (aLastCommit !== bLastCommit) return bLastCommit - aLastCommit; + return a.name.localeCompare(b.name); + }) + : []; + + const refs = paginateBranches({ + refs: filterBranchesForListQuery( + dedupeRemoteBranchesWithLocalMatches([...localBranches, ...remoteBranches]), + input.query, + ), + cursor: input.cursor, + limit: input.limit, }); - const remoteBranches = - remoteBranchResult.code === 0 - ? 
remoteBranchResult.stdout - .split("\n") - .map(parseBranchLine) - .filter((branch): branch is { name: string; current: boolean } => branch !== null) - .map((branch) => { - const parsedRemoteRef = parseRemoteRefWithRemoteNames(branch.name, remoteNames); - const remoteBranch: { - name: string; - current: boolean; - isRemote: boolean; - remoteName?: string; - isDefault: boolean; - worktreePath: string | null; - } = { - name: branch.name, - current: false, - isRemote: true, - isDefault: false, - worktreePath: null, - }; - if (parsedRemoteRef) { - remoteBranch.remoteName = parsedRemoteRef.remoteName; - } - return remoteBranch; - }) - .toSorted((a, b) => { - const aLastCommit = branchLastCommit.get(a.name) ?? 0; - const bLastCommit = branchLastCommit.get(b.name) ?? 0; - if (aLastCommit !== bLastCommit) return bLastCommit - aLastCommit; - return a.name.localeCompare(b.name); - }) - : []; - - const branches = paginateBranches({ - branches: filterBranchesForListQuery( - dedupeRemoteBranchesWithLocalMatches([...localBranches, ...remoteBranches]), - input.query, - ), - cursor: input.cursor, - limit: input.limit, + return { + refs: [...refs.refs], + isRepo: true, + hasPrimaryRemote: remoteNames.includes("origin"), + nextCursor: refs.nextCursor, + totalCount: refs.totalCount, + }; + }, + ); + + const createWorktree: GitVcsDriver.GitVcsDriverShape["createWorktree"] = Effect.fn( + "createWorktree", + )(function* (input) { + const targetBranch = input.newRefName ?? input.refName; + const sanitizedBranch = targetBranch.replace(/\//g, "-"); + const repoName = path.basename(input.cwd); + const worktreePath = input.path ?? path.join(worktreesDir, repoName, sanitizedBranch); + const args = input.newRefName + ? 
["worktree", "add", "-b", input.newRefName, worktreePath, input.refName] + : ["worktree", "add", worktreePath, input.refName]; + + yield* executeGit("GitVcsDriver.createWorktree", input.cwd, args, { + fallbackErrorMessage: "git worktree add failed", }); return { - branches: [...branches.branches], - isRepo: true, - hasOriginRemote: remoteNames.includes("origin"), - nextCursor: branches.nextCursor, - totalCount: branches.totalCount, + worktree: { + path: worktreePath, + refName: targetBranch, + }, }; }); - const createWorktree: GitCoreShape["createWorktree"] = Effect.fn("createWorktree")( - function* (input) { - const targetBranch = input.newBranch ?? input.branch; - const sanitizedBranch = targetBranch.replace(/\//g, "-"); - const repoName = path.basename(input.cwd); - const worktreePath = input.path ?? path.join(worktreesDir, repoName, sanitizedBranch); - const args = input.newBranch - ? ["worktree", "add", "-b", input.newBranch, worktreePath, input.branch] - : ["worktree", "add", worktreePath, input.branch]; - - yield* executeGit("GitCore.createWorktree", input.cwd, args, { - fallbackErrorMessage: "git worktree add failed", - }); - - return { - worktree: { - path: worktreePath, - branch: targetBranch, + const fetchPullRequestBranch: GitVcsDriver.GitVcsDriverShape["fetchPullRequestBranch"] = + Effect.fn("fetchPullRequestBranch")(function* (input) { + const remoteName = yield* resolvePrimaryRemoteName(input.cwd); + yield* executeGit( + "GitVcsDriver.fetchPullRequestBranch", + input.cwd, + [ + "fetch", + "--quiet", + "--no-tags", + remoteName, + `+refs/pull/${input.prNumber}/head:refs/heads/${input.branch}`, + ], + { + fallbackErrorMessage: "git fetch pull request branch failed", }, - }; - }, - ); + ); + }); - const fetchPullRequestBranch: GitCoreShape["fetchPullRequestBranch"] = Effect.fn( - "fetchPullRequestBranch", + const fetchRemoteBranch: GitVcsDriver.GitVcsDriverShape["fetchRemoteBranch"] = Effect.fn( + "fetchRemoteBranch", )(function* (input) { - const 
remoteName = yield* resolvePrimaryRemoteName(input.cwd); - yield* executeGit( - "GitCore.fetchPullRequestBranch", + yield* runGit("GitVcsDriver.fetchRemoteBranch.fetch", input.cwd, [ + "fetch", + "--quiet", + "--no-tags", + input.remoteName, + `+refs/heads/${input.remoteBranch}:refs/remotes/${input.remoteName}/${input.remoteBranch}`, + ]); + + const localBranchAlreadyExists = yield* branchExists(input.cwd, input.localBranch); + const targetRef = `${input.remoteName}/${input.remoteBranch}`; + yield* runGit( + "GitVcsDriver.fetchRemoteBranch.materialize", input.cwd, - [ - "fetch", - "--quiet", - "--no-tags", - remoteName, - `+refs/pull/${input.prNumber}/head:refs/heads/${input.branch}`, - ], - { - fallbackErrorMessage: "git fetch pull request branch failed", - }, + localBranchAlreadyExists + ? ["branch", "--force", input.localBranch, targetRef] + : ["branch", input.localBranch, targetRef], ); }); - const fetchRemoteBranch: GitCoreShape["fetchRemoteBranch"] = Effect.fn("fetchRemoteBranch")( - function* (input) { - yield* runGit("GitCore.fetchRemoteBranch.fetch", input.cwd, [ + const fetchRemoteTrackingBranch: GitVcsDriver.GitVcsDriverShape["fetchRemoteTrackingBranch"] = + Effect.fn("fetchRemoteTrackingBranch")(function* (input) { + yield* runGit("GitVcsDriver.fetchRemoteTrackingBranch", input.cwd, [ "fetch", "--quiet", "--no-tags", input.remoteName, `+refs/heads/${input.remoteBranch}:refs/remotes/${input.remoteName}/${input.remoteBranch}`, ]); + }); - const localBranchAlreadyExists = yield* branchExists(input.cwd, input.localBranch); - const targetRef = `${input.remoteName}/${input.remoteBranch}`; - yield* runGit( - "GitCore.fetchRemoteBranch.materialize", - input.cwd, - localBranchAlreadyExists - ? 
["branch", "--force", input.localBranch, targetRef] - : ["branch", input.localBranch, targetRef], - ); - }, - ); - - const setBranchUpstream: GitCoreShape["setBranchUpstream"] = (input) => - runGit("GitCore.setBranchUpstream", input.cwd, [ + const setBranchUpstream: GitVcsDriver.GitVcsDriverShape["setBranchUpstream"] = (input) => + runGit("GitVcsDriver.setBranchUpstream", input.cwd, [ "branch", "--set-upstream-to", `${input.remoteName}/${input.remoteBranch}`, input.branch, ]); - const removeWorktree: GitCoreShape["removeWorktree"] = Effect.fn("removeWorktree")( + const removeWorktree: GitVcsDriver.GitVcsDriverShape["removeWorktree"] = Effect.fn( + "removeWorktree", + )(function* (input) { + const args = ["worktree", "remove"]; + if (input.force) { + args.push("--force"); + } + args.push(input.path); + yield* executeGit("GitVcsDriver.removeWorktree", input.cwd, args, { + timeoutMs: 15_000, + fallbackErrorMessage: "git worktree remove failed", + }).pipe( + Effect.mapError((error) => + createGitCommandError( + "GitVcsDriver.removeWorktree", + input.cwd, + args, + `${commandLabel(args)} failed (cwd: ${input.cwd}): ${error.message}`, + error, + ), + ), + ); + }); + + const renameBranch: GitVcsDriver.GitVcsDriverShape["renameBranch"] = Effect.fn("renameBranch")( function* (input) { - const args = ["worktree", "remove"]; - if (input.force) { - args.push("--force"); + if (input.oldBranch === input.newBranch) { + return { branch: input.newBranch }; } - args.push(input.path); - yield* executeGit("GitCore.removeWorktree", input.cwd, args, { - timeoutMs: 15_000, - fallbackErrorMessage: "git worktree remove failed", - }).pipe( - Effect.mapError((error) => - createGitCommandError( - "GitCore.removeWorktree", - input.cwd, - args, - `${commandLabel(args)} failed (cwd: ${input.cwd}): ${error.message}`, - error, - ), - ), + const targetBranch = yield* resolveAvailableBranchName(input.cwd, input.newBranch); + + yield* executeGit( + "GitVcsDriver.renameBranch", + input.cwd, + 
["branch", "-m", "--", input.oldBranch, targetBranch], + { + timeoutMs: 10_000, + fallbackErrorMessage: "git branch rename failed", + }, ); + + return { branch: targetBranch }; }, ); - const renameBranch: GitCoreShape["renameBranch"] = Effect.fn("renameBranch")(function* (input) { - if (input.oldBranch === input.newBranch) { - return { branch: input.newBranch }; - } - const targetBranch = yield* resolveAvailableBranchName(input.cwd, input.newBranch); - - yield* executeGit( - "GitCore.renameBranch", - input.cwd, - ["branch", "-m", "--", input.oldBranch, targetBranch], - { - timeoutMs: 10_000, - fallbackErrorMessage: "git branch rename failed", - }, - ); - - return { branch: targetBranch }; - }); - - const checkoutBranch: GitCoreShape["checkoutBranch"] = Effect.fn("checkoutBranch")( + const switchRef: GitVcsDriver.GitVcsDriverShape["switchRef"] = Effect.fn("switchRef")( function* (input) { const [localInputExists, remoteExists] = yield* Effect.all( [ executeGit( - "GitCore.checkoutBranch.localInputExists", + "GitVcsDriver.switchRef.localInputExists", input.cwd, - ["show-ref", "--verify", "--quiet", `refs/heads/${input.branch}`], + ["show-ref", "--verify", "--quiet", `refs/heads/${input.refName}`], { timeoutMs: 5_000, allowNonZeroExit: true, }, - ).pipe(Effect.map((result) => result.code === 0)), + ).pipe(Effect.map((result) => result.exitCode === 0)), executeGit( - "GitCore.checkoutBranch.remoteExists", + "GitVcsDriver.switchRef.remoteExists", input.cwd, - ["show-ref", "--verify", "--quiet", `refs/remotes/${input.branch}`], + ["show-ref", "--verify", "--quiet", `refs/remotes/${input.refName}`], { timeoutMs: 5_000, allowNonZeroExit: true, }, - ).pipe(Effect.map((result) => result.code === 0)), + ).pipe(Effect.map((result) => result.exitCode === 0)), ], { concurrency: "unbounded" }, ); const localTrackingBranch = remoteExists ? 
yield* executeGit( - "GitCore.checkoutBranch.localTrackingBranch", + "GitVcsDriver.switchRef.localTrackingBranch", input.cwd, ["for-each-ref", "--format=%(refname:short)\t%(upstream:short)", "refs/heads"], { @@ -2097,71 +2036,73 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { }, ).pipe( Effect.map((result) => - result.code === 0 - ? parseTrackingBranchByUpstreamRef(result.stdout, input.branch) + result.exitCode === 0 + ? parseTrackingBranchByUpstreamRef(result.stdout, input.refName) : null, ), ) : null; - const localTrackedBranchCandidate = deriveLocalBranchNameFromRemoteRef(input.branch); + const localTrackedBranchCandidate = deriveLocalBranchNameFromRemoteRef(input.refName); const localTrackedBranchTargetExists = remoteExists && localTrackedBranchCandidate ? yield* executeGit( - "GitCore.checkoutBranch.localTrackedBranchTargetExists", + "GitVcsDriver.switchRef.localTrackedBranchTargetExists", input.cwd, ["show-ref", "--verify", "--quiet", `refs/heads/${localTrackedBranchCandidate}`], { timeoutMs: 5_000, allowNonZeroExit: true, }, - ).pipe(Effect.map((result) => result.code === 0)) + ).pipe(Effect.map((result) => result.exitCode === 0)) : false; const checkoutArgs = localInputExists - ? ["checkout", input.branch] + ? ["checkout", input.refName] : remoteExists && !localTrackingBranch && localTrackedBranchTargetExists - ? ["checkout", input.branch] + ? ["checkout", input.refName] : remoteExists && !localTrackingBranch - ? ["checkout", "--track", input.branch] + ? ["checkout", "--track", input.refName] : remoteExists && localTrackingBranch ? 
["checkout", localTrackingBranch] - : ["checkout", input.branch]; + : ["checkout", input.refName]; - yield* executeGit("GitCore.checkoutBranch.checkout", input.cwd, checkoutArgs, { + yield* executeGit("GitVcsDriver.switchRef.checkout", input.cwd, checkoutArgs, { timeoutMs: 10_000, fallbackErrorMessage: "git checkout failed", }); - const branch = yield* runGitStdout("GitCore.checkoutBranch.currentBranch", input.cwd, [ + const refName = yield* runGitStdout("GitVcsDriver.switchRef.currentBranch", input.cwd, [ "branch", "--show-current", ]).pipe(Effect.map((stdout) => stdout.trim() || null)); - return { branch }; + return { refName }; }, ); - const createBranch: GitCoreShape["createBranch"] = Effect.fn("createBranch")(function* (input) { - yield* executeGit("GitCore.createBranch", input.cwd, ["branch", input.branch], { - timeoutMs: 10_000, - fallbackErrorMessage: "git branch create failed", - }); - if (input.checkout) { - yield* checkoutBranch({ cwd: input.cwd, branch: input.branch }); - } + const createRef: GitVcsDriver.GitVcsDriverShape["createRef"] = Effect.fn("createRef")( + function* (input) { + yield* executeGit("GitVcsDriver.createRef", input.cwd, ["branch", input.refName], { + timeoutMs: 10_000, + fallbackErrorMessage: "git branch create failed", + }); + if (input.switchRef) { + yield* switchRef({ cwd: input.cwd, refName: input.refName }); + } - return { branch: input.branch }; - }); + return { refName: input.refName }; + }, + ); - const initRepo: GitCoreShape["initRepo"] = (input) => - executeGit("GitCore.initRepo", input.cwd, ["init"], { + const initRepo: GitVcsDriver.GitVcsDriverShape["initRepo"] = (input) => + executeGit("GitVcsDriver.initRepo", input.cwd, ["init"], { timeoutMs: 10_000, fallbackErrorMessage: "git init failed", }).pipe(Effect.asVoid); - const listLocalBranchNames: GitCoreShape["listLocalBranchNames"] = (cwd) => - runGitStdout("GitCore.listLocalBranchNames", cwd, [ + const listLocalBranchNames: 
GitVcsDriver.GitVcsDriverShape["listLocalBranchNames"] = (cwd) => + runGitStdout("GitVcsDriver.listLocalBranchNames", cwd, [ "branch", "--list", "--no-column", @@ -2175,7 +2116,7 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { ), ); - return { + return GitVcsDriver.GitVcsDriver.of({ execute, status, statusDetails, @@ -2186,22 +2127,19 @@ export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { pullCurrentBranch, readRangeContext, readConfigValue, - isInsideWorkTree, - listWorkspaceFiles, - filterIgnoredPaths, - listBranches, + listRefs, createWorktree, fetchPullRequestBranch, ensureRemote, + resolvePrimaryRemoteName, fetchRemoteBranch, + fetchRemoteTrackingBranch, setBranchUpstream, removeWorktree, renameBranch, - createBranch, - checkoutBranch, + createRef, + switchRef, initRepo, listLocalBranchNames, - } satisfies GitCoreShape; + }); }); - -export const GitCoreLive = Layer.effect(GitCore, makeGitCore()); diff --git a/apps/server/src/vcs/VcsDriver.ts b/apps/server/src/vcs/VcsDriver.ts new file mode 100644 index 00000000000..ae09d840f88 --- /dev/null +++ b/apps/server/src/vcs/VcsDriver.ts @@ -0,0 +1,31 @@ +import { Context, type Effect } from "effect"; + +import type { + VcsDriverCapabilities, + VcsError, + VcsInitInput, + VcsListRemotesResult, + VcsListWorkspaceFilesResult, + VcsRepositoryIdentity, +} from "@t3tools/contracts"; +import * as VcsProcess from "./VcsProcess.ts"; + +export interface VcsDriverShape { + readonly capabilities: VcsDriverCapabilities; + readonly execute: ( + input: Omit, + ) => Effect.Effect; + readonly detectRepository: (cwd: string) => Effect.Effect; + readonly isInsideWorkTree: (cwd: string) => Effect.Effect; + readonly listWorkspaceFiles: ( + cwd: string, + ) => Effect.Effect; + readonly listRemotes: (cwd: string) => Effect.Effect; + readonly filterIgnoredPaths: ( + cwd: string, + relativePaths: ReadonlyArray, + ) => Effect.Effect, VcsError>; + readonly initRepository: (input: 
VcsInitInput) => Effect.Effect; +} + +export class VcsDriver extends Context.Service()("t3/vcs/VcsDriver") {} diff --git a/apps/server/src/vcs/VcsDriverRegistry.test.ts b/apps/server/src/vcs/VcsDriverRegistry.test.ts new file mode 100644 index 00000000000..b9330c885a8 --- /dev/null +++ b/apps/server/src/vcs/VcsDriverRegistry.test.ts @@ -0,0 +1,86 @@ +import { assert, it, describe } from "@effect/vitest"; +import { Effect, Layer } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import * as VcsProcess from "./VcsProcess.ts"; +import * as VcsProjectConfig from "./VcsProjectConfig.ts"; +import * as VcsDriverRegistry from "./VcsDriverRegistry.ts"; + +const processOutput = (stdout: string): VcsProcess.VcsProcessOutput => ({ + exitCode: ChildProcessSpawner.ExitCode(0), + stdout, + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, +}); + +describe("VcsDriverRegistry", () => { + it.effect("routes directly by VCS driver kind for non-repository workflows", () => { + const layer = Layer.effect(VcsDriverRegistry.VcsDriverRegistry, VcsDriverRegistry.make()).pipe( + Layer.provide( + Layer.mock(VcsProjectConfig.VcsProjectConfig)({ + resolveKind: (input) => Effect.succeed(input.requestedKind ?? 
"auto"), + }), + ), + Layer.provide( + Layer.mock(VcsProcess.VcsProcess)({ + run: () => Effect.succeed(processOutput("")), + }), + ), + ); + + return Effect.gen(function* () { + const registry = yield* VcsDriverRegistry.VcsDriverRegistry; + const driver = yield* registry.get("git"); + + assert.strictEqual(driver.capabilities.kind, "git"); + }).pipe(Effect.provide(layer)); + }); + + it.effect("caches repository detection for repeated resolves in the same cwd and kind", () => { + const calls: VcsProcess.VcsProcessInput[] = []; + const layer = Layer.effect(VcsDriverRegistry.VcsDriverRegistry, VcsDriverRegistry.make()).pipe( + Layer.provide( + Layer.mock(VcsProjectConfig.VcsProjectConfig)({ + resolveKind: (input) => Effect.succeed(input.requestedKind ?? "auto"), + }), + ), + Layer.provide( + Layer.mock(VcsProcess.VcsProcess)({ + run: (input) => + Effect.sync(() => { + calls.push(input); + const command = input.args.join(" "); + if (command === "rev-parse --is-inside-work-tree") { + return processOutput("true\n"); + } + if (command === "rev-parse --show-toplevel") { + return processOutput("/repo\n"); + } + if (command === "rev-parse --git-common-dir") { + return processOutput("/repo/.git\n"); + } + return processOutput(""); + }), + }), + ), + ); + + return Effect.gen(function* () { + const registry = yield* VcsDriverRegistry.VcsDriverRegistry; + const first = yield* registry.resolve({ cwd: "/repo", requestedKind: "git" }); + const second = yield* registry.resolve({ cwd: "/repo", requestedKind: "git" }); + + assert.equal(first.repository.rootPath, "/repo"); + assert.equal(second.repository.rootPath, "/repo"); + assert.deepStrictEqual( + calls.map((call) => call.args.join(" ")), + [ + "rev-parse --is-inside-work-tree", + "rev-parse --show-toplevel", + "rev-parse --git-common-dir", + ], + ); + }).pipe(Effect.provide(layer)); + }); +}); diff --git a/apps/server/src/vcs/VcsDriverRegistry.ts b/apps/server/src/vcs/VcsDriverRegistry.ts new file mode 100644 index 
00000000000..b29fa43ed8e --- /dev/null +++ b/apps/server/src/vcs/VcsDriverRegistry.ts @@ -0,0 +1,155 @@ +import { Cache, Context, Duration, Effect, Exit, Layer } from "effect"; + +import type { VcsDriverKind, VcsError, VcsRepositoryIdentity } from "@t3tools/contracts"; +import { VcsUnsupportedOperationError } from "@t3tools/contracts"; +import * as GitVcsDriver from "./GitVcsDriver.ts"; +import * as VcsProjectConfig from "./VcsProjectConfig.ts"; +import * as VcsDriver from "./VcsDriver.ts"; + +const DETECTION_CACHE_CAPACITY = 2_048; +const DETECTION_CACHE_TTL = Duration.seconds(2); + +export interface VcsDriverResolveInput { + readonly cwd: string; + readonly requestedKind?: VcsDriverKind | "auto"; +} + +export interface VcsDriverHandle { + readonly kind: VcsDriverKind; + readonly repository: VcsRepositoryIdentity; + readonly driver: VcsDriver.VcsDriverShape; +} + +export interface VcsDriverRegistryShape { + readonly get: (kind: VcsDriverKind) => Effect.Effect; + readonly detect: ( + input: VcsDriverResolveInput, + ) => Effect.Effect; + readonly resolve: (input: VcsDriverResolveInput) => Effect.Effect; +} + +export class VcsDriverRegistry extends Context.Service()( + "t3/vcs/VcsDriverRegistry", +) {} + +const unsupported = (operation: string, kind: VcsDriverKind, detail: string) => + new VcsUnsupportedOperationError({ + operation, + kind, + detail, + }); + +function detectionCacheKey(input: { + readonly cwd: string; + readonly requestedKind: VcsDriverKind | "auto"; +}): string { + return `${input.requestedKind}\0${input.cwd}`; +} + +function parseDetectionCacheKey(key: string): { + readonly cwd: string; + readonly requestedKind: VcsDriverKind | "auto"; +} { + const separatorIndex = key.indexOf("\0"); + if (separatorIndex === -1) { + return { + cwd: key, + requestedKind: "auto", + }; + } + return { + requestedKind: key.slice(0, separatorIndex) as VcsDriverKind | "auto", + cwd: key.slice(separatorIndex + 1), + }; +} + +export const make = 
Effect.fn("makeVcsDriverRegistry")(function* () { + const projectConfig = yield* VcsProjectConfig.VcsProjectConfig; + const git = yield* GitVcsDriver.makeVcsDriverShape(); + const drivers: Partial> = { + git, + }; + + const get: VcsDriverRegistryShape["get"] = (kind) => { + const driver = drivers[kind]; + if (!driver) { + return Effect.fail( + unsupported("VcsDriverRegistry.get", kind, `No ${kind} VCS driver is registered.`), + ); + } + return Effect.succeed(driver); + }; + + const detectWithDriver = Effect.fn("VcsDriverRegistry.detectWithDriver")(function* ( + kind: VcsDriverKind, + driver: VcsDriver.VcsDriverShape, + cwd: string, + ) { + const repository = yield* driver.detectRepository(cwd); + if (!repository) { + return null; + } + return { + kind, + repository, + driver, + } satisfies VcsDriverHandle; + }); + + const detectResolvedKind = Effect.fn("VcsDriverRegistry.detectResolvedKind")(function* (input: { + readonly cwd: string; + readonly requestedKind: VcsDriverKind | "auto"; + }) { + const requestedKind = input.requestedKind; + + if (requestedKind !== "auto" && requestedKind !== "unknown") { + const driver = yield* get(requestedKind); + return yield* detectWithDriver(requestedKind, driver, input.cwd); + } + + return yield* detectWithDriver("git", git, input.cwd); + }); + + const detectionCache = yield* Cache.makeWith( + (key) => detectResolvedKind(parseDetectionCacheKey(key)), + { + capacity: DETECTION_CACHE_CAPACITY, + timeToLive: (exit) => (Exit.isSuccess(exit) ? 
DETECTION_CACHE_TTL : Duration.zero), + }, + ); + + const detect: VcsDriverRegistryShape["detect"] = Effect.fn("VcsDriverRegistry.detect")( + function* (input) { + const requestedKind = yield* projectConfig.resolveKind(input); + return yield* Cache.get(detectionCache, detectionCacheKey({ cwd: input.cwd, requestedKind })); + }, + ); + + const resolve: VcsDriverRegistryShape["resolve"] = Effect.fn("VcsDriverRegistry.resolve")( + function* (input) { + const detected = yield* detect(input); + if (detected) { + return detected; + } + + const requestedKind = input.requestedKind ?? "auto"; + return yield* unsupported( + "VcsDriverRegistry.resolve", + requestedKind === "auto" ? "unknown" : requestedKind, + requestedKind === "auto" + ? `No supported VCS repository was detected at ${input.cwd}.` + : `No ${requestedKind} repository was detected at ${input.cwd}.`, + ); + }, + ); + + return VcsDriverRegistry.of({ + get, + detect, + resolve, + }); +}); + +export const layer = Layer.effect(VcsDriverRegistry, make()).pipe( + Layer.provide(VcsProjectConfig.layer), +); diff --git a/apps/server/src/vcs/VcsProcess.ts b/apps/server/src/vcs/VcsProcess.ts new file mode 100644 index 00000000000..33e03a2551d --- /dev/null +++ b/apps/server/src/vcs/VcsProcess.ts @@ -0,0 +1,259 @@ +import { Duration, Context, Effect, Layer, Option, PlatformError, Sink, Stream } from "effect"; +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; + +import { + VcsOutputDecodeError, + type VcsError, + VcsProcessExitError, + VcsProcessSpawnError, + VcsProcessTimeoutError, +} from "@t3tools/contracts"; + +export interface VcsProcessInput { + readonly operation: string; + readonly command: string; + readonly args: ReadonlyArray; + readonly cwd: string; + readonly stdin?: string; + readonly env?: NodeJS.ProcessEnv; + readonly allowNonZeroExit?: boolean; + readonly timeoutMs?: number; + readonly maxOutputBytes?: number; + readonly truncateOutputAtMaxBytes?: boolean; +} + +export interface 
VcsProcessOutput { + readonly exitCode: ChildProcessSpawner.ExitCode; + readonly stdout: string; + readonly stderr: string; + readonly stdoutTruncated: boolean; + readonly stderrTruncated: boolean; +} + +export interface VcsProcessCollectedText { + readonly text: string; + readonly truncated: boolean; +} + +export interface VcsProcessHandle { + readonly pid: ChildProcessSpawner.ProcessId; + readonly stdin: Sink.Sink; + readonly stdout: Stream.Stream; + readonly stderr: Stream.Stream; + readonly exitCode: Effect.Effect; + readonly writeStdin: (input: string) => Effect.Effect; +} + +export interface VcsProcessShape { + readonly withProcess: ( + input: VcsProcessInput, + use: (handle: VcsProcessHandle) => Effect.Effect, + ) => Effect.Effect; + readonly run: (input: VcsProcessInput) => Effect.Effect; +} + +export class VcsProcess extends Context.Service()( + "t3/vcs/VcsProcess", +) {} + +const DEFAULT_TIMEOUT_MS = 30_000; +const DEFAULT_MAX_OUTPUT_BYTES = 1_000_000; +const OUTPUT_TRUNCATED_MARKER = "\n\n[truncated]"; + +function commandLabel(command: string, args: ReadonlyArray): string { + return [command, ...args].join(" "); +} + +function outputDecodeError( + input: VcsProcessInput, + detail: string, + cause: unknown, +): VcsOutputDecodeError { + return new VcsOutputDecodeError({ + operation: input.operation, + command: commandLabel(input.command, input.args), + cwd: input.cwd, + detail, + cause, + }); +} + +export const collectText = Effect.fn("VcsProcess.collectText")(function* (input: { + readonly operation: string; + readonly command: string; + readonly cwd: string; + readonly stream: Stream.Stream; + readonly maxOutputBytes?: number; + readonly truncateOutputAtMaxBytes?: boolean; +}) { + const decoder = new TextDecoder(); + let text = ""; + let bytes = 0; + let truncated = false; + const maxOutputBytes = input.maxOutputBytes ?? DEFAULT_MAX_OUTPUT_BYTES; + const truncateOutputAtMaxBytes = input.truncateOutputAtMaxBytes ?? 
false; + + yield* Stream.runForEach(input.stream, (chunk) => + Effect.sync(() => { + if (truncated) return; + + const remainingBytes = maxOutputBytes - bytes; + if (remainingBytes <= 0) { + truncated = true; + if (truncateOutputAtMaxBytes) { + text += OUTPUT_TRUNCATED_MARKER; + } + return; + } + + const nextChunk = chunk.byteLength > remainingBytes ? chunk.slice(0, remainingBytes) : chunk; + text += decoder.decode(nextChunk, { stream: true }); + bytes += nextChunk.byteLength; + + if (chunk.byteLength > remainingBytes) { + truncated = true; + if (truncateOutputAtMaxBytes) { + text += OUTPUT_TRUNCATED_MARKER; + } + } + }), + ); + + if (!truncated) { + text += decoder.decode(); + } + + return { text, truncated } satisfies VcsProcessCollectedText; +}); + +export const make = Effect.fn("makeVcsProcess")(function* () { + const spawner = yield* ChildProcessSpawner.ChildProcessSpawner; + + const spawn = Effect.fn("VcsProcess.spawn")(function* (input: VcsProcessInput) { + const label = commandLabel(input.command, input.args); + const child = yield* spawner + .spawn( + ChildProcess.make(input.command, [...input.args], { + cwd: input.cwd, + env: { + ...process.env, + ...input.env, + }, + }), + ) + .pipe( + Effect.mapError( + (cause) => + new VcsProcessSpawnError({ + operation: input.operation, + command: label, + cwd: input.cwd, + cause, + }), + ), + ); + yield* Effect.addFinalizer(() => child.kill().pipe(Effect.ignore)); + + const mapStreamError = (streamName: "stdout" | "stderr") => + Stream.mapError((cause: PlatformError.PlatformError) => + outputDecodeError(input, `failed to read process ${streamName}`, cause), + ); + const mapEffectError = (detail: string) => + Effect.mapError((cause: PlatformError.PlatformError) => + outputDecodeError(input, detail, cause), + ); + const writeStdin = (stdin: string) => + Stream.run(Stream.encodeText(Stream.make(stdin)), child.stdin).pipe( + mapEffectError("failed to write process stdin"), + ); + + return { + pid: child.pid, + stdin: 
child.stdin.pipe( + Sink.mapError((cause) => outputDecodeError(input, "failed to write process stdin", cause)), + ), + stdout: child.stdout.pipe(mapStreamError("stdout")), + stderr: child.stderr.pipe(mapStreamError("stderr")), + exitCode: child.exitCode.pipe(mapEffectError("failed to read process exit code")), + writeStdin, + } satisfies VcsProcessHandle; + }); + + const withProcess: VcsProcessShape["withProcess"] = (input, use) => + Effect.scoped(spawn(input).pipe(Effect.flatMap(use))); + + const run = Effect.fn("VcsProcess.run")(function* (input: VcsProcessInput) { + const timeoutMs = input.timeoutMs ?? DEFAULT_TIMEOUT_MS; + const maxOutputBytes = input.maxOutputBytes ?? DEFAULT_MAX_OUTPUT_BYTES; + const label = commandLabel(input.command, input.args); + + const runProcess = Effect.gen(function* () { + const [stdout, stderr, exitCode] = yield* withProcess(input, (child) => + Effect.all( + [ + collectText({ + operation: input.operation, + command: label, + cwd: input.cwd, + stream: child.stdout, + maxOutputBytes, + truncateOutputAtMaxBytes: input.truncateOutputAtMaxBytes ?? false, + }), + collectText({ + operation: input.operation, + command: label, + cwd: input.cwd, + stream: child.stderr, + maxOutputBytes, + truncateOutputAtMaxBytes: input.truncateOutputAtMaxBytes ?? false, + }), + child.exitCode, + input.stdin === undefined ? 
Effect.void : child.writeStdin(input.stdin), + ], + { concurrency: "unbounded" }, + ), + ).pipe(Effect.map(([stdout, stderr, exitCode]) => [stdout, stderr, exitCode] as const)); + + if (!input.allowNonZeroExit && exitCode !== 0) { + return yield* new VcsProcessExitError({ + operation: input.operation, + command: label, + cwd: input.cwd, + exitCode, + detail: stderr.text.trim() || `${label} exited with code ${exitCode}.`, + }); + } + + return { + exitCode, + stdout: stdout.text, + stderr: stderr.text, + stdoutTruncated: stdout.truncated, + stderrTruncated: stderr.truncated, + } satisfies VcsProcessOutput; + }); + + return yield* runProcess.pipe( + Effect.scoped, + Effect.timeoutOption(Duration.millis(timeoutMs)), + Effect.flatMap((result) => + Option.match(result, { + onSome: Effect.succeed, + onNone: () => + Effect.fail( + new VcsProcessTimeoutError({ + operation: input.operation, + command: label, + cwd: input.cwd, + timeoutMs, + }), + ), + }), + ), + ); + }); + + return VcsProcess.of({ withProcess, run }); +}); + +export const layer = Layer.effect(VcsProcess, make()); diff --git a/apps/server/src/vcs/VcsProjectConfig.test.ts b/apps/server/src/vcs/VcsProjectConfig.test.ts new file mode 100644 index 00000000000..b08b5716732 --- /dev/null +++ b/apps/server/src/vcs/VcsProjectConfig.test.ts @@ -0,0 +1,66 @@ +import { assert, it, describe } from "@effect/vitest"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { Effect, FileSystem, Layer, Path } from "effect"; + +import * as VcsProjectConfig from "./VcsProjectConfig.ts"; + +const TestLayer = VcsProjectConfig.layer.pipe( + Layer.provide(NodeServices.layer), + Layer.provideMerge(NodeServices.layer), +); + +describe("VcsProjectConfig", () => { + it.layer(TestLayer)("uses an explicit requested VCS kind before config", (it) => { + it.effect("returns the requested kind", () => + Effect.gen(function* () { + const config = yield* VcsProjectConfig.VcsProjectConfig; + const kind = yield* 
config.resolveKind({ + cwd: "/repo", + requestedKind: "jj", + }); + + assert.equal(kind, "jj"); + }), + ); + }); + + it.layer(TestLayer)("discovers .t3code/vcs.json from nested workspaces", (it) => { + it.effect("returns the configured kind", () => + Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const root = yield* fileSystem.makeTempDirectoryScoped({ + prefix: "t3-vcs-config-test-", + }); + const configDir = path.join(root, ".t3code"); + const nested = path.join(root, "packages", "app"); + yield* fileSystem.makeDirectory(configDir, { recursive: true }); + yield* fileSystem.makeDirectory(nested, { recursive: true }); + yield* fileSystem.writeFileString( + path.join(configDir, "vcs.json"), + JSON.stringify({ vcs: { kind: "jj" } }), + ); + + const config = yield* VcsProjectConfig.VcsProjectConfig; + const kind = yield* config.resolveKind({ cwd: nested }); + + assert.equal(kind, "jj"); + }), + ); + }); + + it.layer(TestLayer)("falls back to auto when no config exists", (it) => { + it.effect("returns auto", () => + Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const root = yield* fileSystem.makeTempDirectoryScoped({ + prefix: "t3-vcs-config-test-", + }); + const config = yield* VcsProjectConfig.VcsProjectConfig; + const kind = yield* config.resolveKind({ cwd: root }); + + assert.equal(kind, "auto"); + }), + ); + }); +}); diff --git a/apps/server/src/vcs/VcsProjectConfig.ts b/apps/server/src/vcs/VcsProjectConfig.ts new file mode 100644 index 00000000000..6b4ba008d4e --- /dev/null +++ b/apps/server/src/vcs/VcsProjectConfig.ts @@ -0,0 +1,117 @@ +import { Context, Effect, FileSystem, Layer, Path, Schema } from "effect"; + +import { VcsDriverKind, type VcsDriverKind as VcsDriverKindType } from "@t3tools/contracts"; + +const ProjectVcsConfig = Schema.Struct({ + vcs: Schema.optional( + Schema.Struct({ + kind: Schema.optional(VcsDriverKind), + }), + ), + vcsKind: 
Schema.optional(VcsDriverKind), +}); + +interface ProjectVcsConfigFile { + readonly vcs?: + | { + readonly kind?: VcsDriverKindType | undefined; + } + | undefined; + readonly vcsKind?: VcsDriverKindType | undefined; +} + +export interface VcsProjectConfigResolveInput { + readonly cwd: string; + readonly requestedKind?: VcsDriverKindType | "auto"; +} + +export interface VcsProjectConfigShape { + readonly resolveKind: ( + input: VcsProjectConfigResolveInput, + ) => Effect.Effect; +} + +export class VcsProjectConfig extends Context.Service()( + "t3/vcs/VcsProjectConfig", +) {} + +function configuredKind(config: ProjectVcsConfigFile): VcsDriverKindType | "auto" { + return config.vcs?.kind ?? config.vcsKind ?? "auto"; +} + +function parseConfig(raw: string): ProjectVcsConfigFile | null { + try { + const parsed = JSON.parse(raw) as unknown; + return Schema.is(ProjectVcsConfig)(parsed) ? parsed : null; + } catch { + return null; + } +} + +export const make = Effect.fn("makeVcsProjectConfig")(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + + const findConfigPath = Effect.fn("VcsProjectConfig.findConfigPath")(function* (cwd: string) { + let current = cwd; + while (true) { + const candidate = path.join(current, ".t3code", "vcs.json"); + if (yield* fileSystem.exists(candidate).pipe(Effect.orElseSucceed(() => false))) { + return candidate; + } + + const parent = path.dirname(current); + if (parent === current) { + return null; + } + current = parent; + } + }); + + const readConfiguredKind = Effect.fn("VcsProjectConfig.readConfiguredKind")(function* ( + configPath: string, + ) { + const raw = yield* fileSystem.readFileString(configPath).pipe( + Effect.catch((error) => + Effect.logWarning("failed to read VCS project config", { + configPath, + error, + }).pipe(Effect.as(null)), + ), + ); + if (raw === null) { + return "auto" as const; + } + + const parsed = parseConfig(raw); + if (parsed === null) { + yield* 
Effect.logWarning("invalid VCS project config", { + configPath, + }); + return "auto" as const; + } + + return configuredKind(parsed); + }); + + const resolveKind: VcsProjectConfigShape["resolveKind"] = Effect.fn( + "VcsProjectConfig.resolveKind", + )(function* (input) { + if (input.requestedKind !== undefined && input.requestedKind !== "auto") { + return input.requestedKind; + } + + const configPath = yield* findConfigPath(input.cwd); + if (configPath === null) { + return "auto"; + } + + return yield* readConfiguredKind(configPath); + }); + + return VcsProjectConfig.of({ + resolveKind, + }); +}); + +export const layer = Layer.effect(VcsProjectConfig, make()); diff --git a/apps/server/src/vcs/VcsProvisioningService.test.ts b/apps/server/src/vcs/VcsProvisioningService.test.ts new file mode 100644 index 00000000000..b26f331a3c7 --- /dev/null +++ b/apps/server/src/vcs/VcsProvisioningService.test.ts @@ -0,0 +1,94 @@ +import { assert, it } from "@effect/vitest"; +import { DateTime, Effect, Layer, Option } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; + +import * as VcsDriver from "./VcsDriver.ts"; +import * as VcsDriverRegistry from "./VcsDriverRegistry.ts"; +import * as VcsProvisioningService from "./VcsProvisioningService.ts"; + +const TEST_EPOCH = DateTime.makeUnsafe("1970-01-01T00:00:00.000Z"); + +function makeDriver(calls: string[]): VcsDriver.VcsDriverShape { + return { + capabilities: { + kind: "git", + supportsWorktrees: true, + supportsBookmarks: false, + supportsAtomicSnapshot: false, + supportsPushDefaultRemote: true, + ignoreClassifier: "native", + }, + execute: () => + Effect.succeed({ + exitCode: ChildProcessSpawner.ExitCode(0), + stdout: "", + stderr: "", + stdoutTruncated: false, + stderrTruncated: false, + }), + detectRepository: () => Effect.succeed(null), + isInsideWorkTree: () => Effect.succeed(false), + listWorkspaceFiles: () => + Effect.succeed({ + paths: [], + truncated: false, + freshness: { + source: 
"live-local", + observedAt: TEST_EPOCH, + expiresAt: Option.none(), + }, + }), + listRemotes: () => + Effect.succeed({ + remotes: [], + freshness: { + source: "live-local", + observedAt: TEST_EPOCH, + expiresAt: Option.none(), + }, + }), + filterIgnoredPaths: (_cwd, relativePaths) => Effect.succeed(relativePaths), + initRepository: (input) => + Effect.sync(() => { + calls.push(`${input.kind ?? "default"}:${input.cwd}`); + }), + }; +} + +it.effect("routes repository initialization through an explicit VCS driver kind", () => { + const calls: string[] = []; + const driver = makeDriver(calls); + const testLayer = VcsProvisioningService.layer.pipe( + Layer.provide( + Layer.mock(VcsDriverRegistry.VcsDriverRegistry)({ + get: (kind) => (kind === "git" ? Effect.succeed(driver) : Effect.die("unexpected kind")), + }), + ), + ); + + return Effect.gen(function* () { + const provisioning = yield* VcsProvisioningService.VcsProvisioningService; + yield* provisioning.initRepository({ cwd: "/repo", kind: "git" }); + + assert.deepStrictEqual(calls, ["git:/repo"]); + }).pipe(Effect.provide(testLayer)); +}); + +it.effect("defaults repository initialization to Git until callers choose a VCS kind", () => { + const calls: string[] = []; + const driver = makeDriver(calls); + const testLayer = VcsProvisioningService.layer.pipe( + Layer.provide( + Layer.mock(VcsDriverRegistry.VcsDriverRegistry)({ + get: (kind) => (kind === "git" ? 
Effect.succeed(driver) : Effect.die("unexpected kind")), + }), + ), + ); + + return Effect.gen(function* () { + const provisioning = yield* VcsProvisioningService.VcsProvisioningService; + yield* provisioning.initRepository({ cwd: "/repo" }); + + assert.deepStrictEqual(calls, ["default:/repo"]); + }).pipe(Effect.provide(testLayer)); +}); diff --git a/apps/server/src/vcs/VcsProvisioningService.ts b/apps/server/src/vcs/VcsProvisioningService.ts new file mode 100644 index 00000000000..9f8f822f2a2 --- /dev/null +++ b/apps/server/src/vcs/VcsProvisioningService.ts @@ -0,0 +1,54 @@ +import { Context, Effect, Layer } from "effect"; + +import { + type VcsDriverKind, + type VcsError, + type VcsInitInput, + VcsUnsupportedOperationError, +} from "@t3tools/contracts"; +import * as VcsDriverRegistry from "./VcsDriverRegistry.ts"; + +export interface VcsProvisioningServiceShape { + readonly initRepository: (input: VcsInitInput) => Effect.Effect; +} + +export class VcsProvisioningService extends Context.Service< + VcsProvisioningService, + VcsProvisioningServiceShape +>()("t3/vcs/VcsProvisioningService") {} + +function resolveRequestedKind( + kind: VcsDriverKind | undefined, +): Effect.Effect { + if (kind === undefined) { + return Effect.succeed("git"); + } + if (kind === "unknown") { + return Effect.fail( + new VcsUnsupportedOperationError({ + operation: "VcsProvisioningService.resolveRequestedKind", + kind, + detail: "A concrete VCS driver kind is required for repository provisioning.", + }), + ); + } + return Effect.succeed(kind); +} + +export const make = Effect.fn("makeVcsProvisioningService")(function* () { + const registry = yield* VcsDriverRegistry.VcsDriverRegistry; + + const initRepository: VcsProvisioningServiceShape["initRepository"] = Effect.fn( + "VcsProvisioningService.initRepository", + )(function* (input) { + const kind = yield* resolveRequestedKind(input.kind); + const driver = yield* registry.get(kind); + return yield* driver.initRepository(input); + }); + + 
return VcsProvisioningService.of({ + initRepository, + }); +}); + +export const layer = Layer.effect(VcsProvisioningService, make()); diff --git a/apps/server/src/git/Layers/GitStatusBroadcaster.test.ts b/apps/server/src/vcs/VcsStatusBroadcaster.test.ts similarity index 61% rename from apps/server/src/git/Layers/GitStatusBroadcaster.test.ts rename to apps/server/src/vcs/VcsStatusBroadcaster.test.ts index 72a0c24e27b..ca7cccf303c 100644 --- a/apps/server/src/git/Layers/GitStatusBroadcaster.test.ts +++ b/apps/server/src/vcs/VcsStatusBroadcaster.test.ts @@ -1,83 +1,78 @@ -import { assert, it } from "@effect/vitest"; -import { Deferred, Effect, Exit, Layer, Option, Scope, Stream } from "effect"; +import { assert, it, describe } from "@effect/vitest"; +import * as NodeServices from "@effect/platform-node/NodeServices"; +import { Deferred, Effect, Exit, FileSystem, Layer, Option, Path, Scope, Stream } from "effect"; import type { - GitStatusLocalResult, - GitStatusRemoteResult, - GitStatusResult, - GitStatusStreamEvent, + VcsStatusLocalResult, + VcsStatusRemoteResult, + VcsStatusResult, + VcsStatusStreamEvent, } from "@t3tools/contracts"; -import { describe } from "vitest"; -import { GitStatusBroadcaster } from "../Services/GitStatusBroadcaster.ts"; -import { GitStatusBroadcasterLive } from "./GitStatusBroadcaster.ts"; -import { type GitManagerShape, GitManager } from "../Services/GitManager.ts"; +import * as VcsStatusBroadcaster from "./VcsStatusBroadcaster.ts"; +import * as GitWorkflowService from "../git/GitWorkflowService.ts"; -const baseLocalStatus: GitStatusLocalResult = { +const baseLocalStatus: VcsStatusLocalResult = { isRepo: true, - hostingProvider: { + sourceControlProvider: { kind: "github", name: "GitHub", baseUrl: "https://github.com", }, - hasOriginRemote: true, - isDefaultBranch: false, - branch: "feature/status-broadcast", + hasPrimaryRemote: true, + isDefaultRef: false, + refName: "feature/status-broadcast", hasWorkingTreeChanges: false, workingTree: { 
files: [], insertions: 0, deletions: 0 }, }; -const baseRemoteStatus: GitStatusRemoteResult = { +const baseRemoteStatus: VcsStatusRemoteResult = { hasUpstream: true, aheadCount: 0, behindCount: 0, pr: null, }; -const baseStatus: GitStatusResult = { +const baseStatus: VcsStatusResult = { ...baseLocalStatus, ...baseRemoteStatus, }; function makeTestLayer(state: { - currentLocalStatus: GitStatusLocalResult; - currentRemoteStatus: GitStatusRemoteResult | null; + currentLocalStatus: VcsStatusLocalResult; + currentRemoteStatus: VcsStatusRemoteResult | null; localStatusCalls: number; remoteStatusCalls: number; localInvalidationCalls: number; remoteInvalidationCalls: number; }) { - const gitManager: GitManagerShape = { - localStatus: () => - Effect.sync(() => { - state.localStatusCalls += 1; - return state.currentLocalStatus; + return VcsStatusBroadcaster.layer.pipe( + Layer.provide( + Layer.mock(GitWorkflowService.GitWorkflowService)({ + localStatus: () => + Effect.sync(() => { + state.localStatusCalls += 1; + return state.currentLocalStatus; + }), + remoteStatus: () => + Effect.sync(() => { + state.remoteStatusCalls += 1; + return state.currentRemoteStatus; + }), + invalidateLocalStatus: () => + Effect.sync(() => { + state.localInvalidationCalls += 1; + }), + invalidateRemoteStatus: () => + Effect.sync(() => { + state.remoteInvalidationCalls += 1; + }), }), - remoteStatus: () => - Effect.sync(() => { - state.remoteStatusCalls += 1; - return state.currentRemoteStatus; - }), - status: () => Effect.die("status should not be called in this test"), - invalidateLocalStatus: () => - Effect.sync(() => { - state.localInvalidationCalls += 1; - }), - invalidateRemoteStatus: () => - Effect.sync(() => { - state.remoteInvalidationCalls += 1; - }), - invalidateStatus: () => Effect.die("invalidateStatus should not be called in this test"), - resolvePullRequest: () => Effect.die("resolvePullRequest should not be called in this test"), - preparePullRequestThread: () => - 
Effect.die("preparePullRequestThread should not be called in this test"), - runStackedAction: () => Effect.die("runStackedAction should not be called in this test"), - }; - - return GitStatusBroadcasterLive.pipe(Layer.provide(Layer.succeed(GitManager, gitManager))); + ), + ); } -describe("GitStatusBroadcasterLive", () => { - it.effect("reuses the cached git status across repeated reads", () => { +describe("VcsStatusBroadcaster", () => { + it.effect("reuses the cached VCS status across repeated reads", () => { const state = { currentLocalStatus: baseLocalStatus, currentRemoteStatus: baseRemoteStatus, @@ -88,7 +83,7 @@ describe("GitStatusBroadcasterLive", () => { }; return Effect.gen(function* () { - const broadcaster = yield* GitStatusBroadcaster; + const broadcaster = yield* VcsStatusBroadcaster.VcsStatusBroadcaster; const first = yield* broadcaster.getStatus({ cwd: "/repo" }); const second = yield* broadcaster.getStatus({ cwd: "/repo" }); @@ -113,12 +108,12 @@ describe("GitStatusBroadcasterLive", () => { }; return Effect.gen(function* () { - const broadcaster = yield* GitStatusBroadcaster; + const broadcaster = yield* VcsStatusBroadcaster.VcsStatusBroadcaster; const initial = yield* broadcaster.getStatus({ cwd: "/repo" }); state.currentLocalStatus = { ...baseLocalStatus, - branch: "feature/updated-status", + refName: "feature/updated-status", }; state.currentRemoteStatus = { ...baseRemoteStatus, @@ -154,12 +149,12 @@ describe("GitStatusBroadcasterLive", () => { }; return Effect.gen(function* () { - const broadcaster = yield* GitStatusBroadcaster; + const broadcaster = yield* VcsStatusBroadcaster.VcsStatusBroadcaster; const initial = yield* broadcaster.getStatus({ cwd: "/repo" }); state.currentLocalStatus = { ...baseLocalStatus, - branch: "feature/local-only-refresh", + refName: "feature/local-only-refresh", hasWorkingTreeChanges: true, }; @@ -179,6 +174,66 @@ describe("GitStatusBroadcasterLive", () => { }).pipe(Effect.provide(makeTestLayer(state))); }); + 
it.effect("normalizes symlinked CWDs before cache lookup and workflow calls", () => { + const seenCwds: string[] = []; + const state = { + currentLocalStatus: baseLocalStatus, + currentRemoteStatus: baseRemoteStatus, + localStatusCalls: 0, + remoteStatusCalls: 0, + localInvalidationCalls: 0, + remoteInvalidationCalls: 0, + }; + const testLayer = VcsStatusBroadcaster.layer.pipe( + Layer.provide( + Layer.mock(GitWorkflowService.GitWorkflowService)({ + localStatus: (input) => + Effect.sync(() => { + seenCwds.push(input.cwd); + state.localStatusCalls += 1; + return state.currentLocalStatus; + }), + remoteStatus: (input) => + Effect.sync(() => { + seenCwds.push(input.cwd); + state.remoteStatusCalls += 1; + return state.currentRemoteStatus; + }), + invalidateLocalStatus: () => + Effect.sync(() => { + state.localInvalidationCalls += 1; + }), + invalidateRemoteStatus: () => + Effect.sync(() => { + state.remoteInvalidationCalls += 1; + }), + } satisfies Partial), + ), + ); + + return Effect.gen(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const realDir = yield* fileSystem.makeTempDirectoryScoped({ + prefix: "t3-vcs-status-real-", + }); + const linkParent = yield* fileSystem.makeTempDirectoryScoped({ + prefix: "t3-vcs-status-link-", + }); + const linkDir = path.join(linkParent, "repo-link"); + yield* fileSystem.symlink(realDir, linkDir); + const realPath = yield* fileSystem.realPath(realDir); + + const broadcaster = yield* VcsStatusBroadcaster.VcsStatusBroadcaster; + yield* broadcaster.getStatus({ cwd: linkDir }); + yield* broadcaster.getStatus({ cwd: realDir }); + + assert.deepStrictEqual(seenCwds, [realPath, realPath]); + assert.equal(state.localStatusCalls, 1); + assert.equal(state.remoteStatusCalls, 1); + }).pipe(Effect.provide(Layer.mergeAll(testLayer, NodeServices.layer))); + }); + it.effect("streams a local snapshot first and remote updates later", () => { const state = { currentLocalStatus: baseLocalStatus, @@ 
-190,9 +245,9 @@ describe("GitStatusBroadcasterLive", () => { }; return Effect.gen(function* () { - const broadcaster = yield* GitStatusBroadcaster; - const snapshotDeferred = yield* Deferred.make(); - const remoteUpdatedDeferred = yield* Deferred.make(); + const broadcaster = yield* VcsStatusBroadcaster.VcsStatusBroadcaster; + const snapshotDeferred = yield* Deferred.make(); + const remoteUpdatedDeferred = yield* Deferred.make(); yield* Stream.runForEach(broadcaster.streamStatus({ cwd: "/repo" }), (event) => { if (event._tag === "snapshot") { return Deferred.succeed(snapshotDeferred, event).pipe(Effect.ignore); @@ -211,11 +266,11 @@ describe("GitStatusBroadcasterLive", () => { _tag: "snapshot", local: baseLocalStatus, remote: null, - } satisfies GitStatusStreamEvent); + } satisfies VcsStatusStreamEvent); assert.deepStrictEqual(remoteUpdated, { _tag: "remoteUpdated", remote: baseRemoteStatus, - } satisfies GitStatusStreamEvent); + } satisfies VcsStatusStreamEvent); }).pipe(Effect.provide(makeTestLayer(state))); }); @@ -230,9 +285,9 @@ describe("GitStatusBroadcasterLive", () => { }; let remoteInterruptedDeferred: Deferred.Deferred | null = null; let remoteStartedDeferred: Deferred.Deferred | null = null; - const testLayer = GitStatusBroadcasterLive.pipe( + const testLayer = VcsStatusBroadcaster.layer.pipe( Layer.provide( - Layer.succeed(GitManager, { + Layer.mock(GitWorkflowService.GitWorkflowService)({ localStatus: () => Effect.sync(() => { state.localStatusCalls += 1; @@ -247,14 +302,13 @@ describe("GitStatusBroadcasterLive", () => { ? Deferred.succeed(remoteStartedDeferred, undefined).pipe(Effect.ignore) : Effect.void, ), - Effect.andThen(Effect.never as Effect.Effect), + Effect.andThen(Effect.never as Effect.Effect), Effect.onInterrupt(() => remoteInterruptedDeferred ? 
Deferred.succeed(remoteInterruptedDeferred, undefined).pipe(Effect.ignore) : Effect.void, ), ), - status: () => Effect.die("status should not be called in this test"), invalidateLocalStatus: () => Effect.sync(() => { state.localInvalidationCalls += 1; @@ -263,13 +317,7 @@ describe("GitStatusBroadcasterLive", () => { Effect.sync(() => { state.remoteInvalidationCalls += 1; }), - invalidateStatus: () => Effect.die("invalidateStatus should not be called in this test"), - resolvePullRequest: () => - Effect.die("resolvePullRequest should not be called in this test"), - preparePullRequestThread: () => - Effect.die("preparePullRequestThread should not be called in this test"), - runStackedAction: () => Effect.die("runStackedAction should not be called in this test"), - } satisfies GitManagerShape), + } satisfies Partial), ), ); @@ -279,9 +327,9 @@ describe("GitStatusBroadcasterLive", () => { remoteInterruptedDeferred = remoteInterrupted; remoteStartedDeferred = remoteStarted; - const broadcaster = yield* GitStatusBroadcaster; - const firstSnapshot = yield* Deferred.make(); - const secondSnapshot = yield* Deferred.make(); + const broadcaster = yield* VcsStatusBroadcaster.VcsStatusBroadcaster; + const firstSnapshot = yield* Deferred.make(); + const secondSnapshot = yield* Deferred.make(); const firstScope = yield* Scope.make(); const secondScope = yield* Scope.make(); yield* Stream.runForEach(broadcaster.streamStatus({ cwd: "/repo" }), (event) => @@ -302,11 +350,11 @@ describe("GitStatusBroadcasterLive", () => { assert.equal(state.remoteStatusCalls, 1); yield* Scope.close(firstScope, Exit.void); - assert.equal(Option.isNone(yield* Deferred.poll(remoteInterrupted)), true); + assert.isTrue(Option.isNone(yield* Deferred.poll(remoteInterrupted))); yield* Scope.close(secondScope, Exit.void).pipe(Effect.forkScoped); yield* Deferred.await(remoteInterrupted); - assert.equal(Option.isSome(yield* Deferred.poll(remoteInterrupted)), true); + assert.isTrue(Option.isSome(yield* 
Deferred.poll(remoteInterrupted))); }).pipe(Effect.provide(testLayer)); }); }); diff --git a/apps/server/src/vcs/VcsStatusBroadcaster.ts b/apps/server/src/vcs/VcsStatusBroadcaster.ts new file mode 100644 index 00000000000..1d0cddc4c41 --- /dev/null +++ b/apps/server/src/vcs/VcsStatusBroadcaster.ts @@ -0,0 +1,344 @@ +import { realpathSync } from "node:fs"; + +import { + Context, + Duration, + Effect, + Exit, + Fiber, + Layer, + PubSub, + Ref, + Scope, + Stream, + SynchronizedRef, +} from "effect"; +import type { + GitManagerServiceError, + VcsStatusInput, + VcsStatusLocalResult, + VcsStatusRemoteResult, + VcsStatusResult, + VcsStatusStreamEvent, +} from "@t3tools/contracts"; +import { mergeGitStatusParts } from "@t3tools/shared/git"; + +import * as GitWorkflowService from "../git/GitWorkflowService.ts"; + +const VCS_STATUS_REFRESH_INTERVAL = Duration.seconds(30); + +interface VcsStatusChange { + readonly cwd: string; + readonly event: VcsStatusStreamEvent; +} + +interface CachedValue { + readonly fingerprint: string; + readonly value: T; +} + +interface CachedVcsStatus { + readonly local: CachedValue | null; + readonly remote: CachedValue | null; +} + +interface ActiveRemotePoller { + readonly fiber: Fiber.Fiber; + readonly subscriberCount: number; +} + +export interface VcsStatusBroadcasterShape { + readonly getStatus: ( + input: VcsStatusInput, + ) => Effect.Effect; + readonly refreshLocalStatus: ( + cwd: string, + ) => Effect.Effect; + readonly refreshStatus: (cwd: string) => Effect.Effect; + readonly streamStatus: ( + input: VcsStatusInput, + ) => Stream.Stream; +} + +export class VcsStatusBroadcaster extends Context.Service< + VcsStatusBroadcaster, + VcsStatusBroadcasterShape +>()("t3/vcs/VcsStatusBroadcaster") {} + +function fingerprintStatusPart(status: unknown): string { + return JSON.stringify(status); +} + +function normalizeCwd(cwd: string): string { + try { + return realpathSync.native(cwd); + } catch { + return cwd; + } +} + +export const layer = 
Layer.effect( + VcsStatusBroadcaster, + Effect.gen(function* () { + const workflow = yield* GitWorkflowService.GitWorkflowService; + const changesPubSub = yield* Effect.acquireRelease( + PubSub.unbounded(), + (pubsub) => PubSub.shutdown(pubsub), + ); + const broadcasterScope = yield* Effect.acquireRelease(Scope.make(), (scope) => + Scope.close(scope, Exit.void), + ); + const cacheRef = yield* Ref.make(new Map()); + const pollersRef = yield* SynchronizedRef.make(new Map()); + + const getCachedStatus = Effect.fn("VcsStatusBroadcaster.getCachedStatus")(function* ( + cwd: string, + ) { + return yield* Ref.get(cacheRef).pipe(Effect.map((cache) => cache.get(cwd) ?? null)); + }); + + const updateCachedLocalStatus = Effect.fn("VcsStatusBroadcaster.updateCachedLocalStatus")( + function* (cwd: string, local: VcsStatusLocalResult, options?: { publish?: boolean }) { + const nextLocal = { + fingerprint: fingerprintStatusPart(local), + value: local, + } satisfies CachedValue; + const shouldPublish = yield* Ref.modify(cacheRef, (cache) => { + const previous = cache.get(cwd) ?? { local: null, remote: null }; + const nextCache = new Map(cache); + nextCache.set(cwd, { + ...previous, + local: nextLocal, + }); + return [previous.local?.fingerprint !== nextLocal.fingerprint, nextCache] as const; + }); + + if (options?.publish && shouldPublish) { + yield* PubSub.publish(changesPubSub, { + cwd, + event: { + _tag: "localUpdated", + local, + }, + }); + } + + return local; + }, + ); + + const updateCachedRemoteStatus = Effect.fn("VcsStatusBroadcaster.updateCachedRemoteStatus")( + function* ( + cwd: string, + remote: VcsStatusRemoteResult | null, + options?: { publish?: boolean }, + ) { + const nextRemote = { + fingerprint: fingerprintStatusPart(remote), + value: remote, + } satisfies CachedValue; + const shouldPublish = yield* Ref.modify(cacheRef, (cache) => { + const previous = cache.get(cwd) ?? 
{ local: null, remote: null }; + const nextCache = new Map(cache); + nextCache.set(cwd, { + ...previous, + remote: nextRemote, + }); + return [previous.remote?.fingerprint !== nextRemote.fingerprint, nextCache] as const; + }); + + if (options?.publish && shouldPublish) { + yield* PubSub.publish(changesPubSub, { + cwd, + event: { + _tag: "remoteUpdated", + remote, + }, + }); + } + + return remote; + }, + ); + + const loadLocalStatus = Effect.fn("VcsStatusBroadcaster.loadLocalStatus")(function* ( + cwd: string, + ) { + const local = yield* workflow.localStatus({ cwd }); + return yield* updateCachedLocalStatus(cwd, local); + }); + + const loadRemoteStatus = Effect.fn("VcsStatusBroadcaster.loadRemoteStatus")(function* ( + cwd: string, + ) { + const remote = yield* workflow.remoteStatus({ cwd }); + return yield* updateCachedRemoteStatus(cwd, remote); + }); + + const getOrLoadLocalStatus = Effect.fn("VcsStatusBroadcaster.getOrLoadLocalStatus")(function* ( + cwd: string, + ) { + const cached = yield* getCachedStatus(cwd); + if (cached?.local) { + return cached.local.value; + } + return yield* loadLocalStatus(cwd); + }); + + const getOrLoadRemoteStatus = Effect.fn("VcsStatusBroadcaster.getOrLoadRemoteStatus")( + function* (cwd: string) { + const cached = yield* getCachedStatus(cwd); + if (cached?.remote) { + return cached.remote.value; + } + return yield* loadRemoteStatus(cwd); + }, + ); + + const getStatus: VcsStatusBroadcasterShape["getStatus"] = Effect.fn( + "VcsStatusBroadcaster.getStatus", + )(function* (input) { + const cwd = normalizeCwd(input.cwd); + const [local, remote] = yield* Effect.all([ + getOrLoadLocalStatus(cwd), + getOrLoadRemoteStatus(cwd), + ]); + return mergeGitStatusParts(local, remote); + }); + + const refreshLocalStatus: VcsStatusBroadcasterShape["refreshLocalStatus"] = Effect.fn( + "VcsStatusBroadcaster.refreshLocalStatus", + )(function* (rawCwd) { + const cwd = normalizeCwd(rawCwd); + yield* workflow.invalidateLocalStatus(cwd); + const local = 
yield* workflow.localStatus({ cwd }); + return yield* updateCachedLocalStatus(cwd, local, { publish: true }); + }); + + const refreshRemoteStatus = Effect.fn("VcsStatusBroadcaster.refreshRemoteStatus")(function* ( + cwd: string, + ) { + yield* workflow.invalidateRemoteStatus(cwd); + const remote = yield* workflow.remoteStatus({ cwd }); + return yield* updateCachedRemoteStatus(cwd, remote, { publish: true }); + }); + + const refreshStatus: VcsStatusBroadcasterShape["refreshStatus"] = Effect.fn( + "VcsStatusBroadcaster.refreshStatus", + )(function* (rawCwd) { + const cwd = normalizeCwd(rawCwd); + const [local, remote] = yield* Effect.all([ + refreshLocalStatus(cwd), + refreshRemoteStatus(cwd), + ]); + return mergeGitStatusParts(local, remote); + }); + + const makeRemoteRefreshLoop = (cwd: string) => { + const logRefreshFailure = (error: Error) => + Effect.logWarning("VCS remote status refresh failed", { + cwd, + detail: error.message, + }); + + return refreshRemoteStatus(cwd).pipe( + Effect.catch(logRefreshFailure), + Effect.andThen( + Effect.forever( + Effect.sleep(VCS_STATUS_REFRESH_INTERVAL).pipe( + Effect.andThen(refreshRemoteStatus(cwd).pipe(Effect.catch(logRefreshFailure))), + ), + ), + ), + ); + }; + + const retainRemotePoller = Effect.fn("VcsStatusBroadcaster.retainRemotePoller")(function* ( + cwd: string, + ) { + yield* SynchronizedRef.modifyEffect(pollersRef, (activePollers) => { + const existing = activePollers.get(cwd); + if (existing) { + const nextPollers = new Map(activePollers); + nextPollers.set(cwd, { + ...existing, + subscriberCount: existing.subscriberCount + 1, + }); + return Effect.succeed([undefined, nextPollers] as const); + } + + return makeRemoteRefreshLoop(cwd).pipe( + Effect.forkIn(broadcasterScope), + Effect.map((fiber) => { + const nextPollers = new Map(activePollers); + nextPollers.set(cwd, { + fiber, + subscriberCount: 1, + }); + return [undefined, nextPollers] as const; + }), + ); + }); + }); + + const releaseRemotePoller = 
Effect.fn("VcsStatusBroadcaster.releaseRemotePoller")(function* ( + cwd: string, + ) { + const pollerToInterrupt = yield* SynchronizedRef.modify(pollersRef, (activePollers) => { + const existing = activePollers.get(cwd); + if (!existing) { + return [null, activePollers] as const; + } + + if (existing.subscriberCount > 1) { + const nextPollers = new Map(activePollers); + nextPollers.set(cwd, { + ...existing, + subscriberCount: existing.subscriberCount - 1, + }); + return [null, nextPollers] as const; + } + + const nextPollers = new Map(activePollers); + nextPollers.delete(cwd); + return [existing.fiber, nextPollers] as const; + }); + + if (pollerToInterrupt) { + yield* Fiber.interrupt(pollerToInterrupt).pipe(Effect.ignore); + } + }); + + const streamStatus: VcsStatusBroadcasterShape["streamStatus"] = (input) => + Stream.unwrap( + Effect.gen(function* () { + const cwd = normalizeCwd(input.cwd); + const subscription = yield* PubSub.subscribe(changesPubSub); + const initialLocal = yield* getOrLoadLocalStatus(cwd); + const initialRemote = (yield* getCachedStatus(cwd))?.remote?.value ?? 
null; + yield* retainRemotePoller(cwd); + + const release = releaseRemotePoller(cwd).pipe(Effect.ignore, Effect.asVoid); + + return Stream.concat( + Stream.make({ + _tag: "snapshot" as const, + local: initialLocal, + remote: initialRemote, + }), + Stream.fromSubscription(subscription).pipe( + Stream.filter((event) => event.cwd === cwd), + Stream.map((event) => event.event), + ), + ).pipe(Stream.ensuring(release)); + }), + ); + + return VcsStatusBroadcaster.of({ + getStatus, + refreshLocalStatus, + refreshStatus, + streamStatus, + }); + }), +); diff --git a/apps/server/src/vcs/testing/VcsDriverContractHarness.ts b/apps/server/src/vcs/testing/VcsDriverContractHarness.ts new file mode 100644 index 00000000000..f513f03b756 --- /dev/null +++ b/apps/server/src/vcs/testing/VcsDriverContractHarness.ts @@ -0,0 +1,163 @@ +import { assert, it, describe } from "@effect/vitest"; +import { + Effect, + FileSystem, + Layer, + Path, + type PlatformError, + type Scope, + DateTime, + Option, +} from "effect"; + +import type { VcsDriverKind } from "@t3tools/contracts"; +import * as VcsDriver from "../VcsDriver.ts"; + +export interface VcsDriverFixture { + readonly createRepo: (cwd: string) => Effect.Effect; + readonly writeFile: ( + cwd: string, + relativePath: string, + contents: string, + ) => Effect.Effect; + readonly trackFile?: (cwd: string, relativePath: string) => Effect.Effect; + readonly commit?: (cwd: string, message: string) => Effect.Effect; + readonly ignorePath: ( + cwd: string, + pattern: string, + ) => Effect.Effect; +} + +export interface VcsDriverContractSuiteInput { + readonly name: string; + readonly kind: VcsDriverKind; + readonly layer: Layer.Layer< + VcsDriver.VcsDriver | R | FileSystem.FileSystem | Path.Path, + E, + never + >; + readonly fixture: VcsDriverFixture; +} + +export function runVcsDriverContractSuite(input: VcsDriverContractSuiteInput) { + const makeTmpDir = ( + prefix = `t3-${input.kind}-vcs-contract-`, + ): Effect.Effect => + Effect.gen(function* 
() { + const fileSystem = yield* FileSystem.FileSystem; + return yield* fileSystem.makeTempDirectoryScoped({ prefix }); + }); + + it.layer(input.layer)(`${input.name} VCS driver contract`, (it) => { + describe("repository detection", () => { + it.effect("returns null outside a repository", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const driver = yield* VcsDriver.VcsDriver; + + assert.equal(yield* driver.detectRepository(cwd), null); + assert.equal(yield* driver.isInsideWorkTree(cwd), false); + }), + ); + + it.effect("detects repository identity inside a repository and nested directories", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const driver = yield* VcsDriver.VcsDriver; + + yield* input.fixture.createRepo(cwd); + yield* input.fixture.writeFile(cwd, "src/index.ts", "export const value = 1;\n"); + const identity = yield* driver.detectRepository(cwd); + assert.equal(identity?.kind, input.kind); + assert.isTrue(identity?.rootPath.endsWith(cwd)); + assert.equal(identity?.freshness.source, "live-local"); + assert.isTrue(DateTime.isDateTime(identity?.freshness.observedAt)); + assert.isTrue(Option.isNone(identity?.freshness.expiresAt ?? 
Option.none())); + assert.equal(yield* driver.isInsideWorkTree(cwd), true); + + const path = yield* Path.Path; + const nestedDir = path.join(cwd, "src"); + const nestedIdentity = yield* driver.detectRepository(nestedDir); + assert.equal(nestedIdentity?.rootPath, identity?.rootPath); + assert.equal(yield* driver.isInsideWorkTree(nestedDir), true); + }), + ); + }); + + describe("workspace files", () => { + it.effect("lists tracked and untracked non-ignored files", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const driver = yield* VcsDriver.VcsDriver; + + yield* input.fixture.createRepo(cwd); + yield* input.fixture.writeFile(cwd, "tracked.ts", "export const tracked = true;\n"); + if (input.fixture.trackFile && input.fixture.commit) { + yield* input.fixture.trackFile(cwd, "tracked.ts"); + yield* input.fixture.commit(cwd, "Track file"); + } + yield* input.fixture.writeFile(cwd, "untracked.ts", "export const untracked = true;\n"); + + const result = yield* driver.listWorkspaceFiles(cwd); + + assert.include(result.paths, "tracked.ts"); + assert.include(result.paths, "untracked.ts"); + assert.equal(result.truncated, false); + assert.equal(result.freshness.source, "live-local"); + assert.isTrue(DateTime.isDateTime(result.freshness.observedAt)); + assert.isTrue(Option.isNone(result.freshness.expiresAt)); + }), + ); + + it.effect("excludes ignored files from workspace listing", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const driver = yield* VcsDriver.VcsDriver; + + yield* input.fixture.createRepo(cwd); + yield* input.fixture.ignorePath(cwd, "*.log"); + yield* input.fixture.writeFile(cwd, "included.ts", "export const included = true;\n"); + yield* input.fixture.writeFile(cwd, "debug.log", "ignore me\n"); + yield* input.fixture.writeFile(cwd, "nested/error.log", "ignore me too\n"); + + const result = yield* driver.listWorkspaceFiles(cwd); + + assert.include(result.paths, "included.ts"); + assert.notInclude(result.paths, 
"debug.log"); + assert.notInclude(result.paths, "nested/error.log"); + }), + ); + }); + + describe("ignored path filtering", () => { + it.effect("filters ignored paths", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const driver = yield* VcsDriver.VcsDriver; + + yield* input.fixture.createRepo(cwd); + yield* input.fixture.ignorePath(cwd, "*.log"); + + const result = yield* driver.filterIgnoredPaths(cwd, [ + "keep.ts", + "debug.log", + "nested/error.log", + ]); + + assert.deepStrictEqual(result, ["keep.ts"]); + }), + ); + + it.effect("returns empty input unchanged", () => + Effect.gen(function* () { + const cwd = yield* makeTmpDir(); + const driver = yield* VcsDriver.VcsDriver; + + yield* input.fixture.createRepo(cwd); + + assert.deepStrictEqual(yield* driver.filterIgnoredPaths(cwd, []), []); + }), + ); + }); + }); +} diff --git a/apps/server/src/workspace/Layers/WorkspaceEntries.test.ts b/apps/server/src/workspace/Layers/WorkspaceEntries.test.ts index 85b43ab37f6..a7385794e93 100644 --- a/apps/server/src/workspace/Layers/WorkspaceEntries.test.ts +++ b/apps/server/src/workspace/Layers/WorkspaceEntries.test.ts @@ -5,8 +5,8 @@ import { it, afterEach, describe, expect, vi } from "@effect/vitest"; import { Effect, FileSystem, Layer, Path, PlatformError } from "effect"; import { ServerConfig } from "../../config.ts"; -import { GitCoreLive } from "../../git/Layers/GitCore.ts"; -import { GitCore } from "../../git/Services/GitCore.ts"; +import * as VcsDriverRegistry from "../../vcs/VcsDriverRegistry.ts"; +import * as VcsProcess from "../../vcs/VcsProcess.ts"; import { WorkspaceEntries } from "../Services/WorkspaceEntries.ts"; import { WorkspaceEntriesLive } from "./WorkspaceEntries.ts"; import { WorkspacePathsLive } from "./WorkspacePaths.ts"; @@ -14,7 +14,8 @@ import { WorkspacePathsLive } from "./WorkspacePaths.ts"; const TestLayer = Layer.empty.pipe( Layer.provideMerge(WorkspaceEntriesLive.pipe(Layer.provide(WorkspacePathsLive))), 
Layer.provideMerge(WorkspacePathsLive), - Layer.provideMerge(GitCoreLive), + Layer.provideMerge(VcsProcess.layer), + Layer.provideMerge(VcsDriverRegistry.layer.pipe(Layer.provide(VcsProcess.layer))), Layer.provide( ServerConfig.layerTest(process.cwd(), { prefix: "t3-workspace-entries-test-", @@ -25,12 +26,11 @@ const TestLayer = Layer.empty.pipe( const makeTempDir = Effect.fn(function* (opts?: { prefix?: string; git?: boolean }) { const fileSystem = yield* FileSystem.FileSystem; - const gitCore = yield* GitCore; const dir = yield* fileSystem.makeTempDirectoryScoped({ prefix: opts?.prefix ?? "t3code-workspace-entries-", }); if (opts?.git) { - yield* gitCore.initRepo({ cwd: dir }); + yield* git(dir, ["init"]); } return dir; }); @@ -51,9 +51,10 @@ function writeTextFile( const git = (cwd: string, args: ReadonlyArray, env?: NodeJS.ProcessEnv) => Effect.gen(function* () { - const gitCore = yield* GitCore; - const result = yield* gitCore.execute({ + const process = yield* VcsProcess.VcsProcess; + const result = yield* process.run({ operation: "WorkspaceEntries.test.git", + command: "git", cwd, args, ...(env ? 
{ env } : {}), diff --git a/apps/server/src/workspace/Layers/WorkspaceEntries.ts b/apps/server/src/workspace/Layers/WorkspaceEntries.ts index 497c5170c38..3046546813f 100644 --- a/apps/server/src/workspace/Layers/WorkspaceEntries.ts +++ b/apps/server/src/workspace/Layers/WorkspaceEntries.ts @@ -2,7 +2,7 @@ import * as OS from "node:os"; import fsPromises from "node:fs/promises"; import type { Dirent } from "node:fs"; -import { Cache, Duration, Effect, Exit, Layer, Option, Path } from "effect"; +import { Cache, DateTime, Duration, Effect, Exit, Layer, Path } from "effect"; import { type FilesystemBrowseInput, type ProjectEntry } from "@t3tools/contracts"; import { isExplicitRelativePath, isWindowsAbsolutePath } from "@t3tools/shared/path"; @@ -13,7 +13,7 @@ import { type RankedSearchResult, } from "@t3tools/shared/searchRanking"; -import { GitCore } from "../../git/Services/GitCore.ts"; +import { VcsDriverRegistry } from "../../vcs/VcsDriverRegistry.ts"; import { WorkspaceEntries, WorkspaceEntriesBrowseError, @@ -126,8 +126,9 @@ function scoreEntry(entry: SearchableWorkspaceEntry, query: string): number | nu } function isPathInIgnoredDirectory(relativePath: string): boolean { - const segments = relativePath.split("/").filter((segment) => segment.length > 0); - return segments.slice(0, -1).some((segment) => IGNORED_DIRECTORY_NAMES.has(segment)); + const firstSegment = relativePath.split("/")[0]; + if (!firstSegment) return false; + return IGNORED_DIRECTORY_NAMES.has(firstSegment); } function directoryAncestorsOf(relativePath: string): string[] { @@ -173,38 +174,39 @@ const resolveBrowseTarget = ( export const makeWorkspaceEntries = Effect.gen(function* () { const path = yield* Path.Path; - const gitOption = yield* Effect.serviceOption(GitCore); + const vcsRegistry = yield* VcsDriverRegistry; const workspacePaths = yield* WorkspacePaths; - const isInsideGitWorkTree = (cwd: string): Effect.Effect => - Option.match(gitOption, { - onSome: (git) => 
git.isInsideWorkTree(cwd).pipe(Effect.catch(() => Effect.succeed(false))), - onNone: () => Effect.succeed(false), - }); + const isInsideVcsWorkTree = (cwd: string): Effect.Effect => + vcsRegistry.detect({ cwd }).pipe( + Effect.map((handle) => handle !== null), + Effect.catch(() => Effect.succeed(false)), + ); - const filterGitIgnoredPaths = ( + const filterVcsIgnoredPaths = ( cwd: string, relativePaths: string[], ): Effect.Effect => - Option.match(gitOption, { - onSome: (git) => - git.filterIgnoredPaths(cwd, relativePaths).pipe( - Effect.map((paths) => [...paths]), - Effect.catch(() => Effect.succeed(relativePaths)), - ), - onNone: () => Effect.succeed(relativePaths), - }); - - const buildWorkspaceIndexFromGit = Effect.fn("WorkspaceEntries.buildWorkspaceIndexFromGit")( + vcsRegistry.detect({ cwd }).pipe( + Effect.flatMap((handle) => + handle + ? handle.driver.filterIgnoredPaths(cwd, relativePaths).pipe( + Effect.map((paths) => [...paths]), + Effect.catch(() => Effect.succeed(relativePaths)), + ) + : Effect.succeed(relativePaths), + ), + Effect.catch(() => Effect.succeed(relativePaths)), + ); + + const buildWorkspaceIndexFromVcs = Effect.fn("WorkspaceEntries.buildWorkspaceIndexFromVcs")( function* (cwd: string) { - if (Option.isNone(gitOption)) { - return null; - } - if (!(yield* isInsideGitWorkTree(cwd))) { + const vcs = yield* vcsRegistry.detect({ cwd }).pipe(Effect.catch(() => Effect.succeed(null))); + if (!vcs) { return null; } - const listedFiles = yield* gitOption.value + const listedFiles = yield* vcs.driver .listWorkspaceFiles(cwd) .pipe(Effect.catch(() => Effect.succeed(null))); @@ -215,7 +217,10 @@ export const makeWorkspaceEntries = Effect.gen(function* () { const listedPaths = [...listedFiles.paths] .map((entry) => toPosixPath(entry)) .filter((entry) => entry.length > 0 && !isPathInIgnoredDirectory(entry)); - const filePaths = yield* filterGitIgnoredPaths(cwd, listedPaths); + const filePaths = yield* vcs.driver.filterIgnoredPaths(cwd, listedPaths).pipe( 
+ Effect.map((paths) => [...paths]), + Effect.catch(() => filterVcsIgnoredPaths(cwd, listedPaths)), + ); const directorySet = new Set(); for (const filePath of filePaths) { @@ -247,16 +252,12 @@ export const makeWorkspaceEntries = Effect.gen(function* () { ) .map(toSearchableWorkspaceEntry); - const directoryBudget = Math.floor(WORKSPACE_INDEX_MAX_ENTRIES / 2); - const cappedDirectories = directoryEntries.slice(0, directoryBudget); - const fileBudget = WORKSPACE_INDEX_MAX_ENTRIES - cappedDirectories.length; - const cappedFiles = fileEntries.slice(0, fileBudget); - const entries = [...cappedDirectories, ...cappedFiles]; - const totalAvailable = directoryEntries.length + fileEntries.length; + const now = yield* DateTime.now; + const entries = [...directoryEntries, ...fileEntries]; return { - scannedAt: Date.now(), - entries, - truncated: listedFiles.truncated || totalAvailable > WORKSPACE_INDEX_MAX_ENTRIES, + scannedAt: now.epochMilliseconds, + entries: entries.slice(0, WORKSPACE_INDEX_MAX_ENTRIES), + truncated: listedFiles.truncated || entries.length > WORKSPACE_INDEX_MAX_ENTRIES, }; }, ); @@ -283,15 +284,7 @@ export const makeWorkspaceEntries = Effect.gen(function* () { }), }).pipe( Effect.catchIf( - (error) => { - if (relativeDir.length === 0) return false; - const cause = error.cause; - if (cause instanceof Error && "code" in cause) { - const code = (cause as NodeJS.ErrnoException).code; - return code === "ENOENT" || code === "ENOTDIR"; - } - return false; - }, + () => relativeDir.length > 0, () => Effect.succeed({ relativeDir, dirents: null }), ), ); @@ -300,7 +293,7 @@ export const makeWorkspaceEntries = Effect.gen(function* () { const buildWorkspaceIndexFromFilesystem = Effect.fn( "WorkspaceEntries.buildWorkspaceIndexFromFilesystem", )(function* (cwd: string): Effect.fn.Return { - const shouldFilterWithGitIgnore = yield* isInsideGitWorkTree(cwd); + const shouldFilterWithGitIgnore = yield* isInsideVcsWorkTree(cwd); let pendingDirectories: string[] = [""]; const 
entries: SearchableWorkspaceEntry[] = []; @@ -348,7 +341,7 @@ export const makeWorkspaceEntries = Effect.gen(function* () { candidateEntries.map((entry) => entry.relativePath), ); const allowedPathSet = shouldFilterWithGitIgnore - ? new Set(yield* filterGitIgnoredPaths(cwd, candidatePaths)) + ? new Set(yield* filterVcsIgnoredPaths(cwd, candidatePaths)) : null; for (const candidateEntries of candidateEntriesByDirectory) { @@ -380,8 +373,9 @@ export const makeWorkspaceEntries = Effect.gen(function* () { } } + const now = yield* DateTime.now; return { - scannedAt: Date.now(), + scannedAt: now.epochMilliseconds, entries, truncated, }; @@ -390,9 +384,9 @@ export const makeWorkspaceEntries = Effect.gen(function* () { const buildWorkspaceIndex = Effect.fn("WorkspaceEntries.buildWorkspaceIndex")(function* ( cwd: string, ): Effect.fn.Return { - const gitIndexed = yield* buildWorkspaceIndexFromGit(cwd); - if (gitIndexed) { - return gitIndexed; + const vcsIndexed = yield* buildWorkspaceIndexFromVcs(cwd); + if (vcsIndexed) { + return vcsIndexed; } return yield* buildWorkspaceIndexFromFilesystem(cwd); }); diff --git a/apps/server/src/workspace/Layers/WorkspaceFileSystem.test.ts b/apps/server/src/workspace/Layers/WorkspaceFileSystem.test.ts index fcfd13c912e..9c38b88f851 100644 --- a/apps/server/src/workspace/Layers/WorkspaceFileSystem.test.ts +++ b/apps/server/src/workspace/Layers/WorkspaceFileSystem.test.ts @@ -3,7 +3,8 @@ import { it, describe, expect } from "@effect/vitest"; import { Effect, FileSystem, Layer, Path } from "effect"; import { ServerConfig } from "../../config.ts"; -import { GitCoreLive } from "../../git/Layers/GitCore.ts"; +import * as VcsDriverRegistry from "../../vcs/VcsDriverRegistry.ts"; +import * as VcsProcess from "../../vcs/VcsProcess.ts"; import { WorkspaceEntries } from "../Services/WorkspaceEntries.ts"; import { WorkspaceFileSystem } from "../Services/WorkspaceFileSystem.ts"; import { WorkspaceEntriesLive } from "./WorkspaceEntries.ts"; @@ -19,7 
+20,7 @@ const TestLayer = Layer.empty.pipe( Layer.provideMerge(ProjectLayer), Layer.provideMerge(WorkspaceEntriesLive.pipe(Layer.provide(WorkspacePathsLive))), Layer.provideMerge(WorkspacePathsLive), - Layer.provideMerge(GitCoreLive), + Layer.provideMerge(VcsDriverRegistry.layer.pipe(Layer.provide(VcsProcess.layer))), Layer.provide( ServerConfig.layerTest(process.cwd(), { prefix: "t3-workspace-files-test-", diff --git a/apps/server/src/ws.ts b/apps/server/src/ws.ts index aac716cfeb6..fbefe6eac62 100644 --- a/apps/server/src/ws.ts +++ b/apps/server/src/ws.ts @@ -29,9 +29,6 @@ import { RpcSerialization, RpcServer } from "effect/unstable/rpc"; import { CheckpointDiffQuery } from "./checkpointing/Services/CheckpointDiffQuery.ts"; import { ServerConfig } from "./config.ts"; -import { GitCore } from "./git/Services/GitCore.ts"; -import { GitManager } from "./git/Services/GitManager.ts"; -import { GitStatusBroadcaster } from "./git/Services/GitStatusBroadcaster.ts"; import { Keybindings } from "./keybindings.ts"; import { Open, resolveAvailableEditors } from "./open.ts"; import { normalizeDispatchCommand } from "./orchestration/Normalizer.ts"; @@ -45,15 +42,29 @@ import { import { ProviderRegistry } from "./provider/Services/ProviderRegistry.ts"; import { ServerLifecycleEvents } from "./serverLifecycleEvents.ts"; import { ServerRuntimeStartup } from "./serverRuntimeStartup.ts"; -import { ServerSettingsService } from "./serverSettings.ts"; +import { redactServerSettingsForClient, ServerSettingsService } from "./serverSettings.ts"; import { TerminalManager } from "./terminal/Services/Manager.ts"; import { WorkspaceEntries } from "./workspace/Services/WorkspaceEntries.ts"; import { WorkspaceFileSystem } from "./workspace/Services/WorkspaceFileSystem.ts"; import { WorkspacePathOutsideRootError } from "./workspace/Services/WorkspacePaths.ts"; +import { VcsStatusBroadcaster } from "./vcs/VcsStatusBroadcaster.ts"; +import { VcsProvisioningService } from 
"./vcs/VcsProvisioningService.ts"; +import { GitWorkflowService } from "./git/GitWorkflowService.ts"; import { ProjectSetupScriptRunner } from "./project/Services/ProjectSetupScriptRunner.ts"; import { RepositoryIdentityResolver } from "./project/Services/RepositoryIdentityResolver.ts"; import { ServerEnvironment } from "./environment/Services/ServerEnvironment.ts"; import { ServerAuth } from "./auth/Services/ServerAuth.ts"; +import * as SourceControlDiscoveryLayer from "./sourceControl/SourceControlDiscovery.ts"; +import { SourceControlRepositoryService } from "./sourceControl/SourceControlRepositoryService.ts"; +import * as AzureDevOpsCli from "./sourceControl/AzureDevOpsCli.ts"; +import * as BitbucketApi from "./sourceControl/BitbucketApi.ts"; +import * as GitHubCli from "./sourceControl/GitHubCli.ts"; +import * as GitLabCli from "./sourceControl/GitLabCli.ts"; +import * as SourceControlProviderRegistry from "./sourceControl/SourceControlProviderRegistry.ts"; +import * as GitVcsDriver from "./vcs/GitVcsDriver.ts"; +import * as VcsDriverRegistry from "./vcs/VcsDriverRegistry.ts"; +import * as VcsProjectConfig from "./vcs/VcsProjectConfig.ts"; +import * as VcsProcess from "./vcs/VcsProcess.ts"; import { BootstrapCredentialService, type BootstrapCredentialChange, @@ -136,9 +147,9 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => const checkpointDiffQuery = yield* CheckpointDiffQuery; const keybindings = yield* Keybindings; const open = yield* Open; - const gitManager = yield* GitManager; - const git = yield* GitCore; - const gitStatusBroadcaster = yield* GitStatusBroadcaster; + const gitWorkflow = yield* GitWorkflowService; + const vcsProvisioning = yield* VcsProvisioningService; + const vcsStatusBroadcaster = yield* VcsStatusBroadcaster; const terminalManager = yield* TerminalManager; const providerRegistry = yield* ProviderRegistry; const config = yield* ServerConfig; @@ -151,6 +162,8 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => 
const repositoryIdentityResolver = yield* RepositoryIdentityResolver; const serverEnvironment = yield* ServerEnvironment; const serverAuth = yield* ServerAuth; + const sourceControlDiscovery = yield* SourceControlDiscoveryLayer.SourceControlDiscovery; + const sourceControlRepositories = yield* SourceControlRepositoryService; const bootstrapCredentials = yield* BootstrapCredentialService; const sessions = yield* SessionCredentialService; const serverCommandId = (tag: string) => @@ -453,10 +466,10 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => } if (bootstrap?.prepareWorktree) { - const worktree = yield* git.createWorktree({ + const worktree = yield* gitWorkflow.createWorktree({ cwd: bootstrap.prepareWorktree.projectCwd, - branch: bootstrap.prepareWorktree.baseBranch, - newBranch: bootstrap.prepareWorktree.branch, + refName: bootstrap.prepareWorktree.baseBranch, + newRefName: bootstrap.prepareWorktree.branch, path: null, }); targetWorktreePath = worktree.worktree.path; @@ -464,7 +477,7 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => type: "thread.meta.update", commandId: serverCommandId("bootstrap-thread-meta-update"), threadId: command.threadId, - branch: worktree.worktree.branch, + branch: worktree.worktree.refName, worktreePath: targetWorktreePath, }); yield* refreshGitStatus(targetWorktreePath); @@ -512,7 +525,7 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => const loadServerConfig = Effect.gen(function* () { const keybindingsConfig = yield* keybindings.loadConfigState; const providers = yield* providerRegistry.getProviders; - const settings = yield* serverSettings.getSettings; + const settings = redactServerSettingsForClient(yield* serverSettings.getSettings); const environment = yield* serverEnvironment.getDescriptor; const auth = yield* serverAuth.getDescriptor(); @@ -540,7 +553,7 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => }); const refreshGitStatus = (cwd: string) => - gitStatusBroadcaster + 
vcsStatusBroadcaster .refreshStatus(cwd) .pipe(Effect.ignoreCause({ log: true }), Effect.forkDetach, Effect.asVoid); @@ -667,6 +680,9 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => ORCHESTRATION_WS_METHODS.subscribeShell, Effect.gen(function* () { const snapshot = yield* projectionSnapshotQuery.getShellSnapshot().pipe( + Effect.tapError((cause) => + Effect.logError("orchestration shell snapshot load failed", { cause }), + ), Effect.mapError( (cause) => new OrchestrationGetSnapshotError({ @@ -749,10 +765,13 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => observeRpcEffect(WS_METHODS.serverGetConfig, loadServerConfig, { "rpc.aggregate": "server", }), - [WS_METHODS.serverRefreshProviders]: (_input) => + [WS_METHODS.serverRefreshProviders]: (input) => observeRpcEffect( WS_METHODS.serverRefreshProviders, - providerRegistry.refresh().pipe(Effect.map((providers) => ({ providers }))), + (input.instanceId !== undefined + ? providerRegistry.refreshInstance(input.instanceId) + : providerRegistry.refresh() + ).pipe(Effect.map((providers) => ({ providers }))), { "rpc.aggregate": "server" }, ), [WS_METHODS.serverUpsertKeybinding]: (rule) => @@ -765,13 +784,55 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => { "rpc.aggregate": "server" }, ), [WS_METHODS.serverGetSettings]: (_input) => - observeRpcEffect(WS_METHODS.serverGetSettings, serverSettings.getSettings, { - "rpc.aggregate": "server", - }), + observeRpcEffect( + WS_METHODS.serverGetSettings, + serverSettings.getSettings.pipe(Effect.map(redactServerSettingsForClient)), + { + "rpc.aggregate": "server", + }, + ), [WS_METHODS.serverUpdateSettings]: ({ patch }) => - observeRpcEffect(WS_METHODS.serverUpdateSettings, serverSettings.updateSettings(patch), { - "rpc.aggregate": "server", - }), + observeRpcEffect( + WS_METHODS.serverUpdateSettings, + serverSettings.updateSettings(patch).pipe(Effect.map(redactServerSettingsForClient)), + { + "rpc.aggregate": "server", + }, + ), + 
[WS_METHODS.serverDiscoverSourceControl]: (_input) => + observeRpcEffect( + WS_METHODS.serverDiscoverSourceControl, + sourceControlDiscovery.discover, + { + "rpc.aggregate": "server", + }, + ), + [WS_METHODS.sourceControlLookupRepository]: (input) => + observeRpcEffect( + WS_METHODS.sourceControlLookupRepository, + sourceControlRepositories.lookupRepository(input), + { + "rpc.aggregate": "source-control", + }, + ), + [WS_METHODS.sourceControlCloneRepository]: (input) => + observeRpcEffect( + WS_METHODS.sourceControlCloneRepository, + sourceControlRepositories.cloneRepository(input), + { + "rpc.aggregate": "source-control", + }, + ), + [WS_METHODS.sourceControlPublishRepository]: (input) => + observeRpcEffect( + WS_METHODS.sourceControlPublishRepository, + sourceControlRepositories + .publishRepository(input) + .pipe(Effect.tap(() => refreshGitStatus(input.cwd))), + { + "rpc.aggregate": "source-control", + }, + ), [WS_METHODS.projectsSearchEntries]: (input) => observeRpcEffect( WS_METHODS.projectsSearchEntries, @@ -820,26 +881,26 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => ), { "rpc.aggregate": "workspace" }, ), - [WS_METHODS.subscribeGitStatus]: (input) => + [WS_METHODS.subscribeVcsStatus]: (input) => observeRpcStream( - WS_METHODS.subscribeGitStatus, - gitStatusBroadcaster.streamStatus(input), + WS_METHODS.subscribeVcsStatus, + vcsStatusBroadcaster.streamStatus(input), { - "rpc.aggregate": "git", + "rpc.aggregate": "vcs", }, ), - [WS_METHODS.gitRefreshStatus]: (input) => + [WS_METHODS.vcsRefreshStatus]: (input) => observeRpcEffect( - WS_METHODS.gitRefreshStatus, - gitStatusBroadcaster.refreshStatus(input.cwd), + WS_METHODS.vcsRefreshStatus, + vcsStatusBroadcaster.refreshStatus(input.cwd), { - "rpc.aggregate": "git", + "rpc.aggregate": "vcs", }, ), - [WS_METHODS.gitPull]: (input) => + [WS_METHODS.vcsPull]: (input) => observeRpcEffect( - WS_METHODS.gitPull, - git.pullCurrentBranch(input.cwd).pipe( + WS_METHODS.vcsPull, + 
gitWorkflow.pullCurrentBranch(input.cwd).pipe( Effect.matchCauseEffect({ onFailure: (cause) => Effect.failCause(cause), onSuccess: (result) => @@ -852,7 +913,7 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => observeRpcStream( WS_METHODS.gitRunStackedAction, Stream.callback((queue) => - gitManager + gitWorkflow .runStackedAction(input, { actionId: input.actionId, progressReporter: { @@ -869,55 +930,59 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => }), ), ), - { "rpc.aggregate": "git" }, + { "rpc.aggregate": "vcs" }, ), [WS_METHODS.gitResolvePullRequest]: (input) => - observeRpcEffect(WS_METHODS.gitResolvePullRequest, gitManager.resolvePullRequest(input), { - "rpc.aggregate": "git", - }), + observeRpcEffect( + WS_METHODS.gitResolvePullRequest, + gitWorkflow.resolvePullRequest(input), + { + "rpc.aggregate": "git", + }, + ), [WS_METHODS.gitPreparePullRequestThread]: (input) => observeRpcEffect( WS_METHODS.gitPreparePullRequestThread, - gitManager + gitWorkflow .preparePullRequestThread(input) .pipe(Effect.tap(() => refreshGitStatus(input.cwd))), { "rpc.aggregate": "git" }, ), - [WS_METHODS.gitListBranches]: (input) => - observeRpcEffect(WS_METHODS.gitListBranches, git.listBranches(input), { - "rpc.aggregate": "git", + [WS_METHODS.vcsListRefs]: (input) => + observeRpcEffect(WS_METHODS.vcsListRefs, gitWorkflow.listRefs(input), { + "rpc.aggregate": "vcs", }), - [WS_METHODS.gitCreateWorktree]: (input) => + [WS_METHODS.vcsCreateWorktree]: (input) => observeRpcEffect( - WS_METHODS.gitCreateWorktree, - git.createWorktree(input).pipe(Effect.tap(() => refreshGitStatus(input.cwd))), - { "rpc.aggregate": "git" }, + WS_METHODS.vcsCreateWorktree, + gitWorkflow.createWorktree(input).pipe(Effect.tap(() => refreshGitStatus(input.cwd))), + { "rpc.aggregate": "vcs" }, ), - [WS_METHODS.gitRemoveWorktree]: (input) => + [WS_METHODS.vcsRemoveWorktree]: (input) => observeRpcEffect( - WS_METHODS.gitRemoveWorktree, - git.removeWorktree(input).pipe(Effect.tap(() 
=> refreshGitStatus(input.cwd))), - { "rpc.aggregate": "git" }, + WS_METHODS.vcsRemoveWorktree, + gitWorkflow.removeWorktree(input).pipe(Effect.tap(() => refreshGitStatus(input.cwd))), + { "rpc.aggregate": "vcs" }, ), - [WS_METHODS.gitCreateBranch]: (input) => + [WS_METHODS.vcsCreateRef]: (input) => observeRpcEffect( - WS_METHODS.gitCreateBranch, - git.createBranch(input).pipe(Effect.tap(() => refreshGitStatus(input.cwd))), - { "rpc.aggregate": "git" }, + WS_METHODS.vcsCreateRef, + gitWorkflow.createRef(input).pipe(Effect.tap(() => refreshGitStatus(input.cwd))), + { "rpc.aggregate": "vcs" }, ), - [WS_METHODS.gitCheckout]: (input) => + [WS_METHODS.vcsSwitchRef]: (input) => observeRpcEffect( - WS_METHODS.gitCheckout, - Effect.scoped(git.checkoutBranch(input)).pipe( - Effect.tap(() => refreshGitStatus(input.cwd)), - ), - { "rpc.aggregate": "git" }, + WS_METHODS.vcsSwitchRef, + gitWorkflow.switchRef(input).pipe(Effect.tap(() => refreshGitStatus(input.cwd))), + { "rpc.aggregate": "vcs" }, ), - [WS_METHODS.gitInit]: (input) => + [WS_METHODS.vcsInit]: (input) => observeRpcEffect( - WS_METHODS.gitInit, - git.initRepo(input).pipe(Effect.tap(() => refreshGitStatus(input.cwd))), - { "rpc.aggregate": "git" }, + WS_METHODS.vcsInit, + vcsProvisioning + .initRepository(input) + .pipe(Effect.tap(() => refreshGitStatus(input.cwd))), + { "rpc.aggregate": "vcs" }, ), [WS_METHODS.terminalOpen]: (input) => observeRpcEffect(WS_METHODS.terminalOpen, terminalManager.open(input), { @@ -963,6 +1028,7 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => version: 1 as const, type: "keybindingsUpdated" as const, payload: { + keybindings: event.keybindings, issues: event.issues, }, })), @@ -976,6 +1042,7 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => Stream.debounce(Duration.millis(PROVIDER_STATUS_DEBOUNCE_MS)), ); const settingsUpdates = serverSettings.streamChanges.pipe( + Stream.map((settings) => redactServerSettingsForClient(settings)), Stream.map((settings) => ({ 
version: 1 as const, type: "settingsUpdated" as const, @@ -983,13 +1050,9 @@ const makeWsRpcLayer = (currentSessionId: AuthSessionId) => })), ); - yield* Effect.all( - [providerRegistry.refresh("codex"), providerRegistry.refresh("claudeAgent")], - { - concurrency: "unbounded", - discard: true, - }, - ).pipe(Effect.ignoreCause({ log: true }), Effect.forkScoped); + yield* providerRegistry + .refresh() + .pipe(Effect.ignoreCause({ log: true }), Effect.forkScoped); const liveUpdates = Stream.merge( keybindingsUpdates, @@ -1076,7 +1139,30 @@ export const websocketRpcRouteLayer = Layer.unwrap( }, }).pipe( Effect.provide( - makeWsRpcLayer(session.sessionId).pipe(Layer.provideMerge(RpcSerialization.layerJson)), + makeWsRpcLayer(session.sessionId).pipe( + Layer.provideMerge(RpcSerialization.layerJson), + Layer.provide( + SourceControlDiscoveryLayer.layer.pipe( + Layer.provide( + SourceControlProviderRegistry.layer.pipe( + Layer.provide( + Layer.mergeAll( + AzureDevOpsCli.layer, + BitbucketApi.layer, + GitHubCli.layer, + GitLabCli.layer, + ), + ), + Layer.provideMerge(GitVcsDriver.layer), + Layer.provide( + VcsDriverRegistry.layer.pipe(Layer.provide(VcsProjectConfig.layer)), + ), + ), + ), + Layer.provide(VcsProcess.layer), + ), + ), + ), ), ); return yield* Effect.acquireUseRelease( diff --git a/apps/web/index.html b/apps/web/index.html index 53e59c71bbc..53040ee5f88 100644 --- a/apps/web/index.html +++ b/apps/web/index.html @@ -2,7 +2,10 @@ - + diff --git a/apps/web/src/appSettings.test.ts b/apps/web/src/appSettings.test.ts deleted file mode 100644 index cf3548beeff..00000000000 --- a/apps/web/src/appSettings.test.ts +++ /dev/null @@ -1,440 +0,0 @@ -import { Schema } from "effect"; -import { afterEach, beforeEach, describe, expect, it } from "vitest"; - -import { - AppSettingsSchema, - DEFAULT_SIDEBAR_PROJECT_SORT_ORDER, - DEFAULT_SIDEBAR_THREAD_SORT_ORDER, - DEFAULT_TIMESTAMP_FORMAT, - getProviderStartOptions, -} from "./appSettings"; -import { - getAppModelOptions, - 
getAppSettingsSnapshot, - getCustomModelOptionsByProvider, - getCustomModelsByProvider, - getCustomModelsForProvider, - getDefaultCustomModelsForProvider, - MODEL_PROVIDER_SETTINGS, - normalizeCustomModelSlugs, - patchCustomModels, - patchGitTextGenerationModelOverrides, - resolveAppModelSelection, - resolveGitTextGenerationModelSelection, -} from "./appSettings"; - -/** Empty custom models for all providers — test helper */ -const EMPTY_CUSTOM_MODELS = { - codex: [] as readonly string[], - copilot: [] as readonly string[], - claudeAgent: [] as readonly string[], - cursor: [] as readonly string[], - opencode: [] as readonly string[], - geminiCli: [] as readonly string[], - amp: [] as readonly string[], - kilo: [] as readonly string[], -} as const; - -const APP_SETTINGS_STORAGE_KEY = "t3code:app-settings:v1"; - -const originalWindow = globalThis.window; -const originalLocalStorage = globalThis.localStorage; - -function createLocalStorageMock(): Storage { - const store = new Map(); - return { - get length() { - return store.size; - }, - clear() { - store.clear(); - }, - getItem(key) { - return store.get(key) ?? null; - }, - key(index) { - return Array.from(store.keys())[index] ?? 
null; - }, - removeItem(key) { - store.delete(key); - }, - setItem(key, value) { - store.set(key, String(value)); - }, - }; -} - -beforeEach(() => { - const localStorage = createLocalStorageMock(); - Object.defineProperty(globalThis, "localStorage", { - configurable: true, - value: localStorage, - }); - Object.defineProperty(globalThis, "window", { - configurable: true, - value: { - localStorage, - }, - }); -}); - -afterEach(() => { - Object.defineProperty(globalThis, "window", { - configurable: true, - value: originalWindow, - }); - Object.defineProperty(globalThis, "localStorage", { - configurable: true, - value: originalLocalStorage, - }); -}); - -describe("normalizeCustomModelSlugs", () => { - it("normalizes aliases, removes built-ins, and deduplicates values", () => { - expect( - normalizeCustomModelSlugs([ - " custom/internal-model ", - "gpt-5.3-codex", - "5.3", - "custom/internal-model", - "", - null, - ]), - ).toEqual(["custom/internal-model"]); - }); - - it("normalizes provider-specific aliases for claude and cursor", () => { - expect(normalizeCustomModelSlugs(["sonnet"], "claudeAgent")).toEqual([]); - expect(normalizeCustomModelSlugs(["claude/custom-sonnet"], "claudeAgent")).toEqual([ - "claude/custom-sonnet", - ]); - expect(normalizeCustomModelSlugs(["composer"], "cursor")).toEqual([]); - expect(normalizeCustomModelSlugs(["cursor/custom-model"], "cursor")).toEqual([ - "cursor/custom-model", - ]); - }); -}); - -describe("getAppModelOptions", () => { - it("appends saved custom models after the built-in options", () => { - const options = getAppModelOptions("codex", ["custom/internal-model"]); - - expect(options.map((option) => option.slug)).toEqual([ - "gpt-5.4", - "gpt-5.4-mini", - "gpt-5.3-codex", - "gpt-5.3-codex-spark", - "gpt-5.2-codex", - "gpt-5.2", - "custom/internal-model", - ]); - }); - - it("keeps the currently selected custom model available even if it is no longer saved", () => { - const options = getAppModelOptions("codex", [], 
"custom/selected-model"); - - expect(options.at(-1)).toEqual({ - slug: "custom/selected-model", - name: "custom/selected-model", - isCustom: true, - }); - }); - - it("keeps a saved custom provider model available as an exact slug option", () => { - const options = getAppModelOptions("claudeAgent", ["claude/custom-opus"], "claude/custom-opus"); - - expect(options.some((option) => option.slug === "claude/custom-opus" && option.isCustom)).toBe( - true, - ); - }); -}); - -describe("resolveAppModelSelection", () => { - it("preserves saved custom model slugs instead of falling back to the default", () => { - expect( - resolveAppModelSelection( - "codex", - { ...EMPTY_CUSTOM_MODELS, codex: ["galapagos-alpha"] }, - "galapagos-alpha", - ), - ).toBe("galapagos-alpha"); - }); - - it("falls back to the provider default when no model is selected", () => { - expect(resolveAppModelSelection("codex", EMPTY_CUSTOM_MODELS, "")).toBe("gpt-5.4"); - }); - - it("resolves display names through the shared resolver", () => { - expect(resolveAppModelSelection("codex", EMPTY_CUSTOM_MODELS, "GPT-5.3 Codex")).toBe( - "gpt-5.3-codex", - ); - }); - - it("resolves aliases through the shared resolver", () => { - expect(resolveAppModelSelection("claudeAgent", EMPTY_CUSTOM_MODELS, "sonnet")).toBe( - "claude-sonnet-4-6", - ); - }); - - it("resolves transient selected custom models included in app model options", () => { - expect(resolveAppModelSelection("codex", EMPTY_CUSTOM_MODELS, "custom/selected-model")).toBe( - "custom/selected-model", - ); - }); -}); - -describe("resolveGitTextGenerationModelSelection", () => { - it("prefers a provider-specific override over the active thread model", () => { - const settings = { - ...getAppSettingsSnapshot(), - ...patchGitTextGenerationModelOverrides({}, "codex", "gpt-5.4-mini"), - }; - - expect(resolveGitTextGenerationModelSelection("codex", settings, "gpt-5.4")).toBe( - "gpt-5.4-mini", - ); - }); - - it("falls back to the active thread model when no override 
is configured", () => { - const settings = getAppSettingsSnapshot(); - - expect(resolveGitTextGenerationModelSelection("cursor", settings, "opus-4.6-thinking")).toBe( - "opus-4.6-thinking", - ); - }); - - it("uses the provider git default when neither an override nor thread model exists", () => { - const settings = getAppSettingsSnapshot(); - - expect(resolveGitTextGenerationModelSelection("codex", settings, null)).toBe("gpt-5.4-mini"); - }); -}); - -describe("timestamp format defaults", () => { - it("defaults timestamp format to locale", () => { - expect(DEFAULT_TIMESTAMP_FORMAT).toBe("locale"); - }); - - it("includes provider-specific custom slugs in non-codex model lists", () => { - const claudeOptions = getAppModelOptions("claudeAgent", ["claude/custom-opus"]); - const cursorOptions = getAppModelOptions("cursor", ["cursor/custom-model"]); - - expect(claudeOptions.some((option) => option.slug === "claude/custom-opus")).toBe(true); - expect(cursorOptions.some((option) => option.slug === "cursor/custom-model")).toBe(true); - }); -}); - -describe("getAppSettingsSnapshot", () => { - it("defaults provider logos to color", () => { - expect(getAppSettingsSnapshot().providerLogoAppearance).toBe("original"); - }); - - it("hydrates a persisted provider logo appearance preference", () => { - const persistedSettings = { - ...getAppSettingsSnapshot(), - providerLogoAppearance: "accent", - }; - localStorage.setItem(APP_SETTINGS_STORAGE_KEY, JSON.stringify(persistedSettings)); - - expect(getAppSettingsSnapshot().providerLogoAppearance).toBe("accent"); - }); - - it("migrates the legacy grayscale provider logo preference", () => { - localStorage.setItem( - APP_SETTINGS_STORAGE_KEY, - JSON.stringify({ - grayscaleProviderLogos: true, - }), - ); - - expect(getAppSettingsSnapshot().providerLogoAppearance).toBe("grayscale"); - }); -}); - -describe("sidebar sort defaults", () => { - it("defaults project sorting to updated_at", () => { - 
expect(DEFAULT_SIDEBAR_PROJECT_SORT_ORDER).toBe("updated_at"); - }); - - it("defaults thread sorting to updated_at", () => { - expect(DEFAULT_SIDEBAR_THREAD_SORT_ORDER).toBe("updated_at"); - }); -}); - -describe("provider-specific custom models", () => { - it("includes provider-specific custom slugs in non-codex model lists", () => { - const claudeOptions = getAppModelOptions("claudeAgent", ["claude/custom-opus"]); - - expect(claudeOptions.some((option) => option.slug === "claude/custom-opus")).toBe(true); - }); -}); - -describe("getProviderStartOptions", () => { - it("returns only populated provider overrides", () => { - expect( - getProviderStartOptions({ - claudeBinaryPath: "/usr/local/bin/claude", - codexBinaryPath: "", - codexHomePath: "/Users/you/.codex", - }), - ).toEqual({ - claudeAgent: { - binaryPath: "/usr/local/bin/claude", - }, - codex: { - homePath: "/Users/you/.codex", - }, - }); - }); - - it("returns undefined when no provider overrides are configured", () => { - expect( - getProviderStartOptions({ - claudeBinaryPath: "", - codexBinaryPath: "", - codexHomePath: "", - }), - ).toBeUndefined(); - }); -}); - -describe("provider-indexed custom model settings", () => { - const settings = { - customCodexModels: ["custom/codex-model"], - customClaudeModels: ["claude/custom-opus"], - customCopilotModels: [], - customCursorModels: [], - customOpencodeModels: [], - customGeminiCliModels: [], - customAmpModels: [], - customKiloModels: [], - } as const; - - it("exports one provider config per provider", () => { - expect(MODEL_PROVIDER_SETTINGS.map((config) => config.provider)).toEqual([ - "codex", - "copilot", - "claudeAgent", - "cursor", - "opencode", - "geminiCli", - "amp", - "kilo", - ]); - }); - - it("reads custom models for each provider", () => { - expect(getCustomModelsForProvider(settings, "codex")).toEqual(["custom/codex-model"]); - expect(getCustomModelsForProvider(settings, "claudeAgent")).toEqual(["claude/custom-opus"]); - }); - - it("reads default 
custom models for each provider", () => { - const defaults = { - customCodexModels: ["default/codex-model"], - customClaudeModels: ["claude/default-opus"], - customCopilotModels: [], - customCursorModels: [], - customOpencodeModels: [], - customGeminiCliModels: [], - customAmpModels: [], - customKiloModels: [], - } as const; - - expect(getDefaultCustomModelsForProvider(defaults, "codex")).toEqual(["default/codex-model"]); - expect(getDefaultCustomModelsForProvider(defaults, "claudeAgent")).toEqual([ - "claude/default-opus", - ]); - }); - - it("patches custom models for codex", () => { - expect(patchCustomModels("codex", ["custom/codex-model"])).toEqual({ - customCodexModels: ["custom/codex-model"], - }); - }); - - it("patches custom models for claude", () => { - expect(patchCustomModels("claudeAgent", ["claude/custom-opus"])).toEqual({ - customClaudeModels: ["claude/custom-opus"], - }); - }); - - it("builds a complete provider-indexed custom model record", () => { - expect(getCustomModelsByProvider(settings)).toEqual({ - codex: ["custom/codex-model"], - copilot: [], - claudeAgent: ["claude/custom-opus"], - cursor: [], - opencode: [], - geminiCli: [], - amp: [], - kilo: [], - }); - }); - - it("builds provider-indexed model options including custom models", () => { - const modelOptionsByProvider = getCustomModelOptionsByProvider(settings); - - expect( - modelOptionsByProvider.codex.some((option) => option.slug === "custom/codex-model"), - ).toBe(true); - expect( - modelOptionsByProvider.claudeAgent.some((option) => option.slug === "claude/custom-opus"), - ).toBe(true); - }); - - it("normalizes and deduplicates custom model options per provider", () => { - const modelOptionsByProvider = getCustomModelOptionsByProvider({ - customCodexModels: [" custom/codex-model ", "gpt-5.4", "custom/codex-model"], - customClaudeModels: [" sonnet ", "claude/custom-opus", "claude/custom-opus"], - customCopilotModels: [], - customCursorModels: [], - customOpencodeModels: [], - 
customGeminiCliModels: [], - customAmpModels: [], - customKiloModels: [], - }); - - expect( - modelOptionsByProvider.codex.filter((option) => option.slug === "custom/codex-model"), - ).toHaveLength(1); - expect(modelOptionsByProvider.codex.some((option) => option.slug === "gpt-5.4")).toBe(true); - expect( - modelOptionsByProvider.claudeAgent.filter((option) => option.slug === "claude/custom-opus"), - ).toHaveLength(1); - expect( - modelOptionsByProvider.claudeAgent.some((option) => option.slug === "claude-sonnet-4-6"), - ).toBe(true); - }); -}); - -describe("AppSettingsSchema", () => { - it("fills decoding defaults for persisted settings that predate newer keys", () => { - const decode = Schema.decodeUnknownSync(Schema.fromJsonString(AppSettingsSchema)); - - expect( - decode( - JSON.stringify({ - codexBinaryPath: "/usr/local/bin/codex", - confirmThreadDelete: false, - }), - ), - ).toMatchObject({ - claudeBinaryPath: "", - codexBinaryPath: "/usr/local/bin/codex", - codexHomePath: "", - defaultThreadEnvMode: "local", - confirmThreadDelete: false, - enableAssistantStreaming: false, - sidebarProjectSortOrder: DEFAULT_SIDEBAR_PROJECT_SORT_ORDER, - sidebarThreadSortOrder: DEFAULT_SIDEBAR_THREAD_SORT_ORDER, - timestampFormat: DEFAULT_TIMESTAMP_FORMAT, - customCodexModels: [], - customClaudeModels: [], - }); - }); -}); - -// Note: upstream's resolveAppModelSelectionState tests removed — the fork -// uses resolveGitTextGenerationModelSelection with per-provider overrides -// instead of a single textGenerationModelSelection field. Equivalent -// coverage lives in the resolveGitTextGenerationModelSelection tests above. 
diff --git a/apps/web/src/appSettings.ts b/apps/web/src/appSettings.ts index 1c6784e4920..c89244df6ca 100644 --- a/apps/web/src/appSettings.ts +++ b/apps/web/src/appSettings.ts @@ -1,11 +1,8 @@ import { useCallback, useMemo } from "react"; import { Effect, Schema } from "effect"; -import { - DEFAULT_SERVER_SETTINGS, - type ProviderStartOptions, - type ProviderKind, -} from "@t3tools/contracts"; +import { DEFAULT_SERVER_SETTINGS } from "@t3tools/contracts"; import { DEFAULT_CLIENT_SETTINGS, type UnifiedSettings } from "@t3tools/contracts/settings"; +import type { ProviderKind } from "./providerKind"; import { DEFAULT_ACCENT_COLOR, isValidAccentColor, normalizeAccentColor } from "./accentColor"; import { useLocalStorage } from "./hooks/useLocalStorage"; import { useSettings, useUpdateSettings } from "./hooks/useSettings"; @@ -19,8 +16,6 @@ import { SidebarProjectSortOrder, SidebarThreadSortOrder, } from "./appearance"; -import { normalizeCustomModelSlugs } from "./customModels"; -import { normalizeGitTextGenerationModelByProvider } from "./gitTextGeneration"; // Re-export everything from domain modules for backwards compatibility export { @@ -36,29 +31,47 @@ export { DEFAULT_SIDEBAR_THREAD_SORT_ORDER, } from "./appearance"; -export { - MAX_CUSTOM_MODEL_LENGTH, - type CustomModelSettingsKey, - type ProviderCustomModelConfig, - type ProviderCustomModelSettings, - MODEL_PROVIDER_SETTINGS, - type AppModelOption, - normalizeCustomModelSlugs, - getCustomModelsForProvider, - patchCustomModels, - getDefaultCustomModelsForProvider, - getCustomModelsByProvider, - getAppModelOptions, - resolveAppModelSelection, - getCustomModelOptionsByProvider, - getSlashModelOptions, -} from "./customModels"; +const MAX_CUSTOM_MODEL_COUNT = 32; +const MAX_CUSTOM_MODEL_LENGTH_VALUE = 256; +export const MAX_CUSTOM_MODEL_LENGTH = MAX_CUSTOM_MODEL_LENGTH_VALUE; -export { - getGitTextGenerationModelOverride, - patchGitTextGenerationModelOverrides, - resolveGitTextGenerationModelSelection, -} from 
"./gitTextGeneration"; +/** + * Lightweight, fork-local custom-model normalizer used while the legacy + * AppSettings shape is still alive in the web client. The new instance-keyed + * pipeline lives in `modelSelection.ts`; this helper just trims, dedupes and + * caps the legacy per-provider string arrays so we can keep round-tripping them + * through `withUnifiedCompatSettings` / `toUnifiedPatch` without touching the + * removed contracts surface. + */ +function normalizeCustomModelSlugsLocal( + models: Iterable<unknown>, +): ReadonlyArray<string> { + const out: string[] = []; + const seen = new Set<string>(); + for (const candidate of models) { + if (typeof candidate !== "string") continue; + const trimmed = candidate.trim(); + if (!trimmed || trimmed.length > MAX_CUSTOM_MODEL_LENGTH_VALUE) continue; + if (seen.has(trimmed)) continue; + seen.add(trimmed); + out.push(trimmed); + if (out.length >= MAX_CUSTOM_MODEL_COUNT) break; + } + return out; +} + +function normalizeGitTextGenerationModelByProviderLocal( + overrides: Record<string, unknown>, +): Record<string, string> { + const next: Record<string, string> = {}; + for (const [k, v] of Object.entries(overrides)) { + if (typeof v !== "string") continue; + const trimmed = v.trim(); + if (!trimmed) continue; + next[k === "claudeCode" ? 
"claudeAgent" : k] = trimmed; + } + return next; +} const APP_SETTINGS_STORAGE_KEY = "t3code:app-settings:v1"; const APP_SETTINGS_PROVIDER_CUSTOM_MODEL_KEYS = { @@ -74,6 +87,7 @@ const APP_SETTINGS_PROVIDER_CUSTOM_MODEL_KEYS = { const MIRRORED_CLIENT_KEYS = new Set([ "confirmThreadDelete", "diffWordWrap", + "diffIgnoreWhitespace", "sidebarProjectSortOrder", "sidebarThreadSortOrder", "timestampFormat", @@ -120,6 +134,7 @@ export const AppSettingsSchema = Schema.Struct({ ), confirmThreadDelete: Schema.Boolean.pipe(withDefaults(() => true)), diffWordWrap: Schema.Boolean.pipe(withDefaults(() => false)), + diffIgnoreWhitespace: Schema.Boolean.pipe(withDefaults(() => true)), enableAssistantStreaming: Schema.Boolean.pipe(withDefaults(() => false)), showCommandOutput: Schema.Boolean.pipe(withDefaults(() => true)), showFileChangeDiffs: Schema.Boolean.pipe(withDefaults(() => true)), @@ -175,15 +190,15 @@ const DEFAULT_APP_SETTINGS = AppSettingsSchema.make({}); function normalizeAppSettings(settings: AppSettings): AppSettings { return { ...settings, - customCodexModels: normalizeCustomModelSlugs(settings.customCodexModels, "codex"), - customCopilotModels: normalizeCustomModelSlugs(settings.customCopilotModels, "copilot"), - customClaudeModels: normalizeCustomModelSlugs(settings.customClaudeModels, "claudeAgent"), - customCursorModels: normalizeCustomModelSlugs(settings.customCursorModels, "cursor"), - customOpencodeModels: normalizeCustomModelSlugs(settings.customOpencodeModels, "opencode"), - customGeminiCliModels: normalizeCustomModelSlugs(settings.customGeminiCliModels, "geminiCli"), - customAmpModels: normalizeCustomModelSlugs(settings.customAmpModels, "amp"), - customKiloModels: normalizeCustomModelSlugs(settings.customKiloModels, "kilo"), - gitTextGenerationModelByProvider: normalizeGitTextGenerationModelByProvider( + customCodexModels: normalizeCustomModelSlugsLocal(settings.customCodexModels), + customCopilotModels: 
normalizeCustomModelSlugsLocal(settings.customCopilotModels), + customClaudeModels: normalizeCustomModelSlugsLocal(settings.customClaudeModels), + customCursorModels: normalizeCustomModelSlugsLocal(settings.customCursorModels), + customOpencodeModels: normalizeCustomModelSlugsLocal(settings.customOpencodeModels), + customGeminiCliModels: normalizeCustomModelSlugsLocal(settings.customGeminiCliModels), + customAmpModels: normalizeCustomModelSlugsLocal(settings.customAmpModels), + customKiloModels: normalizeCustomModelSlugsLocal(settings.customKiloModels), + gitTextGenerationModelByProvider: normalizeGitTextGenerationModelByProviderLocal( settings.gitTextGenerationModelByProvider, ), accentColor: normalizeAccentColor(settings.accentColor), @@ -195,30 +210,6 @@ function normalizeAppSettings(settings: AppSettings): AppSettings { }; } -export function getProviderStartOptions( - settings: Pick, -): ProviderStartOptions | undefined { - const providerOptions: ProviderStartOptions = { - ...(settings.codexBinaryPath || settings.codexHomePath - ? { - codex: { - ...(settings.codexBinaryPath ? { binaryPath: settings.codexBinaryPath } : {}), - ...(settings.codexHomePath ? { homePath: settings.codexHomePath } : {}), - }, - } - : {}), - ...(settings.claudeBinaryPath - ? { - claudeAgent: { - binaryPath: settings.claudeBinaryPath, - }, - } - : {}), - }; - - return Object.keys(providerOptions).length > 0 ? 
providerOptions : undefined; -} - let cachedRawSettings: string | null = null; let cachedSnapshot: AppSettings = DEFAULT_APP_SETTINGS; @@ -269,6 +260,7 @@ function withUnifiedCompatSettings( | "confirmThreadDelete" | "defaultThreadEnvMode" | "diffWordWrap" + | "diffIgnoreWhitespace" | "enableAssistantStreaming" | "providers" | "sidebarProjectSortOrder" @@ -286,6 +278,7 @@ function withUnifiedCompatSettings( defaultThreadEnvMode: unifiedSettings.defaultThreadEnvMode, confirmThreadDelete: unifiedSettings.confirmThreadDelete, diffWordWrap: unifiedSettings.diffWordWrap, + diffIgnoreWhitespace: unifiedSettings.diffIgnoreWhitespace, enableAssistantStreaming: unifiedSettings.enableAssistantStreaming, sidebarProjectSortOrder: unifiedSettings.sidebarProjectSortOrder, sidebarThreadSortOrder: unifiedSettings.sidebarThreadSortOrder, @@ -340,7 +333,7 @@ function toUnifiedPatch(patch: Partial): Partial { } providersPatch[provider] = { ...(providersPatch[provider] ?? {}), - customModels: normalizeCustomModelSlugs(models, provider), + customModels: normalizeCustomModelSlugsLocal(models), }; } return { @@ -348,6 +341,9 @@ function toUnifiedPatch(patch: Partial): Partial { ? { confirmThreadDelete: patch.confirmThreadDelete } : {}), ...(patch.diffWordWrap !== undefined ? { diffWordWrap: patch.diffWordWrap } : {}), + ...(patch.diffIgnoreWhitespace !== undefined + ? { diffIgnoreWhitespace: patch.diffIgnoreWhitespace } + : {}), ...(patch.sidebarProjectSortOrder !== undefined ? 
{ sidebarProjectSortOrder: patch.sidebarProjectSortOrder } : {}), @@ -402,6 +398,7 @@ export function useAppSettings() { confirmThreadDelete: unifiedSettings.confirmThreadDelete, defaultThreadEnvMode: unifiedSettings.defaultThreadEnvMode, diffWordWrap: unifiedSettings.diffWordWrap, + diffIgnoreWhitespace: unifiedSettings.diffIgnoreWhitespace, enableAssistantStreaming: unifiedSettings.enableAssistantStreaming, providers: unifiedSettings.providers, sidebarProjectSortOrder: unifiedSettings.sidebarProjectSortOrder, diff --git a/apps/web/src/authBootstrap.test.ts b/apps/web/src/authBootstrap.test.ts index b06c4248fca..e6f08f147ca 100644 --- a/apps/web/src/authBootstrap.test.ts +++ b/apps/web/src/authBootstrap.test.ts @@ -353,6 +353,34 @@ describe("resolveInitialServerAuthGateState", () => { expect(fetchMock).toHaveBeenCalledTimes(3); }); + it("surfaces a friendly error message when an invalid pairing token is submitted", async () => { + const fetchMock = vi.fn().mockResolvedValueOnce( + jsonResponse( + { + error: "Invalid bootstrap credential.", + }, + { + status: 401, + }, + ), + ); + vi.stubGlobal("fetch", fetchMock); + + const { submitServerAuthCredential } = await import("./environments/primary"); + + await expect(submitServerAuthCredential("bad-token")).rejects.toThrow( + "Invalid pairing token. 
Check the token and try again.", + ); + expect(fetchMock).toHaveBeenCalledWith("http://localhost/api/auth/bootstrap", { + body: JSON.stringify({ credential: "bad-token" }), + credentials: "include", + headers: { + "content-type": "application/json", + }, + method: "POST", + }); + }); + it("waits for the authenticated session to become observable after silent desktop bootstrap", async () => { vi.useFakeTimers(); const fetchMock = vi diff --git a/apps/web/src/clientPersistenceStorage.test.ts b/apps/web/src/clientPersistenceStorage.test.ts index a74ce18ac30..e02168e09b3 100644 --- a/apps/web/src/clientPersistenceStorage.test.ts +++ b/apps/web/src/clientPersistenceStorage.test.ts @@ -10,6 +10,12 @@ const savedRegistryRecord: PersistedSavedEnvironmentRecord = { wsBaseUrl: "wss://remote.example.com/", createdAt: "2026-04-09T00:00:00.000Z", lastConnectedAt: null, + desktopSsh: { + alias: "devbox", + hostname: "devbox.example.com", + username: "julius", + port: 22, + }, }; function createLocalStorageStub(): Storage { diff --git a/apps/web/src/clientPersistenceStorage.ts b/apps/web/src/clientPersistenceStorage.ts index 70f51d5c30a..30c949b37ac 100644 --- a/apps/web/src/clientPersistenceStorage.ts +++ b/apps/web/src/clientPersistenceStorage.ts @@ -19,6 +19,14 @@ const BrowserSavedEnvironmentRecordSchema = Schema.Struct({ wsBaseUrl: Schema.String, createdAt: Schema.String, lastConnectedAt: Schema.NullOr(Schema.String), + desktopSsh: Schema.optionalKey( + Schema.Struct({ + alias: Schema.String, + hostname: Schema.String, + username: Schema.NullOr(Schema.String), + port: Schema.NullOr(Schema.Number), + }), + ), bearerToken: Schema.optionalKey(Schema.String), }); type BrowserSavedEnvironmentRecord = typeof BrowserSavedEnvironmentRecordSchema.Type; @@ -37,7 +45,7 @@ function hasWindow(): boolean { function toPersistedSavedEnvironmentRecord( record: PersistedSavedEnvironmentRecord, ): PersistedSavedEnvironmentRecord { - return { + const nextRecord = { environmentId: 
record.environmentId, label: record.label, httpBaseUrl: record.httpBaseUrl, @@ -45,6 +53,7 @@ function toPersistedSavedEnvironmentRecord( createdAt: record.createdAt, lastConnectedAt: record.lastConnectedAt, }; + return record.desktopSsh ? { ...nextRecord, desktopSsh: record.desktopSsh } : nextRecord; } export function readBrowserClientSettings(): ClientSettings | null { @@ -135,6 +144,7 @@ export function writeBrowserSavedEnvironmentRegistry( wsBaseUrl: record.wsBaseUrl, createdAt: record.createdAt, lastConnectedAt: record.lastConnectedAt, + ...(record.desktopSsh ? { desktopSsh: record.desktopSsh } : {}), bearerToken, } : toPersistedSavedEnvironmentRecord(record); @@ -166,7 +176,7 @@ export function writeBrowserSavedEnvironmentSecret( return record; } found = true; - return { + const nextRecord = { environmentId: record.environmentId, label: record.label, httpBaseUrl: record.httpBaseUrl, @@ -174,7 +184,8 @@ export function writeBrowserSavedEnvironmentSecret( createdAt: record.createdAt, lastConnectedAt: record.lastConnectedAt, bearerToken: secret, - } satisfies BrowserSavedEnvironmentRecord; + }; + return record.desktopSsh ? 
{ ...nextRecord, desktopSsh: record.desktopSsh } : nextRecord; }), }); return found; diff --git a/apps/web/src/components/AnimatedHeight.tsx b/apps/web/src/components/AnimatedHeight.tsx new file mode 100644 index 00000000000..dd404c49dff --- /dev/null +++ b/apps/web/src/components/AnimatedHeight.tsx @@ -0,0 +1,59 @@ +"use client"; + +import { type ReactNode, useLayoutEffect, useRef, useState } from "react"; + +export function AnimatedHeight({ children }: { readonly children: ReactNode }) { + const contentRef = useRef<HTMLDivElement | null>(null); + const [height, setHeight] = useState<number | null>(null); + + useLayoutEffect(() => { + const element = contentRef.current; + if (!element) return; + let firstFrameId: number | null = null; + let secondFrameId: number | null = null; + + const updateHeight = () => { + const nextHeight = Math.ceil(element.scrollHeight || element.getBoundingClientRect().height); + setHeight((currentHeight) => (currentHeight === nextHeight ? currentHeight : nextHeight)); + }; + const cancelPendingFrames = () => { + if (firstFrameId !== null) { + window.cancelAnimationFrame(firstFrameId); + firstFrameId = null; + } + if (secondFrameId !== null) { + window.cancelAnimationFrame(secondFrameId); + secondFrameId = null; + } + }; + const updateHeightAfterPaint = () => { + cancelPendingFrames(); + updateHeight(); + firstFrameId = window.requestAnimationFrame(() => { + firstFrameId = null; + updateHeight(); + secondFrameId = window.requestAnimationFrame(() => { + secondFrameId = null; + updateHeight(); + }); + }); + }; + + updateHeightAfterPaint(); + const resizeObserver = new ResizeObserver(updateHeightAfterPaint); + resizeObserver.observe(element); + return () => { + resizeObserver.disconnect(); + cancelPendingFrames(); + }; + }, []); + + return ( +
+
{children}
+
+ ); +} diff --git a/apps/web/src/components/AppSidebarLayout.tsx b/apps/web/src/components/AppSidebarLayout.tsx index b1ce57235a8..d98f30a1e5c 100644 --- a/apps/web/src/components/AppSidebarLayout.tsx +++ b/apps/web/src/components/AppSidebarLayout.tsx @@ -54,7 +54,7 @@ export function AppSidebarLayout({ children }: { children: ReactNode }) { }, [navigate]); return ( - + { ).toBe("local"); }); - it("keeps new-worktree mode when selecting a base branch before worktree creation", () => { + it("keeps new-worktree mode when selecting a base ref before worktree creation", () => { expect( resolveDraftEnvModeAfterBranchChange({ nextWorktreePath: null, @@ -38,7 +38,7 @@ describe("resolveDraftEnvModeAfterBranchChange", () => { ).toBe("worktree"); }); - it("uses worktree mode when selecting a branch already attached to a worktree", () => { + it("uses worktree mode when selecting a ref already attached to a worktree", () => { expect( resolveDraftEnvModeAfterBranchChange({ nextWorktreePath: "/repo/.t3/worktrees/feature-a", @@ -50,7 +50,7 @@ describe("resolveDraftEnvModeAfterBranchChange", () => { }); describe("resolveBranchToolbarValue", () => { - it("defaults new-worktree mode to current git branch when no explicit base branch is set", () => { + it("defaults new-worktree mode to current git ref when no explicit base ref is set", () => { expect( resolveBranchToolbarValue({ envMode: "worktree", @@ -61,7 +61,7 @@ describe("resolveBranchToolbarValue", () => { ).toBe("main"); }); - it("keeps an explicitly selected worktree base branch", () => { + it("keeps an explicitly selected worktree base ref", () => { expect( resolveBranchToolbarValue({ envMode: "worktree", @@ -72,7 +72,7 @@ describe("resolveBranchToolbarValue", () => { ).toBe("feature/base"); }); - it("shows the actual checked-out branch when not selecting a new worktree base", () => { + it("shows the actual checked-out ref when not selecting a new worktree base", () => { expect( resolveBranchToolbarValue({ envMode: "local", 
@@ -186,8 +186,8 @@ describe("deriveLocalBranchNameFromRemoteRef", () => { }); describe("dedupeRemoteBranchesWithLocalMatches", () => { - it("hides remote refs when the matching local branch exists", () => { - const input: GitBranch[] = [ + it("hides remote refs when the matching local ref exists", () => { + const input: VcsRef[] = [ { name: "feature/demo", current: false, @@ -212,14 +212,14 @@ describe("dedupeRemoteBranchesWithLocalMatches", () => { }, ]; - expect(dedupeRemoteBranchesWithLocalMatches(input).map((branch) => branch.name)).toEqual([ + expect(dedupeRemoteBranchesWithLocalMatches(input).map((ref) => ref.name)).toEqual([ "feature/demo", "origin/feature/remote-only", ]); }); it("keeps all entries when no local match exists for a remote ref", () => { - const input: GitBranch[] = [ + const input: VcsRef[] = [ { name: "feature/local", current: false, @@ -236,14 +236,14 @@ describe("dedupeRemoteBranchesWithLocalMatches", () => { }, ]; - expect(dedupeRemoteBranchesWithLocalMatches(input).map((branch) => branch.name)).toEqual([ + expect(dedupeRemoteBranchesWithLocalMatches(input).map((ref) => ref.name)).toEqual([ "feature/local", "origin/feature/remote-only", ]); }); - it("keeps non-origin remote refs visible even when a matching local branch exists", () => { - const input: GitBranch[] = [ + it("keeps non-origin remote refs visible even when a matching local ref exists", () => { + const input: VcsRef[] = [ { name: "feature/demo", current: false, @@ -260,14 +260,14 @@ describe("dedupeRemoteBranchesWithLocalMatches", () => { }, ]; - expect(dedupeRemoteBranchesWithLocalMatches(input).map((branch) => branch.name)).toEqual([ + expect(dedupeRemoteBranchesWithLocalMatches(input).map((ref) => ref.name)).toEqual([ "feature/demo", "my-org/upstream/feature/demo", ]); }); it("keeps non-origin remote refs visible when git tracks with first-slash local naming", () => { - const input: GitBranch[] = [ + const input: VcsRef[] = [ { name: "upstream/feature", current: false, @@ 
-284,7 +284,7 @@ describe("dedupeRemoteBranchesWithLocalMatches", () => { }, ]; - expect(dedupeRemoteBranchesWithLocalMatches(input).map((branch) => branch.name)).toEqual([ + expect(dedupeRemoteBranchesWithLocalMatches(input).map((ref) => ref.name)).toEqual([ "upstream/feature", "my-org/upstream/feature", ]); @@ -292,12 +292,12 @@ describe("dedupeRemoteBranchesWithLocalMatches", () => { }); describe("resolveBranchSelectionTarget", () => { - it("reuses an existing secondary worktree for the selected branch", () => { + it("reuses an existing secondary worktree for the selected ref", () => { expect( resolveBranchSelectionTarget({ activeProjectCwd: "/repo", activeWorktreePath: "/repo/.t3/worktrees/feature-a", - branch: { + refName: { isDefault: false, worktreePath: "/repo/.t3/worktrees/feature-b", }, @@ -309,12 +309,12 @@ describe("resolveBranchSelectionTarget", () => { }); }); - it("switches back to the main repo when the branch already lives there", () => { + it("switches back to the main repo when the ref already lives there", () => { expect( resolveBranchSelectionTarget({ activeProjectCwd: "/repo", activeWorktreePath: "/repo/.t3/worktrees/feature-a", - branch: { + refName: { isDefault: true, worktreePath: "/repo", }, @@ -326,12 +326,12 @@ describe("resolveBranchSelectionTarget", () => { }); }); - it("checks out the default branch in the main repo when leaving a secondary worktree", () => { + it("checks out the default ref in the main repo when leaving a secondary worktree", () => { expect( resolveBranchSelectionTarget({ activeProjectCwd: "/repo", activeWorktreePath: "/repo/.t3/worktrees/feature-a", - branch: { + refName: { isDefault: true, worktreePath: null, }, @@ -343,12 +343,12 @@ describe("resolveBranchSelectionTarget", () => { }); }); - it("keeps checkout in the current worktree for non-default branches", () => { + it("keeps checkout in the current worktree for non-default refs", () => { expect( resolveBranchSelectionTarget({ activeProjectCwd: "/repo", 
activeWorktreePath: "/repo/.t3/worktrees/feature-a", - branch: { + refName: { isDefault: false, worktreePath: null, }, @@ -373,7 +373,7 @@ describe("shouldIncludeBranchPickerItem", () => { ).toBe(true); }); - it("keeps the synthetic create-branch item visible for arbitrary branch input", () => { + it("keeps the synthetic create-ref item visible for arbitrary ref input", () => { expect( shouldIncludeBranchPickerItem({ itemValue: "__create_new_branch__:feature/demo", @@ -384,7 +384,7 @@ describe("shouldIncludeBranchPickerItem", () => { ).toBe(true); }); - it("still filters ordinary branch items by query text", () => { + it("still filters ordinary ref items by query text", () => { expect( shouldIncludeBranchPickerItem({ itemValue: "main", diff --git a/apps/web/src/components/BranchToolbar.logic.ts b/apps/web/src/components/BranchToolbar.logic.ts index 7adab1a2e16..31f614a2e2c 100644 --- a/apps/web/src/components/BranchToolbar.logic.ts +++ b/apps/web/src/components/BranchToolbar.logic.ts @@ -1,4 +1,4 @@ -import type { EnvironmentId, GitBranch, ProjectId } from "@t3tools/contracts"; +import type { EnvironmentId, VcsRef, ProjectId } from "@t3tools/contracts"; import { Schema } from "effect"; export { dedupeRemoteBranchesWithLocalMatches, @@ -100,24 +100,24 @@ export function resolveBranchToolbarValue(input: { export function resolveBranchSelectionTarget(input: { activeProjectCwd: string; activeWorktreePath: string | null; - branch: Pick; + refName: Pick; }): { checkoutCwd: string; nextWorktreePath: string | null; reuseExistingWorktree: boolean; } { - const { activeProjectCwd, activeWorktreePath, branch } = input; + const { activeProjectCwd, activeWorktreePath, refName } = input; - if (branch.worktreePath) { + if (refName.worktreePath) { return { - checkoutCwd: branch.worktreePath, - nextWorktreePath: branch.worktreePath === activeProjectCwd ? null : branch.worktreePath, + checkoutCwd: refName.worktreePath, + nextWorktreePath: refName.worktreePath === activeProjectCwd ? 
null : refName.worktreePath, reuseExistingWorktree: true, }; } const nextWorktreePath = - activeWorktreePath !== null && branch.isDefault ? null : activeWorktreePath; + activeWorktreePath !== null && refName.isDefault ? null : activeWorktreePath; return { checkoutCwd: nextWorktreePath ?? activeProjectCwd, diff --git a/apps/web/src/components/BranchToolbar.tsx b/apps/web/src/components/BranchToolbar.tsx index c6f37c42a95..27c5c311c60 100644 --- a/apps/web/src/components/BranchToolbar.tsx +++ b/apps/web/src/components/BranchToolbar.tsx @@ -1,18 +1,41 @@ import { scopeProjectRef, scopeThreadRef } from "@t3tools/client-runtime"; import type { EnvironmentId, ThreadId } from "@t3tools/contracts"; +import { + ChevronDownIcon, + CloudIcon, + FolderGit2Icon, + FolderGitIcon, + FolderIcon, + MonitorIcon, +} from "lucide-react"; import { memo, useMemo } from "react"; import { useComposerDraftStore, type DraftId } from "../composerDraftStore"; +import { useIsMobile } from "../hooks/useMediaQuery"; import { useStore } from "../store"; import { createProjectSelectorByRef, createThreadSelectorByRef } from "../storeSelectors"; import { type EnvMode, type EnvironmentOption, + resolveCurrentWorkspaceLabel, + resolveEnvModeLabel, resolveEffectiveEnvMode, + resolveLockedWorkspaceLabel, } from "./BranchToolbar.logic"; import { BranchToolbarBranchSelector } from "./BranchToolbarBranchSelector"; import { BranchToolbarEnvironmentSelector } from "./BranchToolbarEnvironmentSelector"; import { BranchToolbarEnvModeSelector } from "./BranchToolbarEnvModeSelector"; +import { Button } from "./ui/button"; +import { + Menu, + MenuGroup, + MenuGroupLabel, + MenuPopup, + MenuRadioGroup, + MenuRadioItem, + MenuSeparator, + MenuTrigger, +} from "./ui/menu"; import { Separator } from "./ui/separator"; interface BranchToolbarProps { @@ -30,6 +53,142 @@ interface BranchToolbarProps { onEnvironmentChange?: (environmentId: EnvironmentId) => void; } +interface MobileRunContextSelectorProps { + envLocked: 
boolean; + envModeLocked: boolean; + environmentId: EnvironmentId; + availableEnvironments: readonly EnvironmentOption[] | undefined; + showEnvironmentPicker: boolean; + onEnvironmentChange: ((environmentId: EnvironmentId) => void) | undefined; + effectiveEnvMode: EnvMode; + activeWorktreePath: string | null; + onEnvModeChange: (mode: EnvMode) => void; +} + +const MobileRunContextSelector = memo(function MobileRunContextSelector({ + envLocked, + envModeLocked, + environmentId, + availableEnvironments, + showEnvironmentPicker, + onEnvironmentChange, + effectiveEnvMode, + activeWorktreePath, + onEnvModeChange, +}: MobileRunContextSelectorProps) { + const activeEnvironment = useMemo( + () => availableEnvironments?.find((env) => env.environmentId === environmentId) ?? null, + [availableEnvironments, environmentId], + ); + const WorkspaceIcon = + effectiveEnvMode === "worktree" + ? FolderGit2Icon + : activeWorktreePath + ? FolderGitIcon + : FolderIcon; + const workspaceLabel = envModeLocked + ? resolveLockedWorkspaceLabel(activeWorktreePath) + : effectiveEnvMode === "worktree" + ? resolveEnvModeLabel("worktree") + : resolveCurrentWorkspaceLabel(activeWorktreePath); + const isLocked = envLocked || envModeLocked; + const EnvironmentIcon = activeEnvironment?.isPrimary ? MonitorIcon : CloudIcon; + const icon = showEnvironmentPicker ? ( + // Button's base styles apply `-mx-0.5` to descendant SVGs, which eats 4px + // out of whatever gap we set. mx-0! cancels that so gap-0.5 reads as 2px. + + + + + ) : ( + + ); + const triggerContent = ( + <> + {icon} + + {showEnvironmentPicker ? (activeEnvironment?.label ?? "Run on") : workspaceLabel} + + + ); + + if (isLocked) { + return ( + + {triggerContent} + + ); + } + + return ( + + } + className="min-w-0 max-w-[48%] flex-1 justify-start text-muted-foreground/70 hover:text-foreground/80 md:hidden" + > + {triggerContent} + + + + {showEnvironmentPicker && availableEnvironments && onEnvironmentChange ? 
( + <> + + Run on + onEnvironmentChange(value as EnvironmentId)} + > + {availableEnvironments.map((env) => { + const Icon = env.isPrimary ? MonitorIcon : CloudIcon; + return ( + + + + {env.label} + + + ); + })} + + + + + ) : null} + + Workspace + onEnvModeChange(value as EnvMode)} + > + + + {activeWorktreePath ? ( + + ) : ( + + )} + + {resolveCurrentWorkspaceLabel(activeWorktreePath)} + + + + + + + {resolveEnvModeLabel("worktree")} + + + + + + + ); +}); + export const BranchToolbar = memo(function BranchToolbar({ environmentId, threadId, @@ -74,34 +233,51 @@ export const BranchToolbar = memo(function BranchToolbar({ }); const envModeLocked = envLocked || (serverThread !== undefined && activeWorktreePath !== null); - const showEnvironmentPicker = - availableEnvironments && availableEnvironments.length > 1 && onEnvironmentChange; + const showEnvironmentPicker = Boolean( + availableEnvironments && availableEnvironments.length > 1 && onEnvironmentChange, + ); + const isMobile = useIsMobile(); if (!hasActiveThread || !activeProject) return null; return ( -
-
- {showEnvironmentPicker && ( - <> - - - - )} - + {isMobile ? ( + -
+ ) : ( +
+ {showEnvironmentPicker && availableEnvironments && onEnvironmentChange && ( + <> + + + + )} + +
+ )} void; + onActiveThreadBranchOverrideChange?: (refName: string | null) => void; onCheckoutPullRequestRequest?: (reference: string) => void; onComposerFocusRequest?: () => void; } @@ -67,7 +70,7 @@ function getBranchTriggerLabel(input: { }): string { const { activeWorktreePath, effectiveEnvMode, resolvedActiveBranch } = input; if (!resolvedActiveBranch) { - return "Select branch"; + return "Select ref"; } if (effectiveEnvMode === "worktree" && !activeWorktreePath) { return `From ${resolvedActiveBranch}`; @@ -76,6 +79,7 @@ function getBranchTriggerLabel(input: { } export function BranchToolbarBranchSelector({ + className, environmentId, threadId, draftId, @@ -190,7 +194,7 @@ export function BranchToolbarBranchSelector({ ); // --------------------------------------------------------------------------- - // Git branch queries + // Git ref queries // --------------------------------------------------------------------------- const queryClient = useQueryClient(); const [isBranchMenuOpen, setIsBranchMenuOpen] = useState(false); @@ -221,22 +225,27 @@ export function BranchToolbarBranchSelector({ query: deferredTrimmedBranchQuery, }), ); - const branches = useMemo( - () => branchesSearchData?.pages.flatMap((page) => page.branches) ?? [], + const refs = useMemo( + () => branchesSearchData?.pages.flatMap((page) => page.refs) ?? [], [branchesSearchData?.pages], ); const currentGitBranch = - branchStatusQuery.data?.branch ?? branches.find((branch) => branch.current)?.name ?? null; + branchStatusQuery.data?.refName ?? refs.find((refName) => refName.current)?.name ?? 
null; + const sourceControlPresentation = useMemo( + () => getSourceControlPresentation(branchStatusQuery.data?.sourceControlProvider), + [branchStatusQuery.data?.sourceControlProvider], + ); + const SourceControlIcon = sourceControlPresentation.Icon; const canonicalActiveBranch = resolveBranchToolbarValue({ envMode: effectiveEnvMode, activeWorktreePath, activeThreadBranch, currentGitBranch, }); - const branchNames = useMemo(() => branches.map((branch) => branch.name), [branches]); + const branchNames = useMemo(() => refs.map((refName) => refName.name), [refs]); const branchByName = useMemo( - () => new Map(branches.map((branch) => [branch.name, branch] as const)), - [branches], + () => new Map(refs.map((refName) => [refName.name, refName] as const)), + [refs], ); const normalizedDeferredBranchQuery = deferredTrimmedBranchQuery.toLowerCase(); const prReference = parsePullRequestReference(trimmedBranchQuery); @@ -286,11 +295,11 @@ export function BranchToolbarBranchSelector({ const shouldVirtualizeBranchList = filteredBranchPickerItems.length > 40; const totalBranchCount = branchesSearchData?.pages[0]?.totalCount ?? 0; const branchStatusText = isBranchesSearchPending - ? "Loading branches..." + ? "Loading refs..." : isFetchingNextPage - ? "Loading more branches..." + ? "Loading more refs..." : hasNextPage - ? `Showing ${branches.length} of ${totalBranchCount} branches` + ? 
`Showing ${refs.length} of ${totalBranchCount} refs` : null; // --------------------------------------------------------------------------- @@ -300,17 +309,17 @@ export function BranchToolbarBranchSelector({ startBranchActionTransition(async () => { await action().catch(() => undefined); await queryClient - .invalidateQueries({ queryKey: gitQueryKeys.branches(environmentId, branchCwd) }) + .invalidateQueries({ queryKey: gitQueryKeys.refs(environmentId, branchCwd) }) .catch(() => undefined); }); }; - const selectBranch = (branch: GitBranch) => { + const selectBranch = (refName: VcsRef) => { const api = readEnvironmentApi(environmentId); if (!api || !branchCwd || !activeProjectCwd || isBranchActionPending) return; if (isSelectingWorktreeBase) { - setThreadBranch(branch.name, null); + setThreadBranch(refName.name, null); setIsBranchMenuOpen(false); onComposerFocusRequest?.(); return; @@ -319,19 +328,19 @@ export function BranchToolbarBranchSelector({ const selectionTarget = resolveBranchSelectionTarget({ activeProjectCwd, activeWorktreePath, - branch, + refName, }); if (selectionTarget.reuseExistingWorktree) { - setThreadBranch(branch.name, selectionTarget.nextWorktreePath); + setThreadBranch(refName.name, selectionTarget.nextWorktreePath); setIsBranchMenuOpen(false); onComposerFocusRequest?.(); return; } - const selectedBranchName = branch.isRemote - ? deriveLocalBranchNameFromRemoteRef(branch.name) - : branch.name; + const selectedBranchName = refName.isRemote + ? 
deriveLocalBranchNameFromRemoteRef(refName.name) + : refName.name; setIsBranchMenuOpen(false); onComposerFocusRequest?.(); @@ -340,12 +349,12 @@ export function BranchToolbarBranchSelector({ const previousBranch = resolvedActiveBranch; setOptimisticBranch(selectedBranchName); try { - const checkoutResult = await api.git.checkout({ + const checkoutResult = await api.vcs.switchRef({ cwd: selectionTarget.checkoutCwd, - branch: branch.name, + refName: refName.name, }); - const nextBranchName = branch.isRemote - ? (checkoutResult.branch ?? selectedBranchName) + const nextBranchName = refName.isRemote + ? (checkoutResult.refName ?? selectedBranchName) : selectedBranchName; setOptimisticBranch(nextBranchName); setThreadBranch(nextBranchName, selectionTarget.nextWorktreePath); @@ -354,7 +363,7 @@ export function BranchToolbarBranchSelector({ toastManager.add( stackedThreadToast({ type: "error", - title: "Failed to checkout branch.", + title: "Failed to switch ref.", description: toBranchActionErrorMessage(error), }), ); @@ -362,7 +371,7 @@ export function BranchToolbarBranchSelector({ }); }; - const createBranch = (rawName: string) => { + const createRef = (rawName: string) => { const name = rawName.trim(); const api = readEnvironmentApi(environmentId); if (!api || !branchCwd || !name || isBranchActionPending) return; @@ -374,19 +383,19 @@ export function BranchToolbarBranchSelector({ const previousBranch = resolvedActiveBranch; setOptimisticBranch(name); try { - const createBranchResult = await api.git.createBranch({ + const createBranchResult = await api.vcs.createRef({ cwd: branchCwd, - branch: name, - checkout: true, + refName: name, + switchRef: true, }); - setOptimisticBranch(createBranchResult.branch); - setThreadBranch(createBranchResult.branch, activeWorktreePath); + setOptimisticBranch(createBranchResult.refName); + setThreadBranch(createBranchResult.refName, activeWorktreePath); } catch (error) { setOptimisticBranch(previousBranch); toastManager.add( 
stackedThreadToast({ type: "error", - title: "Failed to create and checkout branch.", + title: "Failed to create and switch ref.", description: toBranchActionErrorMessage(error), }), ); @@ -417,7 +426,7 @@ export function BranchToolbarBranchSelector({ return; } void queryClient.invalidateQueries({ - queryKey: gitQueryKeys.branches(environmentId, branchCwd), + queryKey: gitQueryKeys.refs(environmentId, branchCwd), }); }, [branchCwd, environmentId, queryClient], @@ -479,7 +488,7 @@ export function BranchToolbarBranchSelector({ useEffect(() => { if (shouldVirtualizeBranchList) return; maybeFetchNextBranchPage(); - }, [branches.length, maybeFetchNextBranchPage, shouldVirtualizeBranchList]); + }, [refs.length, maybeFetchNextBranchPage, shouldVirtualizeBranchList]); const triggerLabel = getBranchTriggerLabel({ activeWorktreePath, @@ -505,9 +514,14 @@ export function BranchToolbarBranchSelector({ onCheckoutPullRequestRequest(prReference); }} > -
- Checkout Pull Request - {prReference} +
+ + + + Checkout {sourceControlPresentation.terminology.singular} + + {prReference} +
); @@ -519,25 +533,25 @@ export function BranchToolbarBranchSelector({ key={itemValue} index={index} value={itemValue} - onClick={() => createBranch(trimmedBranchQuery)} + onClick={() => createRef(trimmedBranchQuery)} > - Create new branch "{trimmedBranchQuery}" + Create new ref "{trimmedBranchQuery}" ); } - const branch = branchByName.get(itemValue); - if (!branch) return null; + const refName = branchByName.get(itemValue); + if (!refName) return null; const hasSecondaryWorktree = - branch.worktreePath && activeProjectCwd && branch.worktreePath !== activeProjectCwd; - const badge = branch.current + refName.worktreePath && activeProjectCwd && refName.worktreePath !== activeProjectCwd; + const badge = refName.current ? "current" : hasSecondaryWorktree ? "worktree" - : branch.isRemote + : refName.isRemote ? "remote" - : branch.isDefault + : refName.isDefault ? "default" : null; return ( @@ -546,7 +560,7 @@ export function BranchToolbarBranchSelector({ key={itemValue} index={index} value={itemValue} - onClick={() => selectBranch(branch)} + onClick={() => selectBranch(refName)} >
{itemValue} @@ -577,25 +591,25 @@ export function BranchToolbarBranchSelector({ > } - className="text-muted-foreground/70 hover:text-foreground/80" - disabled={(isBranchesSearchPending && branches.length === 0) || isBranchActionPending} + className={cn("min-w-0 text-muted-foreground/70 hover:text-foreground/80", className)} + disabled={(isBranchesSearchPending && refs.length === 0) || isBranchActionPending} > - {triggerLabel} - + {triggerLabel} +
setBranchQuery(event.target.value)} />
- No branches found. + No refs found. {shouldVirtualizeBranchList ? ( diff --git a/apps/web/src/components/BranchToolbarEnvModeSelector.tsx b/apps/web/src/components/BranchToolbarEnvModeSelector.tsx index 6e1c80f5573..6d06882662f 100644 --- a/apps/web/src/components/BranchToolbarEnvModeSelector.tsx +++ b/apps/web/src/components/BranchToolbarEnvModeSelector.tsx @@ -58,6 +58,7 @@ export const BranchToolbarEnvModeSelector = memo(function BranchToolbarEnvModeSe return ( onEnvironmentChange(value as EnvironmentId)} items={environmentItems} diff --git a/apps/web/src/components/ChatView.browser.tsx b/apps/web/src/components/ChatView.browser.tsx index 2469acfb821..578dc7c045d 100644 --- a/apps/web/src/components/ChatView.browser.tsx +++ b/apps/web/src/components/ChatView.browser.tsx @@ -9,6 +9,8 @@ import { type MessageId, type OrchestrationReadModel, type ProjectId, + ProviderDriverKind, + ProviderInstanceId, type ServerConfig, type ServerLifecycleWelcomePayload, type ThreadId, @@ -18,7 +20,9 @@ import { DEFAULT_SERVER_SETTINGS, } from "@t3tools/contracts"; import { scopedThreadKey, scopeThreadRef } from "@t3tools/client-runtime"; +import { createModelCapabilities, createModelSelection } from "@t3tools/shared/model"; import { RouterProvider, createMemoryHistory } from "@tanstack/react-router"; +import { Option } from "effect"; import { HttpResponse, http, ws } from "msw"; import { setupWorker } from "msw/browser"; import { page } from "vitest/browser"; @@ -166,7 +170,8 @@ function createBaseServerConfig(): ServerConfig { issues: [], providers: [ { - provider: "codex", + driver: ProviderDriverKind.make("codex"), + instanceId: ProviderInstanceId.make("codex"), enabled: true, installed: true, version: "0.116.0", @@ -202,6 +207,8 @@ function createMockEnvironmentApi(input: { filesystem: { browse: input.browse, }, + sourceControl: {} as EnvironmentApi["sourceControl"], + vcs: {} as EnvironmentApi["vcs"], git: {} as EnvironmentApi["git"], orchestration: { dispatchCommand: 
input.dispatchCommand, @@ -322,7 +329,7 @@ function createSnapshotForTargetUser(options: { title: "Project", workspaceRoot: "/repo/project", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5", }, scripts: [], @@ -337,7 +344,7 @@ function createSnapshotForTargetUser(options: { projectId: PROJECT_ID, title: THREAD_TITLE, modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5", }, interactionMode: "default", @@ -402,7 +409,7 @@ function addThreadToSnapshot( projectId: PROJECT_ID, title: "New thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5", }, interactionMode: "default", @@ -739,7 +746,7 @@ function createSnapshotWithSecondaryProject(options?: { id: "thread-secondary-project" as ThreadId, projectId: SECOND_PROJECT_ID, title: "Release checklist", - modelSelection: { provider: "codex", model: "gpt-5" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5" }, interactionMode: "default", runtimeMode: "full-access", branch: "release/docs-portal", @@ -771,7 +778,7 @@ function createSnapshotWithSecondaryProject(options?: { id: ARCHIVED_SECONDARY_THREAD_ID, projectId: SECOND_PROJECT_ID, title: "Archived Docs Notes", - modelSelection: { provider: "codex", model: "gpt-5" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5" }, interactionMode: "default", runtimeMode: "full-access", branch: "release/docs-archive", @@ -806,7 +813,7 @@ function createSnapshotWithSecondaryProject(options?: { id: SECOND_PROJECT_ID, title: "Docs Portal", workspaceRoot: "/repo/clients/docs-portal", - defaultModelSelection: { provider: "codex", model: "gpt-5" }, + defaultModelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5" }, scripts: [], createdAt: NOW_ISO, updatedAt: NOW_ISO, @@ -883,7 +890,7 @@ function createSnapshotWithPendingUserInput(): 
OrchestrationReadModel { } function createSnapshotWithPlanFollowUpPrompt(options?: { - modelSelection?: { provider: "codex"; model: string }; + modelSelection?: { instanceId: ProviderInstanceId; model: string }; planMarkdown?: string; }): OrchestrationReadModel { const snapshot = createSnapshotForTargetUser({ @@ -891,7 +898,7 @@ function createSnapshotWithPlanFollowUpPrompt(options?: { targetText: "plan follow-up thread", }); const modelSelection = options?.modelSelection ?? { - provider: "codex" as const, + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5", }; const planMarkdown = @@ -947,13 +954,80 @@ function resolveWsRpc(body: NormalizedWsRpcRequestBody): unknown { if (tag === WS_METHODS.serverGetConfig) { return fixture.serverConfig; } - if (tag === WS_METHODS.gitListBranches) { + if (tag === WS_METHODS.serverDiscoverSourceControl) { + return { + versionControlSystems: [], + sourceControlProviders: [ + { + kind: "github", + label: "GitHub", + executable: "gh", + status: "available", + version: Option.some("gh version 2.0.0"), + installHint: "Install GitHub CLI.", + detail: Option.none(), + auth: { + status: "authenticated", + account: Option.some("t3-oss"), + host: Option.some("github.com"), + detail: Option.none(), + }, + }, + { + kind: "gitlab", + label: "GitLab", + executable: "glab", + status: "available", + version: Option.some("glab version 1.0.0"), + installHint: "Install GitLab CLI.", + detail: Option.none(), + auth: { + status: "authenticated", + account: Option.some("t3-oss"), + host: Option.some("gitlab.com"), + detail: Option.none(), + }, + }, + { + kind: "bitbucket", + label: "Bitbucket", + executable: "Bitbucket REST API", + status: "available", + version: Option.none(), + installHint: "Set Bitbucket API token environment variables.", + detail: Option.none(), + auth: { + status: "authenticated", + account: Option.some("t3-oss"), + host: Option.some("bitbucket.org"), + detail: Option.none(), + }, + }, + { + kind: "azure-devops", + 
label: "Azure DevOps", + executable: "az", + status: "available", + version: Option.some("azure-cli 2.0.0"), + installHint: "Install Azure CLI.", + detail: Option.none(), + auth: { + status: "authenticated", + account: Option.some("t3-oss"), + host: Option.some("dev.azure.com"), + detail: Option.none(), + }, + }, + ], + }; + } + if (tag === WS_METHODS.vcsListRefs) { return { isRepo: true, - hasOriginRemote: true, + hasPrimaryRemote: true, nextCursor: null, totalCount: 1, - branches: [ + refs: [ { name: "main", current: true, @@ -1657,6 +1731,74 @@ describe("ChatView timeline estimator parity (full app)", () => { customWsRpcResolver = null; document.body.innerHTML = ""; }); + + it("renders locked single-environment mobile run context as a static workspace label", async () => { + const mounted = await mountChatView({ + viewport: COMPACT_FOOTER_VIEWPORT, + snapshot: createSnapshotForTargetUser({ + targetMessageId: "msg-user-mobile-locked-workspace" as MessageId, + targetText: "locked mobile workspace", + }), + }); + + try { + await waitForElement( + () => + Array.from(document.querySelectorAll("span")).find( + (element) => element.textContent?.trim() === "Local checkout", + ) ?? 
null, + "Unable to find static mobile workspace label.", + ); + + expect(findButtonByText("Local checkout")).toBeNull(); + } finally { + await mounted.cleanup(); + } + }); + + it("keeps dismiss-only composer banners aligned on mobile", async () => { + const mounted = await mountChatView({ + viewport: COMPACT_FOOTER_VIEWPORT, + snapshot: createSnapshotForTargetUser({ + targetMessageId: "msg-user-mobile-version-banner" as MessageId, + targetText: "mobile version banner", + }), + configureFixture: (nextFixture) => { + nextFixture.serverConfig = { + ...nextFixture.serverConfig, + environment: { + ...nextFixture.serverConfig.environment, + serverVersion: "9.9.9", + }, + }; + }, + }); + + try { + const banner = await waitForElement( + () => + Array.from(document.querySelectorAll('[data-slot="alert"]')).find( + (element) => element.textContent?.includes("Client and server versions differ"), + ) ?? null, + "Unable to find version mismatch banner.", + ); + const title = banner.querySelector('[data-slot="alert-title"]'); + const description = banner.querySelector('[data-slot="alert-description"]'); + const dismissButton = banner.querySelector( + 'button[aria-label="Dismiss version mismatch warning"]', + ); + + expect(title).toBeTruthy(); + expect(description).toBeTruthy(); + expect(dismissButton).toBeTruthy(); + expect(dismissButton!.getBoundingClientRect().top).toBeLessThan( + description!.getBoundingClientRect().top, + ); + } finally { + await mounted.cleanup(); + } + }); + it("re-expands the bootstrap project using its logical key", async () => { useUiStateStore.setState({ projectExpandedById: { @@ -1922,7 +2064,7 @@ describe("ChatView timeline estimator parity (full app)", () => { try { await waitForServerConfigToApply(); const menuButton = await waitForElement( - () => document.querySelector('button[aria-label="Select editor"]'), + () => document.querySelector('button[aria-label="Copy options"]'), "Unable to find Open picker button.", ); (menuButton as 
HTMLButtonElement).click(); @@ -1971,7 +2113,7 @@ describe("ChatView timeline estimator parity (full app)", () => { try { await waitForServerConfigToApply(); const menuButton = await waitForElement( - () => document.querySelector('button[aria-label="Select editor"]'), + () => document.querySelector('button[aria-label="Copy options"]'), "Unable to find Open picker button.", ); (menuButton as HTMLButtonElement).click(); @@ -2283,16 +2425,16 @@ describe("ChatView timeline estimator parity (full app)", () => { branchButton.click(); const branchInput = await waitForElement( - () => document.querySelector('input[placeholder="Search branches..."]'), - "Unable to find branch search input.", + () => document.querySelector('input[placeholder="Search refs..."]'), + "Unable to find ref search input.", ); branchInput.focus(); - await page.getByPlaceholder("Search branches...").fill("1359"); + await page.getByPlaceholder("Search refs...").fill("1359"); const checkoutItem = await waitForElement( () => Array.from(document.querySelectorAll("span")).find( - (element) => element.textContent?.trim() === "Checkout Pull Request", + (element) => element.textContent?.trim() === "Checkout pull request", ) as HTMLSpanElement | null, "Unable to find checkout pull request option.", ); @@ -2421,7 +2563,7 @@ describe("ChatView timeline estimator parity (full app)", () => { { timeout: 8_000, interval: 16 }, ); - expect(wsRequests.some((request) => request._tag === WS_METHODS.gitCreateWorktree)).toBe( + expect(wsRequests.some((request) => request._tag === WS_METHODS.vcsCreateWorktree)).toBe( false, ); expect( @@ -2437,6 +2579,126 @@ describe("ChatView timeline estimator parity (full app)", () => { } }); + it("keeps custom provider instance ids when bootstrapping a local draft thread", async () => { + setDraftThreadWithoutWorktree(); + const openRouterInstanceId = ProviderInstanceId.make("claude_openrouter"); + const openRouterSelection = createModelSelection(openRouterInstanceId, 
"openai/gpt-5.5"); + useComposerDraftStore.getState().setModelSelection(THREAD_REF, openRouterSelection); + + const mounted = await mountChatView({ + viewport: DEFAULT_VIEWPORT, + snapshot: createDraftOnlySnapshot(), + configureFixture: (nextFixture) => { + nextFixture.serverConfig = { + ...nextFixture.serverConfig, + providers: [ + ...nextFixture.serverConfig.providers, + { + driver: ProviderDriverKind.make("claudeAgent"), + instanceId: ProviderInstanceId.make("claudeAgent"), + enabled: true, + installed: true, + version: "2.1.117", + status: "ready", + auth: { status: "authenticated" }, + checkedAt: NOW_ISO, + models: [ + { + slug: "claude-opus-4-7", + name: "Claude Opus 4.7", + isCustom: false, + capabilities: createModelCapabilities({ optionDescriptors: [] }), + }, + ], + slashCommands: [], + skills: [], + }, + { + driver: ProviderDriverKind.make("claudeAgent"), + instanceId: openRouterInstanceId, + displayName: "Claude OpenRouter", + enabled: true, + installed: true, + version: "2.1.117", + status: "ready", + auth: { status: "authenticated" }, + checkedAt: NOW_ISO, + models: [ + { + slug: "claude-opus-4-7", + name: "Claude Opus 4.7", + isCustom: false, + capabilities: createModelCapabilities({ optionDescriptors: [] }), + }, + ], + slashCommands: [], + skills: [], + }, + ], + settings: { + ...nextFixture.serverConfig.settings, + providerInstances: { + ...nextFixture.serverConfig.settings.providerInstances, + [openRouterInstanceId]: { + driver: ProviderDriverKind.make("claudeAgent"), + displayName: "Claude OpenRouter", + config: { customModels: ["openai/gpt-5.5"] }, + }, + }, + }, + }; + }, + resolveRpc: (body) => { + if (body._tag === ORCHESTRATION_WS_METHODS.dispatchCommand) { + return { + sequence: fixture.snapshot.snapshotSequence + 1, + }; + } + return undefined; + }, + }); + + try { + useComposerDraftStore.getState().setPrompt(THREAD_REF, "Hello there"); + await waitForLayout(); + + const sendButton = await waitForSendButton(); + 
expect(sendButton.disabled).toBe(false); + sendButton.click(); + + await vi.waitFor( + () => { + const turnStartRequest = wsRequests.find( + (request) => + request._tag === ORCHESTRATION_WS_METHODS.dispatchCommand && + request.type === "thread.turn.start", + ) as + | { + modelSelection?: { instanceId?: string; model?: string }; + bootstrap?: { + createThread?: { + modelSelection?: { instanceId?: string; model?: string }; + }; + }; + } + | undefined; + + expect(turnStartRequest?.modelSelection).toMatchObject({ + instanceId: openRouterInstanceId, + model: "openai/gpt-5.5", + }); + expect(turnStartRequest?.bootstrap?.createThread?.modelSelection).toMatchObject({ + instanceId: openRouterInstanceId, + model: "openai/gpt-5.5", + }); + }, + { timeout: 8_000, interval: 16 }, + ); + } finally { + await mounted.cleanup(); + } + }); + it("keeps new-worktree mode on empty server threads and bootstraps the first send", async () => { const snapshot = addThreadToSnapshot(createDraftOnlySnapshot(), THREAD_ID); const mounted = await mountChatView({ @@ -2448,13 +2710,13 @@ describe("ChatView timeline estimator parity (full app)", () => { ), }, resolveRpc: (body) => { - if (body._tag === WS_METHODS.gitListBranches) { + if (body._tag === WS_METHODS.vcsListRefs) { return { isRepo: true, - hasOriginRemote: true, + hasPrimaryRemote: true, nextCursor: null, totalCount: 1, - branches: [ + refs: [ { name: "main", current: true, @@ -2541,13 +2803,13 @@ describe("ChatView timeline estimator parity (full app)", () => { ), }, resolveRpc: (body) => { - if (body._tag === WS_METHODS.gitListBranches) { + if (body._tag === WS_METHODS.vcsListRefs) { return { isRepo: true, - hasOriginRemote: true, + hasPrimaryRemote: true, nextCursor: null, totalCount: 2, - branches: [ + refs: [ { name: "main", current: true, @@ -2637,13 +2899,13 @@ describe("ChatView timeline estimator parity (full app)", () => { viewport: DEFAULT_VIEWPORT, snapshot: snapshotWithTwoThreads, resolveRpc: (body) => { - if (body._tag === 
WS_METHODS.gitListBranches) { + if (body._tag === WS_METHODS.vcsListRefs) { return { isRepo: true, - hasOriginRemote: true, + hasPrimaryRemote: true, nextCursor: null, totalCount: 2, - branches: [ + refs: [ { name: "main", current: true, @@ -2897,13 +3159,13 @@ describe("ChatView timeline estimator parity (full app)", () => { snapshot: createDraftOnlySnapshot(), initialPath: `/draft/${activeDraftId}`, resolveRpc: (body) => { - if (body._tag === WS_METHODS.gitListBranches) { + if (body._tag === WS_METHODS.vcsListRefs) { return { isRepo: true, - hasOriginRemote: true, + hasPrimaryRemote: true, nextCursor: null, totalCount: 2, - branches: [ + refs: [ { name: "main", current: true, @@ -3022,13 +3284,13 @@ describe("ChatView timeline estimator parity (full app)", () => { snapshot: createDraftOnlySnapshot(), initialPath: `/draft/${draftId}`, resolveRpc: (body) => { - if (body._tag === WS_METHODS.gitListBranches) { + if (body._tag === WS_METHODS.vcsListRefs) { return { isRepo: true, - hasOriginRemote: true, + hasPrimaryRemote: true, nextCursor: null, totalCount: branches.length, - branches, + refs: branches, }; } return undefined; @@ -3046,8 +3308,8 @@ describe("ChatView timeline estimator parity (full app)", () => { branchButton.click(); await waitForElement( - () => document.querySelector('input[placeholder="Search branches..."]'), - "Unable to find branch search input.", + () => document.querySelector('input[placeholder="Search refs..."]'), + "Unable to find ref search input.", ); const popup = await waitForElement( @@ -3777,16 +4039,16 @@ describe("ChatView timeline estimator parity (full app)", () => { it("snapshots sticky codex settings into a new draft thread", async () => { useComposerDraftStore.setState({ stickyModelSelectionByProvider: { - codex: { - provider: "codex", - model: "gpt-5.3-codex", - options: { - reasoningEffort: "medium", - fastMode: true, - }, - }, + [ProviderInstanceId.make("codex")]: createModelSelection( + ProviderInstanceId.make("codex"), + 
"gpt-5.3-codex", + [ + { id: "reasoningEffort", value: "medium" }, + { id: "fastMode", value: true }, + ], + ), }, - stickyActiveProvider: "codex", + stickyActiveProvider: ProviderInstanceId.make("codex"), }); const mounted = await mountChatView({ @@ -3810,14 +4072,16 @@ describe("ChatView timeline estimator parity (full app)", () => { ); const newDraftId = draftIdFromPath(newThreadPath); + // `toMatchObject` matches objects loosely (extras ignored) but compares + // arrays strictly, so wrap `options` in `arrayContaining` to keep the + // assertion focused on sticky `fastMode` carrying over without asserting + // on exactly which other options are preserved. expect(composerDraftFor(newDraftId)).toMatchObject({ modelSelectionByProvider: { codex: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.3-codex", - options: { - fastMode: true, - }, + options: expect.arrayContaining([{ id: "fastMode", value: true }]), }, }, activeProvider: "codex", @@ -3830,16 +4094,16 @@ describe("ChatView timeline estimator parity (full app)", () => { it("hydrates the provider alongside a sticky claude model", async () => { useComposerDraftStore.setState({ stickyModelSelectionByProvider: { - claudeAgent: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - effort: "max", - fastMode: true, - }, - }, + [ProviderInstanceId.make("claudeAgent")]: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [ + { id: "effort", value: "max" }, + { id: "fastMode", value: true }, + ], + ), }, - stickyActiveProvider: "claudeAgent", + stickyActiveProvider: ProviderInstanceId.make("claudeAgent"), }); const mounted = await mountChatView({ @@ -3865,14 +4129,14 @@ describe("ChatView timeline estimator parity (full app)", () => { expect(composerDraftFor(newDraftId)).toMatchObject({ modelSelectionByProvider: { - claudeAgent: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { - effort: "max", - fastMode: true, 
- }, - }, + claudeAgent: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [ + { id: "effort", value: "max" }, + { id: "fastMode", value: true }, + ], + ), }, activeProvider: "claudeAgent", }); @@ -3912,16 +4176,16 @@ describe("ChatView timeline estimator parity (full app)", () => { it("prefers draft state over sticky composer settings and defaults", async () => { useComposerDraftStore.setState({ stickyModelSelectionByProvider: { - codex: { - provider: "codex", - model: "gpt-5.3-codex", - options: { - reasoningEffort: "medium", - fastMode: true, - }, - }, + [ProviderInstanceId.make("codex")]: createModelSelection( + ProviderInstanceId.make("codex"), + "gpt-5.3-codex", + [ + { id: "reasoningEffort", value: "medium" }, + { id: "fastMode", value: true }, + ], + ), }, - stickyActiveProvider: "codex", + stickyActiveProvider: ProviderInstanceId.make("codex"), }); const mounted = await mountChatView({ @@ -3945,27 +4209,27 @@ describe("ChatView timeline estimator parity (full app)", () => { ); const draftId = draftIdFromPath(threadPath); + // See the note on the sibling sticky-codex test: arrays match strictly + // under `toMatchObject`, so use `arrayContaining` to keep the assertion + // scoped to the sticky trait (`fastMode`) that must carry over. 
expect(composerDraftFor(draftId)).toMatchObject({ modelSelectionByProvider: { codex: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.3-codex", - options: { - fastMode: true, - }, + options: expect.arrayContaining([{ id: "fastMode", value: true }]), }, }, activeProvider: "codex", }); - useComposerDraftStore.getState().setModelSelection(draftId, { - provider: "codex", - model: "gpt-5.4", - options: { - reasoningEffort: "low", - fastMode: true, - }, - }); + useComposerDraftStore.getState().setModelSelection( + draftId, + createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.4", [ + { id: "reasoningEffort", value: "low" }, + { id: "fastMode", value: true }, + ]), + ); await newThreadButton.click(); @@ -3976,14 +4240,10 @@ describe("ChatView timeline estimator parity (full app)", () => { ); expect(composerDraftFor(draftId)).toMatchObject({ modelSelectionByProvider: { - codex: { - provider: "codex", - model: "gpt-5.4", - options: { - reasoningEffort: "low", - fastMode: true, - }, - }, + codex: createModelSelection(ProviderInstanceId.make("codex"), "gpt-5.4", [ + { id: "reasoningEffort", value: "low" }, + { id: "fastMode", value: true }, + ]), }, activeProvider: "codex", }); @@ -4134,8 +4394,7 @@ describe("ChatView timeline estimator parity (full app)", () => { }); try { - await waitForServerConfigToApply(); - await waitForCommandPaletteShortcutLabel(); + await Promise.all([waitForServerConfigToApply(), waitForCommandPaletteShortcutLabel()]); const palette = page.getByTestId("command-palette"); await openCommandPaletteFromTrigger(); @@ -4187,8 +4446,7 @@ describe("ChatView timeline estimator parity (full app)", () => { }); try { - await waitForServerConfigToApply(); - await waitForCommandPaletteShortcutLabel(); + await Promise.all([waitForServerConfigToApply(), waitForCommandPaletteShortcutLabel()]); const palette = page.getByTestId("command-palette"); await openCommandPaletteFromTrigger(); @@ -4261,13 +4519,13 @@ 
describe("ChatView timeline estimator parity (full app)", () => { }); try { - await waitForServerConfigToApply(); - await waitForCommandPaletteShortcutLabel(); + await Promise.all([waitForServerConfigToApply(), waitForCommandPaletteShortcutLabel()]); const palette = page.getByTestId("command-palette"); await openCommandPaletteFromTrigger(); await expect.element(palette).toBeInTheDocument(); await palette.getByText("Add project", { exact: true }).click(); + await palette.getByText("Local folder", { exact: true }).click(); const browseInput = await waitForCommandPaletteInput(ADD_PROJECT_SUBMENU_PLACEHOLDER); await page.getByPlaceholder(ADD_PROJECT_SUBMENU_PLACEHOLDER).fill("~/Development/"); @@ -4314,6 +4572,126 @@ describe("ChatView timeline estimator parity (full app)", () => { } }); + it("shows clone destination controls after resolving an add project repository", async () => { + const mounted = await mountChatView({ + viewport: DEFAULT_VIEWPORT, + snapshot: createSnapshotForTargetUser({ + targetMessageId: "msg-user-command-palette-add-project-remote" as MessageId, + targetText: "command palette add project remote", + }), + configureFixture: (nextFixture) => { + nextFixture.serverConfig = { + ...nextFixture.serverConfig, + keybindings: [ + { + command: "commandPalette.toggle", + shortcut: { + key: "k", + metaKey: false, + ctrlKey: false, + shiftKey: false, + altKey: false, + modKey: true, + }, + whenAst: { + type: "not", + node: { type: "identifier", name: "terminalFocus" }, + }, + }, + ], + }; + }, + resolveRpc: (body) => { + if (body._tag === WS_METHODS.filesystemBrowse) { + return { + parentPath: "~/", + entries: [{ name: "Development", fullPath: "~/Development" }], + }; + } + + if (body._tag === WS_METHODS.sourceControlLookupRepository) { + return { + provider: "github", + nameWithOwner: "t3-oss/t3-env", + url: "https://github.com/t3-oss/t3-env", + sshUrl: "git@github.com:t3-oss/t3-env.git", + }; + } + + if (body._tag === 
WS_METHODS.sourceControlCloneRepository) { + return { + cwd: body.destinationPath, + remoteUrl: body.remoteUrl, + repository: null, + }; + } + + if (body._tag === ORCHESTRATION_WS_METHODS.dispatchCommand) { + return { + sequence: fixture.snapshot.snapshotSequence + 1, + }; + } + + return undefined; + }, + }); + + try { + await Promise.all([waitForServerConfigToApply(), waitForCommandPaletteShortcutLabel()]); + const palette = page.getByTestId("command-palette"); + await openCommandPaletteFromTrigger(); + + await expect.element(palette).toBeInTheDocument(); + await palette.getByText("Add project", { exact: true }).click(); + await palette.getByText("GitHub repository", { exact: true }).click(); + + const repositoryInput = await waitForCommandPaletteInput( + "Enter GitHub repository (owner/repo)", + ); + await page.getByPlaceholder("Enter GitHub repository (owner/repo)").fill("t3-oss/t3-env"); + await dispatchInputKey(repositoryInput, { key: "Enter" }); + + await vi.waitFor( + () => { + const clonePathInput = document.querySelector( + 'input[placeholder="Enter path (e.g. ~/projects/my-app)"]', + ); + expect(clonePathInput?.value).toBe("~/"); + expect(document.body.textContent).toContain("Repository"); + expect(document.body.textContent).toContain("t3-oss/t3-env"); + expect(document.body.textContent).toContain("https://github.com/t3-oss/t3-env"); + expect(document.body.textContent).toContain("Select where to clone"); + expect(document.body.textContent).toContain("Development"); + expect(document.body.textContent).toContain("Clone"); + }, + { timeout: 8_000, interval: 16 }, + ); + + await page + .getByPlaceholder("Enter path (e.g. ~/projects/my-app)") + .fill("~/Development/t3env"); + const clonePathInput = await waitForCommandPaletteInput( + "Enter path (e.g. 
~/projects/my-app)", + ); + await dispatchInputKey(clonePathInput, { key: "Enter" }); + + await vi.waitFor( + () => { + const cloneRequest = wsRequests.find( + (request) => request._tag === WS_METHODS.sourceControlCloneRepository, + ) as { destinationPath?: string; remoteUrl?: string } | undefined; + expect(cloneRequest).toMatchObject({ + remoteUrl: "git@github.com:t3-oss/t3-env.git", + destinationPath: "~/Development/t3env", + }); + }, + { timeout: 8_000, interval: 16 }, + ); + } finally { + await mounted.cleanup(); + } + }); + it("opens add project browse mode from the sidebar add button", async () => { const mounted = await mountChatView({ viewport: DEFAULT_VIEWPORT, @@ -4340,6 +4718,7 @@ describe("ChatView timeline estimator parity (full app)", () => { const palette = page.getByTestId("command-palette"); await expect.element(palette).toBeInTheDocument(); + await palette.getByText("Local folder", { exact: true }).click(); const browseInput = await waitForCommandPaletteInput(ADD_PROJECT_SUBMENU_PLACEHOLDER); await expect.element(browseInput).toHaveValue("~/"); @@ -4402,6 +4781,7 @@ describe("ChatView timeline estimator parity (full app)", () => { const palette = page.getByTestId("command-palette"); await expect.element(palette).toBeInTheDocument(); + await palette.getByText("Local folder", { exact: true }).click(); const browseInput = await waitForCommandPaletteInput(ADD_PROJECT_SUBMENU_PLACEHOLDER); await expect.element(browseInput).toHaveValue("~/Development/"); @@ -4461,6 +4841,7 @@ describe("ChatView timeline estimator parity (full app)", () => { await page.getByTestId("sidebar-add-project-trigger").click(); await expect.element(palette).toBeInTheDocument(); + await palette.getByText("Local folder", { exact: true }).click(); const browseInput = await waitForCommandPaletteInput(ADD_PROJECT_SUBMENU_PLACEHOLDER); await page.getByPlaceholder(ADD_PROJECT_SUBMENU_PLACEHOLDER).fill("~/Desktop/fresh-project"); @@ -4540,6 +4921,7 @@ describe("ChatView timeline 
estimator parity (full app)", () => { await page.getByTestId("sidebar-add-project-trigger").click(); await expect.element(palette).toBeInTheDocument(); + await palette.getByText("Local folder", { exact: true }).click(); const browseInput = await waitForCommandPaletteInput(ADD_PROJECT_SUBMENU_PLACEHOLDER); await page.getByPlaceholder(ADD_PROJECT_SUBMENU_PLACEHOLDER).fill("~/Development/codex/"); @@ -4671,6 +5053,7 @@ describe("ChatView timeline estimator parity (full app)", () => { .element(palette.getByText("This device", { exact: true }).first()) .toBeInTheDocument(); await palette.getByText("Staging", { exact: true }).click(); + await palette.getByText("Local folder", { exact: true }).click(); const browseInput = await waitForCommandPaletteInput(ADD_PROJECT_SUBMENU_PLACEHOLDER); await expect.element(browseInput).toHaveValue("~/workspaces/"); @@ -4764,6 +5147,7 @@ describe("ChatView timeline estimator parity (full app)", () => { const palette = page.getByTestId("command-palette"); await expect.element(palette).toBeInTheDocument(); + await palette.getByText("Local folder", { exact: true }).click(); const browseInput = palette.getByPlaceholder(ADD_PROJECT_SUBMENU_PLACEHOLDER); await browseInput.fill("~/Applications/access"); @@ -4881,6 +5265,7 @@ describe("ChatView timeline estimator parity (full app)", () => { await expect.element(palette).toBeInTheDocument(); await palette.getByText("Add project", { exact: true }).click(); + await palette.getByText("Local folder", { exact: true }).click(); const browseInput = await waitForCommandPaletteInput(ADD_PROJECT_SUBMENU_PLACEHOLDER); await page.getByPlaceholder(ADD_PROJECT_SUBMENU_PLACEHOLDER).fill("~/Development/"); @@ -5452,7 +5837,10 @@ describe("ChatView timeline estimator parity (full app)", () => { const mounted = await mountChatView({ viewport: WIDE_FOOTER_VIEWPORT, snapshot: createSnapshotWithPlanFollowUpPrompt({ - modelSelection: { provider: "codex", model: "gpt-5.3-codex-spark" }, + modelSelection: { + 
instanceId: ProviderInstanceId.make("codex"), + model: "gpt-5.3-codex-spark", + }, planMarkdown: "# Imaginary Long-Range Plan: T3 Code Adaptive Orchestration and Safe-Delay Execution Initiative", }), @@ -5482,7 +5870,10 @@ describe("ChatView timeline estimator parity (full app)", () => { const mounted = await mountChatView({ viewport: WIDE_FOOTER_VIEWPORT, snapshot: createSnapshotWithPlanFollowUpPrompt({ - modelSelection: { provider: "codex", model: "gpt-5.3-codex-spark" }, + modelSelection: { + instanceId: ProviderInstanceId.make("codex"), + model: "gpt-5.3-codex-spark", + }, planMarkdown: "# Imaginary Long-Range Plan: T3 Code Adaptive Orchestration and Safe-Delay Execution Initiative", }), @@ -5606,14 +5997,17 @@ describe("ChatView timeline estimator parity (full app)", () => { projects: snapshot.projects.map((project) => project.id === PROJECT_ID ? Object.assign({}, project, { - defaultModelSelection: { provider: "codex", model: "gpt-5.4" }, + defaultModelSelection: { + instanceId: ProviderInstanceId.make("codex"), + model: "gpt-5.4", + }, }) : project, ), threads: snapshot.threads.map((thread) => thread.id === THREAD_ID ? 
Object.assign({}, thread, { - modelSelection: { provider: "codex", model: "gpt-5.4" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4" }, }) : thread, ), @@ -5664,43 +6058,36 @@ describe("ChatView timeline estimator parity (full app)", () => { providers: [ { ...nextFixture.serverConfig.providers[0]!, - provider: "codex", models: [ { slug: "gpt-5.1-codex-max", name: "GPT-5.1 Codex Max", isCustom: false, - capabilities: { - supportsFastMode: true, - supportsThinkingToggle: false, - reasoningEffortLevels: [], - promptInjectedEffortLevels: [], - contextWindowOptions: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + { id: "fastMode", label: "Fast Mode", type: "boolean" as const }, + ], + }), }, { slug: "gpt-5.3-codex", name: "GPT-5.3 Codex", isCustom: false, - capabilities: { - supportsFastMode: true, - supportsThinkingToggle: false, - reasoningEffortLevels: [], - promptInjectedEffortLevels: [], - contextWindowOptions: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + { id: "fastMode", label: "Fast Mode", type: "boolean" as const }, + ], + }), }, { slug: "gpt-5.4", name: "GPT-5.4", isCustom: false, - capabilities: { - supportsFastMode: true, - supportsThinkingToggle: false, - reasoningEffortLevels: [], - promptInjectedEffortLevels: [], - contextWindowOptions: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + { id: "fastMode", label: "Fast Mode", type: "boolean" as const }, + ], + }), }, ], }, diff --git a/apps/web/src/components/ChatView.logic.test.ts b/apps/web/src/components/ChatView.logic.test.ts index 08266e22a38..fecaeddafa2 100644 --- a/apps/web/src/components/ChatView.logic.test.ts +++ b/apps/web/src/components/ChatView.logic.test.ts @@ -1,5 +1,12 @@ import { scopeThreadRef } from "@t3tools/client-runtime"; -import { EnvironmentId, ProjectId, ThreadId, TurnId } from "@t3tools/contracts"; +import { + EnvironmentId, + ProjectId, + ProviderDriverKind, + 
ProviderInstanceId, + ThreadId, + TurnId, +} from "@t3tools/contracts"; import { afterEach, describe, expect, it, vi } from "vitest"; import { type EnvironmentState, useStore } from "../store"; import { type Thread } from "../types"; @@ -220,7 +227,7 @@ const makeThread = (input?: { codexThreadId: null, projectId: ProjectId.make("project-1"), title: "Thread", - modelSelection: { provider: "codex" as const, model: "gpt-5.4" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4" }, runtimeMode: "full-access" as const, interactionMode: "default" as const, session: null, @@ -253,7 +260,7 @@ function setStoreThreads(threads: ReadonlyArray>) name: "Project", cwd: "/tmp/project", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4", }, createdAt: "2026-03-29T00:00:00.000Z", @@ -452,7 +459,7 @@ describe("hasServerAcknowledgedLocalDispatch", () => { }; const previousSession = { - provider: "codex" as const, + provider: ProviderDriverKind.make("codex"), status: "ready" as const, createdAt: "2026-03-29T00:00:00.000Z", updatedAt: "2026-03-29T00:00:10.000Z", @@ -466,7 +473,7 @@ describe("hasServerAcknowledgedLocalDispatch", () => { codexThreadId: null, projectId, title: "Thread", - modelSelection: { provider: "codex", model: "gpt-5.4" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4" }, runtimeMode: "full-access", interactionMode: "default", session: previousSession, @@ -503,7 +510,7 @@ describe("hasServerAcknowledgedLocalDispatch", () => { codexThreadId: null, projectId, title: "Thread", - modelSelection: { provider: "codex", model: "gpt-5.4" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4" }, runtimeMode: "full-access", interactionMode: "default", session: previousSession, @@ -549,7 +556,7 @@ describe("hasServerAcknowledgedLocalDispatch", () => { codexThreadId: null, projectId, title: "Thread", - modelSelection: { 
provider: "codex", model: "gpt-5.4" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4" }, runtimeMode: "full-access", interactionMode: "default", session: previousSession, @@ -592,7 +599,7 @@ describe("hasServerAcknowledgedLocalDispatch", () => { codexThreadId: null, projectId, title: "Thread", - modelSelection: { provider: "codex", model: "gpt-5.4" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4" }, runtimeMode: "full-access", interactionMode: "default", session: previousSession, @@ -635,7 +642,7 @@ describe("hasServerAcknowledgedLocalDispatch", () => { codexThreadId: null, projectId, title: "Thread", - modelSelection: { provider: "codex", model: "gpt-5.4" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4" }, runtimeMode: "full-access", interactionMode: "default", session: previousSession, @@ -685,7 +692,7 @@ describe("hasServerAcknowledgedLocalDispatch", () => { codexThreadId: null, projectId, title: "Thread", - modelSelection: { provider: "codex", model: "gpt-5.4" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4" }, runtimeMode: "full-access", interactionMode: "default", session: previousSession, diff --git a/apps/web/src/components/ChatView.logic.ts b/apps/web/src/components/ChatView.logic.ts index 1c72f9a5a35..417313ef2c1 100644 --- a/apps/web/src/components/ChatView.logic.ts +++ b/apps/web/src/components/ChatView.logic.ts @@ -1,16 +1,15 @@ import { type EnvironmentId, + isProviderDriverKind, ProjectId, type ModelSelection, - type OrchestrationThreadActivity, - type ProviderKind, + type ProviderDriverKind, type ScopedThreadRef, type ThreadId, type TurnId, } from "@t3tools/contracts"; import { type ChatMessage, type SessionPhase, type Thread, type ThreadSession } from "../types"; import { type ComposerImageAttachment, type DraftThreadState } from "../composerDraftStore"; -import { deriveWorkLogEntries, type WorkLogEntry 
} from "../session-logic"; import { Schema } from "effect"; import { selectThreadByRef, useStore } from "../store"; import { @@ -181,30 +180,6 @@ export function cloneComposerImageForRetry( } } -/** - * Resolve which provider the health banner should reflect before a session - * starts. Once a session is active its provider takes precedence; otherwise - * fall back to the user's currently selected draft provider. - */ -export function resolveProviderHealthBannerProvider(opts: { - sessionProvider: ProviderKind | null; - selectedProvider: ProviderKind; -}): ProviderKind { - return opts.sessionProvider ?? opts.selectedProvider; -} - -/** - * Derive work-log entries that keep completed tool calls from previous turns - * visible while the user is composing a new message. Passing `undefined` for - * the turn-id filter causes `deriveWorkLogEntries` to include all activities - * rather than scoping to only the latest turn. - */ -export function deriveVisibleThreadWorkLogEntries( - activities: ReadonlyArray, -): WorkLogEntry[] { - return deriveWorkLogEntries(activities, undefined); -} - export function deriveComposerSendState(options: { prompt: string; imageCount: number; @@ -252,15 +227,39 @@ export function threadHasStarted(thread: Thread | null | undefined): boolean { ); } +// `threadProvider` is the open branded driver kind carried by the session. +// Unknown driver kinds degrade to `null` (i.e. "unlocked"), which is the safe +// rollback / fork behavior — the routing layer is the right place to surface +// "driver not installed" errors, not the lock state. +// +// `selectedProvider` takes the same open-string shape because the composer +// now tracks the picker selection as a `ProviderInstanceId` (e.g. +// `codex_personal`). Custom instance ids that don't directly match a +// registered driver resolve to `null` here, which matches the existing +// "unknown driver -> unlocked" semantics. 
Callers that want the lock to track +// a custom instance's underlying driver kind should resolve the instance id +// upstream and pass the correlated kind. export function deriveLockedProvider(input: { thread: Thread | null | undefined; - selectedProvider: ProviderKind | null; - threadProvider: ProviderKind | null; -}): ProviderKind | null { + selectedProvider: string | null; + threadProvider: string | null; +}): ProviderDriverKind | null { if (!threadHasStarted(input.thread)) { return null; } - return input.thread?.session?.provider ?? input.threadProvider ?? input.selectedProvider ?? null; + const sessionProvider = input.thread?.session?.provider ?? null; + if (sessionProvider) { + return sessionProvider; + } + const narrowedThreadProvider = + input.threadProvider && isProviderDriverKind(input.threadProvider) + ? input.threadProvider + : null; + const narrowedSelectedProvider = + input.selectedProvider && isProviderDriverKind(input.selectedProvider) + ? input.selectedProvider + : null; + return narrowedThreadProvider ?? narrowedSelectedProvider ?? 
null; } export async function waitForStartedServerThread( diff --git a/apps/web/src/components/ChatView.tsx b/apps/web/src/components/ChatView.tsx index 535c0d9fcae..9ef221e262a 100644 --- a/apps/web/src/components/ChatView.tsx +++ b/apps/web/src/components/ChatView.tsx @@ -1,14 +1,14 @@ import { type ApprovalRequestId, - DEFAULT_MODEL_BY_PROVIDER, - type ClaudeAgentEffort, + DEFAULT_MODEL, + defaultInstanceIdForDriver, type EnvironmentId, type MessageId, type ModelSelection, type ProjectScript, - type ProviderKind, type ProjectId, type ProviderApprovalDecision, + ProviderInstanceId, type ServerProvider, type ResolvedKeybindingsConfig, type ScopedThreadRef, @@ -17,6 +17,7 @@ import { type KeybindingCommand, OrchestrationThreadActivity, ProviderInteractionMode, + ProviderDriverKind, RuntimeMode, TerminalOpenInput, } from "@t3tools/contracts"; @@ -26,7 +27,11 @@ import { scopeProjectRef, scopeThreadRef, } from "@t3tools/client-runtime"; -import { applyClaudePromptEffortPrefix, createModelSelection } from "@t3tools/shared/model"; +import { + applyClaudePromptEffortPrefix, + createModelSelection, + resolvePromptInjectedEffort, +} from "@t3tools/shared/model"; import { projectScriptCwd, projectScriptRuntimeEnv } from "@t3tools/shared/projectScripts"; import { truncate } from "@t3tools/shared/String"; import { Debouncer } from "@tanstack/react-pacer"; @@ -99,9 +104,9 @@ import { BranchToolbar } from "./BranchToolbar"; import { resolveShortcutCommand, shortcutLabelForCommand } from "../keybindings"; import PlanSidebar from "./PlanSidebar"; import ThreadTerminalDrawer from "./ThreadTerminalDrawer"; -import { ChevronDownIcon } from "lucide-react"; +import { ChevronDownIcon, TriangleAlertIcon, WifiOffIcon } from "lucide-react"; import { cn, randomUUID } from "~/lib/utils"; -import { toastManager } from "./ui/toast"; +import { stackedThreadToast, toastManager } from "./ui/toast"; import { decodeProjectScriptKeybindingRule } from "~/lib/projectScriptKeybindings"; import { type 
NewProjectScriptInput } from "./ProjectScriptsControl"; import { @@ -112,10 +117,11 @@ import { import { newCommandId, newDraftId, newMessageId, newThreadId } from "~/lib/utils"; import { getProviderModelCapabilities, resolveSelectableProvider } from "../providerModels"; import { useSettings } from "../hooks/useSettings"; -import { resolveAppModelSelection } from "../modelSelection"; +import { resolveAppModelSelectionForInstance } from "../modelSelection"; import { isTerminalFocused } from "../lib/terminalFocus"; import { deriveLogicalProjectKeyFromSettings } from "../logicalProject"; import { + reconnectSavedEnvironment, useSavedEnvironmentRegistryStore, useSavedEnvironmentRuntimeStore, } from "../environments/runtime"; @@ -143,6 +149,7 @@ import { NoActiveThreadState } from "./NoActiveThreadState"; import { resolveEffectiveEnvMode, resolveEnvironmentOptionLabel } from "./BranchToolbar.logic"; import { ProviderStatusBanner } from "./chat/ProviderStatusBanner"; import { ThreadErrorBanner } from "./chat/ThreadErrorBanner"; +import { ComposerBannerStack, type ComposerBannerStackItem } from "./chat/ComposerBannerStack"; import { MAX_HIDDEN_MOUNTED_TERMINAL_THREADS, buildExpiredTerminalContextToastCopy, @@ -175,6 +182,13 @@ import { import { sanitizeThreadErrorMessage } from "~/rpc/transportError"; import { retainThreadDetailSubscription } from "../environments/runtime/service"; import { RightPanelSheet } from "./RightPanelSheet"; +import { Button } from "./ui/button"; +import { + buildVersionMismatchDismissalKey, + dismissVersionMismatch, + isVersionMismatchDismissed, + resolveServerConfigVersionMismatch, +} from "../versionSkew"; const IMAGE_ONLY_BOOTSTRAP_PROMPT = "[User attached one or more images without additional text. 
Respond using the conversation context and the attached image(s).]"; @@ -182,6 +196,11 @@ const EMPTY_ACTIVITIES: OrchestrationThreadActivity[] = []; const EMPTY_PROPOSED_PLANS: Thread["proposedPlans"] = []; const EMPTY_PROVIDERS: ServerProvider[] = []; const EMPTY_PENDING_USER_INPUT_ANSWERS: Record = {}; +type EnvironmentUnavailableState = { + readonly environmentId: EnvironmentId; + readonly label: string; + readonly connectionState: "connecting" | "disconnected" | "error"; +}; type ThreadPlanCatalogEntry = Pick; @@ -299,17 +318,15 @@ function useThreadPlanCatalog(threadIds: readonly ThreadId[]): ThreadPlanCatalog } function formatOutgoingPrompt(params: { - provider: ProviderKind; + provider: ProviderDriverKind; model: string | null; models: ReadonlyArray; effort: string | null; text: string; }): string { const caps = getProviderModelCapabilities(params.models, params.model, params.provider); - if (params.effort && caps.promptInjectedEffortLevels.includes(params.effort)) { - return applyClaudePromptEffortPrefix(params.text, params.effort as ClaudeAgentEffort | null); - } - return params.text; + const promptEffort = resolvePromptInjectedEffort(caps, params.effort); + return applyClaudePromptEffortPrefix(params.text, promptEffort); } const SCRIPT_TERMINAL_COLS = 120; const SCRIPT_TERMINAL_ROWS = 30; @@ -615,6 +632,7 @@ export default function ChatView(props: ChatViewProps) { (store) => store.setStickyModelSelection, ); const timestampFormat = settings.timestampFormat; + const autoOpenPlanSidebar = settings.autoOpenPlanSidebar; const navigate = useNavigate(); const rawSearch = useSearch({ strict: false, @@ -775,8 +793,8 @@ export default function ChatView(props: ChatViewProps) { threadId, draftThread, fallbackDraftProject?.defaultModelSelection ?? 
{ - provider: "codex", - model: DEFAULT_MODEL_BY_PROVIDER.codex, + instanceId: ProviderInstanceId.make("codex"), + model: DEFAULT_MODEL, }, localDraftError, ) @@ -851,6 +869,78 @@ export default function ChatView(props: ChatViewProps) { const primaryEnvironmentId = usePrimaryEnvironmentId(); const savedEnvironmentRegistry = useSavedEnvironmentRegistryStore((s) => s.byId); const savedEnvironmentRuntimeById = useSavedEnvironmentRuntimeStore((s) => s.byId); + const activeSavedEnvironmentRecord = + activeThread && activeThread.environmentId !== primaryEnvironmentId + ? (savedEnvironmentRegistry[activeThread.environmentId] ?? null) + : null; + const activeSavedEnvironmentRuntime = activeSavedEnvironmentRecord + ? (savedEnvironmentRuntimeById[activeSavedEnvironmentRecord.environmentId] ?? null) + : null; + const activeSavedEnvironmentConnectionState = activeSavedEnvironmentRecord + ? (activeSavedEnvironmentRuntime?.connectionState ?? "disconnected") + : "connected"; + const activeEnvironmentUnavailable = + activeSavedEnvironmentRecord !== null && activeSavedEnvironmentConnectionState !== "connected"; + const activeSavedEnvironmentId = activeSavedEnvironmentRecord?.environmentId ?? null; + const activeEnvironmentUnavailableLabel = activeSavedEnvironmentRecord + ? resolveEnvironmentOptionLabel({ + isPrimary: false, + environmentId: activeSavedEnvironmentRecord.environmentId, + runtimeLabel: activeSavedEnvironmentRuntime?.descriptor?.label ?? null, + savedLabel: activeSavedEnvironmentRecord.label, + }) + : null; + const activeEnvironmentUnavailableState = useMemo(() => { + if ( + !activeEnvironmentUnavailable || + !activeEnvironmentUnavailableLabel || + !activeSavedEnvironmentId + ) { + return null; + } + + return { + environmentId: activeSavedEnvironmentId, + label: activeEnvironmentUnavailableLabel, + connectionState: + activeSavedEnvironmentConnectionState === "connecting" || + activeSavedEnvironmentConnectionState === "error" + ? 
activeSavedEnvironmentConnectionState + : "disconnected", + }; + }, [ + activeEnvironmentUnavailable, + activeEnvironmentUnavailableLabel, + activeSavedEnvironmentConnectionState, + activeSavedEnvironmentId, + ]); + const [reconnectingEnvironmentId, setReconnectingEnvironmentId] = useState( + null, + ); + const handleReconnectActiveEnvironment = useCallback( + async (environmentId: EnvironmentId, label: string) => { + setReconnectingEnvironmentId(environmentId); + try { + await reconnectSavedEnvironment(environmentId); + toastManager.add({ + type: "success", + title: "Environment reconnected", + description: `${label} is ready.`, + }); + } catch (error) { + toastManager.add( + stackedThreadToast({ + type: "error", + title: "Could not reconnect environment", + description: error instanceof Error ? error.message : "Failed to reconnect.", + }), + ); + } finally { + setReconnectingEnvironmentId(null); + } + }, + [], + ); const projectGroupingSettings = useSettings((settings) => ({ sidebarProjectGroupingMode: settings.sidebarProjectGroupingMode, sidebarProjectGroupingOverrides: settings.sidebarProjectGroupingOverrides, @@ -1017,7 +1107,10 @@ export default function ChatView(props: ChatViewProps) { const lastVisitedAt = activeThreadLastVisitedAt ? Date.parse(activeThreadLastVisitedAt) : NaN; if (!Number.isNaN(lastVisitedAt) && lastVisitedAt >= turnCompletedAt) return; - markThreadVisited(scopedThreadKey(scopeThreadRef(serverThread.environmentId, serverThread.id))); + markThreadVisited( + scopedThreadKey(scopeThreadRef(serverThread.environmentId, serverThread.id)), + activeLatestTurn.completedAt, + ); }, [ activeLatestTurn?.completedAt, activeThreadLastVisitedAt, @@ -1029,7 +1122,9 @@ export default function ChatView(props: ChatViewProps) { const selectedProviderByThreadId = composerActiveProvider ?? null; const threadProvider = - activeThread?.modelSelection.provider ?? activeProject?.defaultModelSelection?.provider ?? null; + activeThread?.modelSelection.instanceId ?? 
+ activeProject?.defaultModelSelection?.instanceId ?? + null; const lockedProvider = deriveLockedProvider({ thread: activeThread, selectedProvider: selectedProviderByThreadId, @@ -1046,12 +1141,126 @@ export default function ChatView(props: ChatViewProps) { primaryEnvironmentId && activeThread?.environmentId === primaryEnvironmentId ? primaryServerConfig : (activeEnvRuntimeState?.serverConfig ?? primaryServerConfig); + const versionMismatch = resolveServerConfigVersionMismatch(serverConfig); + const versionMismatchDismissKey = + versionMismatch && activeThread + ? buildVersionMismatchDismissalKey(activeThread.environmentId, versionMismatch) + : null; + const [dismissedVersionMismatchKey, setDismissedVersionMismatchKey] = useState( + null, + ); + const versionMismatchDismissed = + versionMismatchDismissKey === dismissedVersionMismatchKey || + isVersionMismatchDismissed(versionMismatchDismissKey); + const showVersionMismatchBanner = + versionMismatch !== null && versionMismatchDismissKey !== null && !versionMismatchDismissed; + const hasMultipleRegisteredEnvironments = Object.keys(savedEnvironmentRegistry).length > 0; + const versionMismatchServerLabel = useMemo(() => { + if (!hasMultipleRegisteredEnvironments || !activeThread) { + return "server"; + } + + const isPrimary = activeThread.environmentId === primaryEnvironmentId; + const savedRecord = savedEnvironmentRegistry[activeThread.environmentId]; + const runtimeState = savedEnvironmentRuntimeById[activeThread.environmentId]; + return `${resolveEnvironmentOptionLabel({ + isPrimary, + environmentId: activeThread.environmentId, + runtimeLabel: runtimeState?.descriptor?.label ?? serverConfig?.environment.label ?? null, + savedLabel: savedRecord?.label ?? 
null, + })} server`; + }, [ + activeThread, + hasMultipleRegisteredEnvironments, + primaryEnvironmentId, + savedEnvironmentRegistry, + savedEnvironmentRuntimeById, + serverConfig?.environment.label, + ]); + const composerBannerItems = useMemo(() => { + const items: ComposerBannerStackItem[] = []; + if (activeEnvironmentUnavailableState) { + items.push({ + id: `environment-unavailable:${activeEnvironmentUnavailableState.environmentId}`, + variant: + activeEnvironmentUnavailableState.connectionState === "error" ? "error" : "warning", + icon: , + title: ( + <> + {activeEnvironmentUnavailableState.label} is{" "} + {activeEnvironmentUnavailableState.connectionState === "connecting" + ? "connecting" + : "disconnected"} + + ), + description: "Reconnect this environment before sending messages or running actions.", + actions: ( + <> + + + + ), + }); + } + if (showVersionMismatchBanner && versionMismatch && versionMismatchDismissKey) { + items.push({ + id: `version-mismatch:${versionMismatchDismissKey}`, + variant: "warning", + icon: , + title: "Client and server versions differ", + description: ( + <> + Client {versionMismatch.clientVersion} is connected to {versionMismatchServerLabel}{" "} + {versionMismatch.serverVersion}. Sync them if RPC calls or reconnects fail. + + ), + dismissLabel: "Dismiss version mismatch warning", + onDismiss: () => { + dismissVersionMismatch(versionMismatchDismissKey); + setDismissedVersionMismatchKey(versionMismatchDismissKey); + }, + }); + } + return items; + }, [ + activeEnvironmentUnavailableState, + handleReconnectActiveEnvironment, + navigate, + reconnectingEnvironmentId, + showVersionMismatchBanner, + versionMismatch, + versionMismatchDismissKey, + versionMismatchServerLabel, + ]); const providerStatuses = serverConfig?.providers ?? EMPTY_PROVIDERS; const unlockedSelectedProvider = resolveSelectableProvider( providerStatuses, - selectedProviderByThreadId ?? threadProvider ?? "codex", + selectedProviderByThreadId ?? threadProvider ?? 
ProviderDriverKind.make("codex"), ); - const selectedProvider: ProviderKind = lockedProvider ?? unlockedSelectedProvider; + const selectedProvider: ProviderDriverKind = lockedProvider ?? unlockedSelectedProvider; const phase = derivePhase(activeThread?.session ?? null); const threadActivities = activeThread?.activities ?? EMPTY_ACTIVITIES; const workLogEntries = useMemo( @@ -1423,10 +1632,24 @@ export default function ChatView(props: ChatViewProps) { const gitStatusQuery = useGitStatus({ environmentId, cwd: gitCwd }); const keybindings = useServerKeybindings(); const availableEditors = useServerAvailableEditors(); - const activeProviderStatus = useMemo( - () => providerStatuses.find((status) => status.provider === selectedProvider) ?? null, - [selectedProvider, providerStatuses], - ); + // Prefer an instance-id match so a custom Codex instance (e.g. + // `codex_personal`) surfaces its own status/message in the banner rather + // than the default Codex's. Falls back to first-match-by-kind when no + // saved instance id is available or the instance no longer exists. + const activeProviderInstanceId = + activeThread?.session?.providerInstanceId ?? + activeThread?.modelSelection.instanceId ?? + activeProject?.defaultModelSelection?.instanceId ?? + null; + const activeProviderStatus = useMemo(() => { + if (activeProviderInstanceId) { + return ( + providerStatuses.find((status) => status.instanceId === activeProviderInstanceId) ?? null + ); + } + const defaultInstanceId = defaultInstanceIdForDriver(selectedProvider); + return providerStatuses.find((status) => status.instanceId === defaultInstanceId) ?? null; + }, [activeProviderInstanceId, providerStatuses, selectedProvider]); const activeProjectCwd = activeProject?.cwd ?? null; const activeThreadWorktreePath = activeThread?.worktreePath ?? null; const activeWorkspaceRoot = activeThreadWorktreePath ?? activeProjectCwd ?? 
undefined; @@ -1855,11 +2078,13 @@ export default function ChatView(props: ChatViewProps) { title: `Deleted action "${deletedName ?? "Unknown"}"`, }); } catch (error) { - toastManager.add({ - type: "error", - title: "Could not delete action", - description: error instanceof Error ? error.message : "An unexpected error occurred.", - }); + toastManager.add( + stackedThreadToast({ + type: "error", + title: "Could not delete action", + description: error instanceof Error ? error.message : "An unexpected error occurred.", + }), + ); } }, [activeProject, persistProjectScripts], @@ -1941,7 +2166,7 @@ export default function ChatView(props: ChatViewProps) { if ( input.modelSelection !== undefined && (input.modelSelection.model !== serverThread.modelSelection.model || - input.modelSelection.provider !== serverThread.modelSelection.provider || + input.modelSelection.instanceId !== serverThread.modelSelection.instanceId || JSON.stringify(input.modelSelection.options ?? null) !== JSON.stringify(serverThread.modelSelection.options ?? null)) ) { @@ -2007,6 +2232,7 @@ export default function ChatView(props: ChatViewProps) { planSidebarOpenOnNextThreadRef.current = false; setPlanSidebarOpen(true); } else { + planSidebarOpenOnNextThreadRef.current = false; setPlanSidebarOpen(false); } planSidebarDismissedForTurnRef.current = null; @@ -2015,6 +2241,7 @@ export default function ChatView(props: ChatViewProps) { // Auto-open the plan sidebar when plan/todo steps arrive for the current turn. // Don't auto-open for plans carried over from a previous turn (the user can open manually). useEffect(() => { + if (!autoOpenPlanSidebar) return; if (!activePlan) return; if (planSidebarOpen) return; const latestTurnId = activeLatestTurn?.turnId ?? null; @@ -2022,7 +2249,13 @@ export default function ChatView(props: ChatViewProps) { const turnKey = activePlan.turnId ?? sidebarProposedPlan?.turnId ?? 
"__dismissed__"; if (planSidebarDismissedForTurnRef.current === turnKey) return; setPlanSidebarOpen(true); - }, [activePlan, activeLatestTurn?.turnId, planSidebarOpen, sidebarProposedPlan?.turnId]); + }, [ + activePlan, + activeLatestTurn?.turnId, + autoOpenPlanSidebar, + planSidebarOpen, + sidebarProposedPlan?.turnId, + ]); useEffect(() => { setIsRevertingCheckpoint(false); @@ -2321,6 +2554,13 @@ export default function ChatView(props: ChatViewProps) { const localApi = readLocalApi(); if (!api || !localApi || !activeThread || isRevertingCheckpoint) return; + if (activeEnvironmentUnavailable && activeEnvironmentUnavailableLabel) { + setThreadError( + activeThread.id, + `Reconnect ${activeEnvironmentUnavailableLabel} before reverting checkpoints.`, + ); + return; + } if (phase === "running" || isSendBusy || isConnecting) { setThreadError(activeThread.id, "Interrupt the current turn before reverting checkpoints."); return; @@ -2356,6 +2596,8 @@ export default function ChatView(props: ChatViewProps) { }, [ activeThread, + activeEnvironmentUnavailable, + activeEnvironmentUnavailableLabel, environmentId, isConnecting, isRevertingCheckpoint, @@ -2368,7 +2610,15 @@ export default function ChatView(props: ChatViewProps) { const onSend = async (e?: { preventDefault: () => void }) => { e?.preventDefault(); const api = readEnvironmentApi(environmentId); - if (!api || !activeThread || isSendBusy || isConnecting || sendInFlightRef.current) return; + if ( + !api || + !activeThread || + isSendBusy || + isConnecting || + activeEnvironmentUnavailable || + sendInFlightRef.current + ) + return; if (activePendingProgress) { onAdvanceActivePendingUserInput(); return; @@ -2426,11 +2676,13 @@ export default function ChatView(props: ChatViewProps) { expiredTerminalContextCount, "empty", ); - toastManager.add({ - type: "warning", - title: toastCopy.title, - description: toastCopy.description, - }); + toastManager.add( + stackedThreadToast({ + type: "warning", + title: toastCopy.title, + 
description: toastCopy.description, + }), + ); } return; } @@ -2512,11 +2764,13 @@ export default function ChatView(props: ChatViewProps) { expiredTerminalContextCount, "omitted", ); - toastManager.add({ - type: "warning", - title: toastCopy.title, - description: toastCopy.description, - }); + toastManager.add( + stackedThreadToast({ + type: "warning", + title: toastCopy.title, + description: toastCopy.description, + }), + ); } promptRef.current = ""; clearComposerDraftContent(composerDraftTarget); @@ -2543,10 +2797,8 @@ export default function ChatView(props: ChatViewProps) { } const title = truncate(titleSeed); const threadCreateModelSelection = createModelSelection( - ctxSelectedProvider, - ctxSelectedModel || - activeProject.defaultModelSelection?.model || - DEFAULT_MODEL_BY_PROVIDER.codex, + ctxSelectedModelSelection.instanceId, + ctxSelectedModel || activeProject.defaultModelSelection?.model || DEFAULT_MODEL, ctxSelectedModelSelection.options, ); @@ -2943,7 +3195,7 @@ export default function ChatView(props: ChatViewProps) { // Optimistically open the plan sidebar when implementing (not refining). // "default" mode here means the agent is executing the plan, which produces // step-tracking activities that the sidebar will display. 
- if (nextInteractionMode === "default") { + if (nextInteractionMode === "default" && autoOpenPlanSidebar) { planSidebarDismissedForTurnRef.current = null; setPlanSidebarOpen(true); } @@ -2972,6 +3224,7 @@ export default function ChatView(props: ChatViewProps) { runtimeMode, setComposerDraftInteractionMode, setThreadError, + autoOpenPlanSidebar, environmentId, ], ); @@ -2986,6 +3239,7 @@ export default function ChatView(props: ChatViewProps) { !isServerThread || isSendBusy || isConnecting || + activeEnvironmentUnavailable || sendInFlightRef.current ) { return; @@ -3064,8 +3318,8 @@ export default function ChatView(props: ChatViewProps) { return waitForStartedServerThread(scopeThreadRef(activeThread.environmentId, nextThreadId)); }) .then(() => { - // Signal that the plan sidebar should open on the new thread. - planSidebarOpenOnNextThreadRef.current = true; + // Signal that the plan sidebar should open on the new thread when enabled. + planSidebarOpenOnNextThreadRef.current = autoOpenPlanSidebar; return navigate({ to: "/$environmentId/$threadId", params: { @@ -3082,12 +3336,16 @@ export default function ChatView(props: ChatViewProps) { threadId: nextThreadId, }) .catch(() => undefined); - toastManager.add({ - type: "error", - title: "Could not start implementation thread", - description: - err instanceof Error ? err.message : "An error occurred while creating the new thread.", - }); + toastManager.add( + stackedThreadToast({ + type: "error", + title: "Could not start implementation thread", + description: + err instanceof Error + ? 
err.message + : "An error occurred while creating the new thread.", + }), + ); }) .then(finish, finish); }, [ @@ -3096,31 +3354,58 @@ export default function ChatView(props: ChatViewProps) { activeThreadBranch, activeThread, beginLocalDispatch, + activeEnvironmentUnavailable, isConnecting, isSendBusy, isServerThread, navigate, resetLocalDispatch, runtimeMode, + autoOpenPlanSidebar, environmentId, ]); const onProviderModelSelect = useCallback( - (provider: ProviderKind, model: string) => { + (instanceId: ProviderInstanceId, model: string) => { if (!activeThread) return; - if (lockedProvider !== null && provider !== lockedProvider) { + // Look up the configured instance so model normalization and custom + // model lookup stay scoped to that exact instance. Unknown instance ids + // are rejected by returning early; the server remains authoritative too. + const entry = providerStatuses.find((snapshot) => snapshot.instanceId === instanceId); + const resolvedDriverKind = entry?.driver ?? null; + if ( + lockedProvider !== null && + resolvedDriverKind !== null && + resolvedDriverKind !== lockedProvider + ) { scheduleComposerFocus(); return; } - const resolvedProvider = resolveSelectableProvider(providerStatuses, provider); - const resolvedModel = resolveAppModelSelection( - resolvedProvider, + if (lockedProvider !== null && activeThread.session?.providerInstanceId) { + const currentEntry = providerStatuses.find( + (snapshot) => snapshot.instanceId === activeThread.session?.providerInstanceId, + ); + if ( + currentEntry?.continuation?.groupKey && + entry?.continuation?.groupKey && + currentEntry.continuation.groupKey !== entry.continuation.groupKey + ) { + scheduleComposerFocus(); + return; + } + } + const resolvedModel = resolveAppModelSelectionForInstance( + instanceId, settings, providerStatuses, model, ); + if (!resolvedModel) { + scheduleComposerFocus(); + return; + } const nextModelSelection: ModelSelection = { - provider: resolvedProvider, + instanceId, model: 
resolvedModel, }; setComposerDraftModelSelection( @@ -3215,14 +3500,14 @@ export default function ChatView(props: ChatViewProps) { {/* Top bar */}
{/* Input bar */} -
- -
- - {isGitRepo && ( - +
+ +
+ - )} + onProviderModelSelect={onProviderModelSelect} + toggleInteractionMode={toggleInteractionMode} + handleRuntimeModeChange={handleRuntimeModeChange} + handleInteractionModeChange={handleInteractionModeChange} + togglePlanSidebar={togglePlanSidebar} + focusComposer={focusComposer} + scheduleComposerFocus={scheduleComposerFocus} + setThreadError={setThreadError} + onExpandImage={onExpandTimelineImage} + /> +
+
+ {isGitRepo && ( + + )} +
+ {pullRequestDialogState ? ( = {}): Thread { codexThreadId: null, projectId: PROJECT_ID, title: "Thread", - modelSelection: { provider: "codex", model: "gpt-5" }, + modelSelection: { instanceId: ProviderInstanceId.make("codex"), model: "gpt-5" }, runtimeMode: "full-access", interactionMode: "default", session: null, diff --git a/apps/web/src/components/CommandPalette.logic.ts b/apps/web/src/components/CommandPalette.logic.ts index 450f678dd5c..3f4997e215c 100644 --- a/apps/web/src/components/CommandPalette.logic.ts +++ b/apps/web/src/components/CommandPalette.logic.ts @@ -17,6 +17,7 @@ export interface CommandPaletteItem { readonly description?: string; readonly timestamp?: string; readonly icon: ReactNode; + readonly disabled?: boolean; /** Optional content rendered inline before the title text. */ readonly titleLeadingContent?: ReactNode; /** Optional content rendered inline after the title text (before the timestamp). */ diff --git a/apps/web/src/components/CommandPalette.tsx b/apps/web/src/components/CommandPalette.tsx index a9f9f8007cf..ab29afe36d0 100644 --- a/apps/web/src/components/CommandPalette.tsx +++ b/apps/web/src/components/CommandPalette.tsx @@ -2,13 +2,18 @@ import { scopeProjectRef, scopeThreadRef } from "@t3tools/client-runtime"; import { - DEFAULT_MODEL_BY_PROVIDER, + DEFAULT_MODEL, type EnvironmentId, type FilesystemBrowseResult, type ProjectId, + ProviderInstanceId, + type SourceControlDiscoveryResult, + type SourceControlProviderKind, + type SourceControlRepositoryInfo, } from "@t3tools/contracts"; import { useQuery, useQueryClient } from "@tanstack/react-query"; import { useNavigate, useParams } from "@tanstack/react-router"; +import { Option } from "effect"; import { ArrowDownIcon, ArrowLeftIcon, @@ -16,6 +21,7 @@ import { CornerLeftUpIcon, FolderIcon, FolderPlusIcon, + LinkIcon, MessageSquareIcon, SettingsIcon, SquarePenIcon, @@ -24,6 +30,7 @@ import { useCallback, useDeferredValue, useEffect, + useLayoutEffect, useMemo, useRef, useState, 
@@ -41,6 +48,10 @@ import { import { useHandleNewThread } from "../hooks/useHandleNewThread"; import { useSettings } from "../hooks/useSettings"; import { readLocalApi } from "../localApi"; +import { + getSourceControlDiscoverySnapshot, + refreshSourceControlDiscovery, +} from "../lib/sourceControlDiscoveryState"; import { startNewThreadInProjectFromContext, startNewThreadFromContext, @@ -88,6 +99,7 @@ import { } from "./CommandPalette.logic"; import { resolveEnvironmentOptionLabel } from "./BranchToolbar.logic"; import { CommandPaletteResults } from "./CommandPaletteResults"; +import { AzureDevOpsIcon, BitbucketIcon, GitHubIcon, GitLabIcon } from "./Icons"; import { ProjectFavicon } from "./ProjectFavicon"; import { ThreadRowLeadingStatus, ThreadRowTrailingStatus } from "./ThreadStatusIndicators"; import { useServerKeybindings } from "../rpc/serverState"; @@ -103,6 +115,7 @@ import { import { Button } from "./ui/button"; import { Kbd, KbdGroup } from "./ui/kbd"; import { stackedThreadToast, toastManager } from "./ui/toast"; +import { Tooltip, TooltipPopup, TooltipTrigger } from "./ui/tooltip"; import { ComposerHandleContext, useComposerHandleContext } from "../composerHandleContext"; import type { ChatComposerHandle } from "./chat/ChatComposer"; @@ -138,6 +151,181 @@ interface AddProjectEnvironmentOption { readonly isPrimary: boolean; } +type AddProjectRemoteProviderKind = Extract< + SourceControlProviderKind, + "github" | "gitlab" | "bitbucket" | "azure-devops" +>; +type AddProjectRemoteSource = AddProjectRemoteProviderKind | "url"; + +type AddProjectCloneFlow = + | { + readonly step: "repository"; + readonly environmentId: EnvironmentId; + readonly source: AddProjectRemoteSource; + } + | { + readonly step: "confirm"; + readonly environmentId: EnvironmentId; + readonly source: AddProjectRemoteSource; + readonly repositoryInput: string; + readonly repository: SourceControlRepositoryInfo | null; + readonly remoteUrl: string; + }; + +const REMOTE_PROJECT_SOURCES: 
ReadonlyArray = [ + "url", + "github", + "gitlab", + "bitbucket", + "azure-devops", +]; +const REMOTE_PROJECT_PROVIDER_SOURCES: ReadonlyArray = [ + "github", + "gitlab", + "bitbucket", + "azure-devops", +]; + +function remoteProjectSourceLabel(source: AddProjectRemoteSource): string { + switch (source) { + case "github": + return "GitHub"; + case "gitlab": + return "GitLab"; + case "bitbucket": + return "Bitbucket"; + case "azure-devops": + return "Azure DevOps"; + case "url": + return "Git URL"; + } +} + +function remoteProjectSourcePathHint(source: AddProjectRemoteSource): string { + switch (source) { + case "github": + return "owner/repo"; + case "gitlab": + return "group/project"; + case "bitbucket": + return "workspace/repository"; + case "azure-devops": + return "project/repository"; + case "url": + return "URL"; + } +} + +function remoteProjectSourceProvider( + source: AddProjectRemoteSource, +): AddProjectRemoteProviderKind | null { + return source === "url" ? null : source; +} + +function remoteProjectSourceIcon(source: AddProjectRemoteSource, className: string): ReactNode { + switch (source) { + case "github": + return ; + case "gitlab": + return ; + case "bitbucket": + return ; + case "azure-devops": + return ; + case "url": + return ; + } +} + +function remoteProjectInputPlaceholder(flow: AddProjectCloneFlow | null): string | null { + if (!flow) return null; + if (flow.step === "confirm") return null; + if (flow.source === "url") { + return "Enter Git clone URL"; + } + return `Enter ${remoteProjectSourceLabel(flow.source)} repository (${remoteProjectSourcePathHint(flow.source)})`; +} + +function sourceProviderKind(source: AddProjectRemoteSource): AddProjectRemoteProviderKind | null { + return source === "url" ? 
null : source; +} + +function sortAddProjectProviderSources( + readinessBySource: AddProjectRemoteSourceReadiness, +): ReadonlyArray { + return REMOTE_PROJECT_PROVIDER_SOURCES.toSorted((left, right) => { + const leftReady = readinessBySource[left].ready; + const rightReady = readinessBySource[right].ready; + if (leftReady !== rightReady) { + return leftReady ? -1 : 1; + } + return remoteProjectSourceLabel(left).localeCompare(remoteProjectSourceLabel(right)); + }); +} + +type AddProjectRemoteSourceReadiness = Record< + AddProjectRemoteSource, + { readonly ready: boolean; readonly hint: string | null } +>; + +function buildAddProjectRemoteSourceReadiness( + discovery: SourceControlDiscoveryResult | null, +): AddProjectRemoteSourceReadiness { + const unavailable = { + ready: false, + hint: "Provider status unavailable. Open Settings -> Source Control and rescan.", + } as const; + const defaultReadiness: AddProjectRemoteSourceReadiness = { + url: { ready: true, hint: null }, + github: unavailable, + gitlab: unavailable, + bitbucket: unavailable, + "azure-devops": unavailable, + }; + + if (!discovery) { + return defaultReadiness; + } + + const providerByKind = new Map( + discovery.sourceControlProviders.map((provider) => [provider.kind, provider]), + ); + const readiness = { ...defaultReadiness }; + + for (const source of REMOTE_PROJECT_SOURCES) { + const kind = sourceProviderKind(source); + if (!kind) continue; + const provider = providerByKind.get(kind); + if (!provider) { + readiness[source] = unavailable; + continue; + } + if (provider.status !== "available") { + readiness[source] = { ready: false, hint: provider.installHint }; + continue; + } + if (provider.auth.status === "unauthenticated") { + readiness[source] = { + ready: false, + hint: + Option.getOrNull(provider.auth.detail) ?? + `${provider.label} is not authenticated. 
Open Settings -> Source Control for setup guidance.`, + }; + continue; + } + readiness[source] = { ready: true, hint: null }; + } + + return readiness; +} + +function errorMessage(error: unknown): string { + if (error instanceof Error && error.message.trim().length > 0) { + return error.message; + } + return "An error occurred."; +} + export function CommandPalette({ children }: { children: ReactNode }) { const open = useCommandPaletteStore((store) => store.open); const setOpen = useCommandPaletteStore((store) => store.setOpen); @@ -228,6 +416,9 @@ function OpenCommandPaletteDialog() { null, ); const [isPickingProjectFolder, setIsPickingProjectFolder] = useState(false); + const [addProjectCloneFlow, setAddProjectCloneFlow] = useState(null); + const [isRemoteProjectLookingUp, setIsRemoteProjectLookingUp] = useState(false); + const [isRemoteProjectCloning, setIsRemoteProjectCloning] = useState(false); const primaryEnvironmentId = usePrimaryEnvironmentId(); const primaryEnvironmentLabel = readPrimaryEnvironmentDescriptor()?.label ?? 
null; const savedEnvironmentRegistry = useSavedEnvironmentRegistryStore((state) => state.byId); @@ -296,7 +487,10 @@ function OpenCommandPaletteDialog() { : null; return getEnvironmentBrowsePlatform(os); }, [browseEnvironmentId, primaryEnvironmentId, savedEnvironmentRuntimeById]); - const isBrowsing = isFilesystemBrowseQuery(query, browseEnvironmentPlatform); + const isRemoteProjectCloneFlow = addProjectCloneFlow !== null; + const isRemoteProjectRepositoryStep = addProjectCloneFlow?.step === "repository"; + const isBrowsing = + !isRemoteProjectRepositoryStep && isFilesystemBrowseQuery(query, browseEnvironmentPlatform); const paletteMode = getCommandPaletteMode({ currentView, isBrowsing }); const getAddProjectInitialQueryForEnvironment = useCallback( (environmentId: EnvironmentId | null): string => { @@ -542,6 +736,7 @@ function OpenCommandPaletteDialog() { } function popView(): void { + setAddProjectCloneFlow(null); if (viewStack.length <= 1) { setAddProjectEnvironmentId(null); } @@ -561,6 +756,7 @@ function OpenCommandPaletteDialog() { const startAddProjectBrowse = useCallback( (environmentId: EnvironmentId): void => { setAddProjectEnvironmentId(environmentId); + setAddProjectCloneFlow(null); pushPaletteView({ addonIcon: , groups: [], @@ -570,6 +766,158 @@ function OpenCommandPaletteDialog() { [getAddProjectInitialQueryForEnvironment], ); + const startAddProjectClone = useCallback( + (environmentId: EnvironmentId, source: AddProjectRemoteSource): void => { + setAddProjectEnvironmentId(environmentId); + setAddProjectCloneFlow({ step: "repository", environmentId, source }); + pushPaletteView({ + addonIcon: remoteProjectSourceIcon(source, ADDON_ICON_CLASS), + groups: [], + initialQuery: "", + }); + }, + [], + ); + + const openSourceControlSettings = useCallback(() => { + setOpen(false); + void navigate({ to: "/settings/source-control" }); + }, [navigate, setOpen]); + + const buildAddProjectSourceGroups = useCallback( + ( + environmentId: EnvironmentId, + 
readinessBySource: AddProjectRemoteSourceReadiness, + ): CommandPaletteView["groups"] => { + const sourceItems: Array = [ + { + kind: "action", + value: `action:add-project:${environmentId}:local`, + searchTerms: ["local", "folder", "directory", "browse"], + title: "Local folder", + description: "Browse a folder on disk", + icon: , + keepOpen: true, + run: async () => { + startAddProjectBrowse(environmentId); + }, + }, + ]; + + const orderedSources: ReadonlyArray = [ + "url", + ...sortAddProjectProviderSources(readinessBySource), + ]; + + for (const source of orderedSources) { + const label = remoteProjectSourceLabel(source); + const title = source === "url" ? "Git URL" : `${label} repository`; + const description = + source === "url" + ? "Clone from a remote URL" + : `Clone ${label} ${remoteProjectSourcePathHint(source)}`; + const readiness = readinessBySource[source]; + const disabledHint = readiness.hint; + + const titleTrailingContent = readiness.ready ? undefined : ( + + + { + openSourceControlSettings(); + }} + > + Setup Required + + } + /> + + {disabledHint ?? "Open Settings -> Source Control to configure this provider."} + + + + ); + + if (!readiness.ready) { + sourceItems.push({ + kind: "action", + value: `action:add-project:${environmentId}:${source}:not-ready`, + searchTerms: ["clone", "remote", "repository", "repo", "git", label, "setup required"], + title, + description, + disabled: true, + icon: remoteProjectSourceIcon(source, ITEM_ICON_CLASS), + ...(titleTrailingContent ? { titleTrailingContent } : {}), + run: async () => {}, + }); + continue; + } + + sourceItems.push({ + kind: "action", + value: `action:add-project:${environmentId}:${source}`, + searchTerms: ["clone", "remote", "repository", "repo", "git", label], + title, + description, + icon: remoteProjectSourceIcon(source, ITEM_ICON_CLASS), + ...(titleTrailingContent ? 
{ titleTrailingContent } : {}), + keepOpen: true, + run: async () => { + startAddProjectClone(environmentId, source); + }, + }); + } + + return [{ value: `sources:${environmentId}`, label: "Sources", items: sourceItems }]; + }, + [openSourceControlSettings, startAddProjectBrowse, startAddProjectClone], + ); + + const startAddProjectSourceSelection = useCallback( + (environmentId: EnvironmentId): void => { + setAddProjectEnvironmentId(environmentId); + setAddProjectCloneFlow(null); + const target = { environmentId }; + const initialDiscovery = getSourceControlDiscoverySnapshot(target).data; + pushPaletteView({ + addonIcon: , + groups: buildAddProjectSourceGroups( + environmentId, + buildAddProjectRemoteSourceReadiness(initialDiscovery), + ), + }); + + if (initialDiscovery) { + return; + } + + void refreshSourceControlDiscovery(target).then((discovery) => { + setViewStack((previousViews) => { + const currentTopView = previousViews.at(-1); + if (currentTopView?.groups[0]?.value !== `sources:${environmentId}`) { + return previousViews; + } + return [ + ...previousViews.slice(0, -1), + { + addonIcon: , + groups: buildAddProjectSourceGroups( + environmentId, + buildAddProjectRemoteSourceReadiness(discovery), + ), + }, + ]; + }); + }); + }, + [buildAddProjectSourceGroups], + ); + const addProjectEnvironmentItems: CommandPaletteActionItem[] = addProjectEnvironmentOptions.map( (option) => ({ kind: "action", @@ -580,7 +928,7 @@ function OpenCommandPaletteDialog() { icon: , keepOpen: true, run: async () => { - startAddProjectBrowse(option.environmentId); + startAddProjectSourceSelection(option.environmentId); }, }), ); @@ -617,15 +965,15 @@ function OpenCommandPaletteDialog() { return; } - startAddProjectBrowse(environmentId); + void startAddProjectSourceSelection(environmentId); }, [ addProjectEnvironmentGroups, addProjectEnvironmentOptions.length, defaultAddProjectEnvironmentId, - startAddProjectBrowse, + startAddProjectSourceSelection, ]); - useEffect(() => { + 
useLayoutEffect(() => { if (openIntent?.kind !== "add-project") { return; } @@ -675,29 +1023,34 @@ function OpenCommandPaletteDialog() { }); } - if (addProjectEnvironmentOptions.length > 1) { - actionItems.push({ - kind: "submenu", - value: "action:add-project", - searchTerms: ["add project", "folder", "directory", "browse", "environment"], - title: "Add project", - icon: , - addonIcon: , - groups: addProjectEnvironmentGroups, - }); - } else { - actionItems.push({ - kind: "action", - value: "action:add-project", - searchTerms: ["add project", "folder", "directory", "browse"], - title: "Add project", - icon: , - keepOpen: true, - run: async () => { - openAddProjectFlow(); - }, - }); - } + actionItems.push({ + kind: "action", + value: "action:add-project", + searchTerms: [ + "add project", + "folder", + "directory", + "browse", + "clone", + "remote", + "repository", + "repo", + "git", + "github", + "gitlab", + "bitbucket", + "azure", + "devops", + "url", + "environment", + ], + title: "Add project", + icon: , + keepOpen: true, + run: async () => { + openAddProjectFlow(); + }, + }); actionItems.push({ kind: "action", @@ -788,8 +1141,8 @@ function OpenCommandPaletteDialog() { workspaceRoot: cwd, createWorkspaceRootIfMissing: true, defaultModelSelection: { - provider: "codex", - model: DEFAULT_MODEL_BY_PROVIDER.codex, + instanceId: ProviderInstanceId.make("codex"), + model: DEFAULT_MODEL, }, createdAt: new Date().toISOString(), }); @@ -821,6 +1174,137 @@ function OpenCommandPaletteDialog() { ], ); + function getDefaultCloneParentPath(environmentId: EnvironmentId): string { + return getAddProjectInitialQueryForEnvironment(environmentId); + } + + async function submitAddProjectCloneFlow(destinationPathInput?: string): Promise { + if (!addProjectCloneFlow) { + return; + } + + const api = readEnvironmentApi(addProjectCloneFlow.environmentId); + if (!api) { + toastManager.add( + stackedThreadToast({ + type: "error", + title: "Unable to clone project", + description: 
"Environment API is not available.", + }), + ); + return; + } + + if (addProjectCloneFlow.step === "repository") { + const rawRepository = query.trim(); + if (rawRepository.length === 0 || isRemoteProjectLookingUp) { + return; + } + + const provider = remoteProjectSourceProvider(addProjectCloneFlow.source); + if (!provider) { + const destinationPath = getDefaultCloneParentPath(addProjectCloneFlow.environmentId); + setAddProjectCloneFlow({ + step: "confirm", + environmentId: addProjectCloneFlow.environmentId, + source: addProjectCloneFlow.source, + repositoryInput: rawRepository, + repository: null, + remoteUrl: rawRepository, + }); + setHighlightedItemValue(null); + setQuery(destinationPath); + setBrowseGeneration((generation) => generation + 1); + return; + } + + setIsRemoteProjectLookingUp(true); + try { + const repository = await api.sourceControl.lookupRepository({ + provider, + repository: rawRepository, + }); + const destinationPath = getDefaultCloneParentPath(addProjectCloneFlow.environmentId); + setAddProjectCloneFlow({ + step: "confirm", + environmentId: addProjectCloneFlow.environmentId, + source: addProjectCloneFlow.source, + repositoryInput: rawRepository, + repository, + remoteUrl: repository.sshUrl, + }); + setHighlightedItemValue(null); + setQuery(destinationPath); + setBrowseGeneration((generation) => generation + 1); + } catch (error) { + toastManager.add( + stackedThreadToast({ + type: "error", + title: "Repository lookup failed", + description: errorMessage(error), + }), + ); + } finally { + setIsRemoteProjectLookingUp(false); + } + return; + } + + const rawDestination = (destinationPathInput ?? 
query).trim(); + if (rawDestination.length === 0 || isRemoteProjectCloning) { + return; + } + + if (isUnsupportedWindowsProjectPath(rawDestination, browseEnvironmentPlatform)) { + toastManager.add( + stackedThreadToast({ + type: "error", + title: "Clone failed", + description: "Windows-style paths are only supported on Windows.", + }), + ); + return; + } + + if (isExplicitRelativeProjectPath(rawDestination) && !currentProjectCwdForBrowse) { + toastManager.add( + stackedThreadToast({ + type: "error", + title: "Clone failed", + description: "Relative paths require an active project.", + }), + ); + return; + } + + const destinationPath = resolveProjectPathForDispatch( + rawDestination, + currentProjectCwdForBrowse, + ); + if (destinationPath.length === 0) { + return; + } + + setIsRemoteProjectCloning(true); + try { + const result = await api.sourceControl.cloneRepository({ + remoteUrl: addProjectCloneFlow.remoteUrl, + destinationPath, + }); + await handleAddProject(result.cwd); + } catch (error) { + toastManager.add( + stackedThreadToast({ + type: "error", + title: "Clone failed", + description: errorMessage(error), + }), + ); + } finally { + setIsRemoteProjectCloning(false); + } + } + function browseTo(name: string): void { const nextQuery = appendBrowsePathSegment(query, name); setHighlightedItemValue(null); @@ -859,13 +1343,38 @@ function OpenCommandPaletteDialog() { browseUp, browseTo, }); + const cloneDestinationBrowseGroups = useMemo( + () => + browseGroups.map((group) => + group.value === "directories" ? { ...group, label: "Select where to clone" } : group, + ), + [browseGroups], + ); + + const remoteProjectContext = useMemo(() => { + if (addProjectCloneFlow?.step !== "confirm") { + return null; + } - let displayedGroups = filteredGroups; - if (isBrowsing) { + return { + title: addProjectCloneFlow.repository?.nameWithOwner ?? addProjectCloneFlow.repositoryInput, + description: addProjectCloneFlow.repository?.url ?? 
addProjectCloneFlow.remoteUrl, + icon: remoteProjectSourceIcon(addProjectCloneFlow.source, ITEM_ICON_CLASS), + }; + }, [addProjectCloneFlow]); + + let displayedGroups: CommandPaletteView["groups"] = filteredGroups; + if (addProjectCloneFlow?.step === "repository") { + displayedGroups = []; + } else if (addProjectCloneFlow?.step === "confirm") { + displayedGroups = relativePathNeedsActiveProject ? [] : cloneDestinationBrowseGroups; + } else if (isBrowsing) { displayedGroups = relativePathNeedsActiveProject ? [] : browseGroups; } - const inputPlaceholder = getCommandPaletteInputPlaceholder(paletteMode); + const inputPlaceholder = + remoteProjectInputPlaceholder(addProjectCloneFlow) ?? + getCommandPaletteInputPlaceholder(paletteMode); const isSubmenu = paletteMode === "submenu" || paletteMode === "submenu-browse"; const hasHighlightedBrowseItem = highlightedItemValue?.startsWith("browse:") ?? false; const canSubmitBrowsePath = isBrowsing && !relativePathNeedsActiveProject; @@ -877,8 +1386,25 @@ function OpenCommandPaletteDialog() { (hasTrailingPathSeparator(query) ? !browseResult : exactBrowseEntry === null); const useMetaForMod = isMacPlatform(navigator.platform); const submitModifierLabel = useMetaForMod ? "\u2318" : "Ctrl"; - const submitActionLabel = willCreateProjectPath ? "Create & Add" : "Add"; + const isCloneDestinationStep = addProjectCloneFlow?.step === "confirm"; + const submitActionLabel = isCloneDestinationStep + ? willCreateProjectPath + ? "Create & Clone" + : "Clone" + : willCreateProjectPath + ? "Create & Add" + : "Add"; const addShortcutLabel = hasHighlightedBrowseItem ? `${submitModifierLabel} Enter` : "Enter"; + const remoteProjectButtonLabel = addProjectCloneFlow + ? addProjectCloneFlow.source === "url" + ? 
"Continue" + : "Lookup" + : null; + const isRemoteProjectPending = isRemoteProjectLookingUp || isRemoteProjectCloning; + const canSubmitRemoteProjectFlow = + addProjectCloneFlow?.step === "repository" && + query.trim().length > 0 && + !isRemoteProjectPending; const fileManagerName = getLocalFileManagerName(navigator.platform); const canOpenProjectFromFileManager = isBrowsing && @@ -916,6 +1442,12 @@ function OpenCommandPaletteDialog() { } function handleKeyDown(event: KeyboardEvent): void { + if (addProjectCloneFlow?.step === "repository" && event.key === "Enter") { + event.preventDefault(); + void submitAddProjectCloneFlow(); + return; + } + const shouldSubmitBrowsePath = canSubmitBrowsePath && event.key === "Enter" && @@ -923,7 +1455,11 @@ function OpenCommandPaletteDialog() { if (shouldSubmitBrowsePath) { event.preventDefault(); - void handleAddProject(resolvedAddProjectPath); + if (isCloneDestinationStep) { + void submitAddProjectCloneFlow(resolvedAddProjectPath); + } else { + void handleAddProject(resolvedAddProjectPath); + } return; } @@ -934,6 +1470,10 @@ function OpenCommandPaletteDialog() { } function executeItem(item: CommandPaletteActionItem | CommandPaletteSubmenuItem): void { + if (item.disabled) { + return; + } + if (item.kind === "submenu") { pushView(item); return; @@ -995,11 +1535,14 @@ function OpenCommandPaletteDialog() { composerHandleRef?.current?.focusAtEnd(); return false; }} + onBackdropPointerDown={() => { + setOpen(false); + }} > { setHighlightedItemValue(typeof value === "string" ? value : null); @@ -1009,7 +1552,15 @@ function OpenCommandPaletteDialog() { >
- {isBrowsing ? ( + {addProjectCloneFlow?.step === "repository" ? ( + + ) : isBrowsing ? (
+ {remoteProjectContext ? ( +
+
+ Repository +
+
+ {remoteProjectContext.icon} + + + {remoteProjectContext.title} + + + {remoteProjectContext.description} + + +
+
+ ) : null}
@@ -1090,7 +1698,14 @@ function OpenCommandPaletteDialog() { Navigate - {!canSubmitBrowsePath || hasHighlightedBrowseItem ? ( + {addProjectCloneFlow?.step === "repository" ? ( + + Enter + + {remoteProjectButtonLabel ?? "Continue"} + + + ) : !canSubmitBrowsePath || hasHighlightedBrowseItem ? ( Enter Select diff --git a/apps/web/src/components/CommandPaletteResults.tsx b/apps/web/src/components/CommandPaletteResults.tsx index 8cdf0694a08..4ad08db0824 100644 --- a/apps/web/src/components/CommandPaletteResults.tsx +++ b/apps/web/src/components/CommandPaletteResults.tsx @@ -43,15 +43,19 @@ export function CommandPaletteResults(props: CommandPaletteResultsProps) { {group.label} - {(item) => ( - - )} + {(item) => + item.disabled ? ( + + ) : ( + + ) + } ))} @@ -59,6 +63,33 @@ export function CommandPaletteResults(props: CommandPaletteResultsProps) { ); } +function DisabledCommandPaletteResultRow(props: { + item: CommandPaletteActionItem | CommandPaletteSubmenuItem; +}) { + return ( +
+ {props.item.icon} + {props.item.description ? ( + + + {props.item.titleLeadingContent} + {props.item.title} + + + {props.item.description} + + + ) : ( + + {props.item.titleLeadingContent} + {props.item.title} + + )} + {props.item.titleTrailingContent} +
+ ); +} + function CommandPaletteResultRow(props: { item: CommandPaletteActionItem | CommandPaletteSubmenuItem; isActive: boolean; @@ -89,7 +120,6 @@ function CommandPaletteResultRow(props: { {props.item.titleLeadingContent} {props.item.title} - {props.item.titleTrailingContent} {props.item.description} @@ -99,9 +129,9 @@ function CommandPaletteResultRow(props: { {props.item.titleLeadingContent} {props.item.title} - {props.item.titleTrailingContent} )} + {props.item.titleTrailingContent} {props.item.timestamp ? ( {props.item.timestamp} diff --git a/apps/web/src/components/ComposerPromptEditor.tsx b/apps/web/src/components/ComposerPromptEditor.tsx index 8113099638e..c9696b0c737 100644 --- a/apps/web/src/components/ComposerPromptEditor.tsx +++ b/apps/web/src/components/ComposerPromptEditor.tsx @@ -152,10 +152,7 @@ function ComposerMentionDecorator(props: { path: string }) { return ( - + {props.path} @@ -275,7 +272,7 @@ function ComposerSkillDecorator(props: { skillLabel: string; skillDescription: s return ( - + {props.skillDescription} @@ -1493,7 +1490,7 @@ function ComposerPromptEditorInner({ const rootElement = editor.getRootElement(); if (!rootElement) return; const boundedCursor = clampCollapsedComposerCursor(snapshotRef.current.value, nextCursor); - rootElement.focus(); + rootElement.focus({ preventScroll: true }); editor.update(() => { $setSelectionAtComposerOffset(boundedCursor); }); @@ -1624,7 +1621,7 @@ function ComposerPromptEditorInner({ contentEditable={ 0 ? null : ( -
+
{placeholder}
) diff --git a/apps/web/src/components/DiffPanel.tsx b/apps/web/src/components/DiffPanel.tsx index b8f44459f72..3d2bb8d7b56 100644 --- a/apps/web/src/components/DiffPanel.tsx +++ b/apps/web/src/components/DiffPanel.tsx @@ -8,6 +8,7 @@ import { ChevronLeftIcon, ChevronRightIcon, Columns2Icon, + PilcrowIcon, Rows3Icon, TextWrapIcon, } from "lucide-react"; @@ -173,6 +174,7 @@ export default function DiffPanel({ mode = "inline" }: DiffPanelProps) { const { resolvedTheme } = useTheme(); const [diffRenderMode, setDiffRenderMode] = useState("stacked"); const [diffWordWrap, setDiffWordWrap] = useState(settings.diffWordWrap); + const [diffIgnoreWhitespace, setDiffIgnoreWhitespace] = useState(settings.diffIgnoreWhitespace); const patchViewportRef = useRef(null); const turnStripRef = useRef(null); const previousDiffOpenRef = useRef(false); @@ -278,6 +280,7 @@ export default function DiffPanel({ mode = "inline" }: DiffPanelProps) { threadId: activeThreadId, fromTurnCount: activeCheckpointRange?.fromTurnCount ?? null, toTurnCount: activeCheckpointRange?.toTurnCount ?? null, + ignoreWhitespace: diffIgnoreWhitespace, cacheScope: selectedTurn ? `turn:${selectedTurn.turnId}` : conversationCacheScope, enabled: isGitRepo, }), @@ -318,9 +321,10 @@ export default function DiffPanel({ mode = "inline" }: DiffPanelProps) { useEffect(() => { if (diffOpen && !previousDiffOpenRef.current) { setDiffWordWrap(settings.diffWordWrap); + setDiffIgnoreWhitespace(settings.diffIgnoreWhitespace); } previousDiffOpenRef.current = diffOpen; - }, [diffOpen, settings.diffWordWrap]); + }, [diffOpen, settings.diffIgnoreWhitespace, settings.diffWordWrap]); useEffect(() => { if (!selectedFilePath || !patchViewportRef.current) { @@ -552,6 +556,18 @@ export default function DiffPanel({ mode = "inline" }: DiffPanelProps) { > + { + setDiffIgnoreWhitespace(Boolean(pressed)); + }} + > + +
); diff --git a/apps/web/src/components/GitActionsControl.browser.tsx b/apps/web/src/components/GitActionsControl.browser.tsx index 24af3bed146..a0e25787413 100644 --- a/apps/web/src/components/GitActionsControl.browser.tsx +++ b/apps/web/src/components/GitActionsControl.browser.tsx @@ -100,12 +100,14 @@ vi.mock("~/editorPreferences", () => ({ vi.mock("~/lib/gitReactQuery", () => ({ gitInitMutationOptions: vi.fn(() => ({ __kind: "init" })), gitMutationKeys: { + publishRepository: vi.fn(() => ["publish-repository"]), pull: vi.fn(() => ["pull"]), runStackedAction: vi.fn(() => ["run-stacked-action"]), }, gitPullMutationOptions: vi.fn(() => ({ __kind: "pull" })), gitRunStackedActionMutationOptions: vi.fn(() => ({ __kind: "run-stacked-action" })), invalidateGitQueries: invalidateGitQueriesSpy, + sourceControlPublishRepositoryMutationOptions: vi.fn(() => ({ __kind: "publish-repository" })), })); vi.mock("~/lib/gitStatusState", () => ({ @@ -113,7 +115,15 @@ vi.mock("~/lib/gitStatusState", () => ({ resetGitStatusStateForTests: () => undefined, useGitStatus: vi.fn(() => ({ data: { - branch: BRANCH_NAME, + isRepo: true, + sourceControlProvider: { + kind: "github", + name: "GitHub", + baseUrl: "https://github.com", + }, + hasPrimaryRemote: true, + isDefaultRef: false, + refName: BRANCH_NAME, hasWorkingTreeChanges: false, workingTree: { files: [], insertions: 0, deletions: 0 }, hasUpstream: true, diff --git a/apps/web/src/components/GitActionsControl.logic.test.ts b/apps/web/src/components/GitActionsControl.logic.test.ts index c6a50b82c27..7950753330e 100644 --- a/apps/web/src/components/GitActionsControl.logic.test.ts +++ b/apps/web/src/components/GitActionsControl.logic.test.ts @@ -1,4 +1,4 @@ -import type { GitStatusResult } from "@t3tools/contracts"; +import type { VcsStatusResult } from "@t3tools/contracts"; import { assert, describe, it } from "vitest"; import { buildGitActionProgressStages, @@ -11,12 +11,12 @@ import { resolveThreadBranchUpdate, } from 
"./GitActionsControl.logic"; -function status(overrides: Partial = {}): GitStatusResult { +function status(overrides: Partial = {}): VcsStatusResult { return { isRepo: true, - hasOriginRemote: true, - isDefaultBranch: false, - branch: "feature/test", + hasPrimaryRemote: true, + isDefaultRef: false, + refName: "feature/test", hasWorkingTreeChanges: false, workingTree: { files: [], @@ -31,7 +31,7 @@ function status(overrides: Partial = {}): GitStatusResult { }; } -describe("when: branch is clean and has an open PR", () => { +describe("when: ref is clean and has an open PR", () => { it("resolveQuickAction opens the existing PR", () => { const quick = resolveQuickAction( status({ @@ -39,8 +39,8 @@ describe("when: branch is clean and has an open PR", () => { number: 10, title: "Open PR", url: "https://example.com/pr/10", - baseBranch: "main", - headBranch: "feature/test", + baseRef: "main", + headRef: "feature/test", state: "open", }, }), @@ -56,8 +56,8 @@ describe("when: branch is clean and has an open PR", () => { number: 11, title: "Existing PR", url: "https://example.com/pr/11", - baseBranch: "main", - headBranch: "feature/test", + baseRef: "main", + headRef: "feature/test", state: "open", }, }), @@ -150,7 +150,7 @@ describe("when: git status is unavailable", () => { }); }); -describe("when: branch is clean, ahead, and has an open PR", () => { +describe("when: ref is clean, ahead, and has an open PR", () => { it("resolveQuickAction prefers push", () => { const quick = resolveQuickAction( status({ @@ -159,8 +159,8 @@ describe("when: branch is clean, ahead, and has an open PR", () => { number: 13, title: "Open PR", url: "https://example.com/pr/13", - baseBranch: "main", - headBranch: "feature/test", + baseRef: "main", + headRef: "feature/test", state: "open", }, }), @@ -177,8 +177,8 @@ describe("when: branch is clean, ahead, and has an open PR", () => { number: 12, title: "Existing PR", url: "https://example.com/pr/12", - baseBranch: "main", - headBranch: 
"feature/test", + baseRef: "main", + headRef: "feature/test", state: "open", }, }), @@ -212,7 +212,7 @@ describe("when: branch is clean, ahead, and has an open PR", () => { }); }); -describe("when: branch is clean, ahead, and has no open PR", () => { +describe("when: ref is clean, ahead, and has no open PR", () => { it("resolveQuickAction pushes and creates a PR", () => { const quick = resolveQuickAction(status({ aheadCount: 2, pr: null }), false); assert.deepInclude(quick, { @@ -253,7 +253,53 @@ describe("when: branch is clean, ahead, and has no open PR", () => { }); }); -describe("when: branch is clean, up to date, and has no open PR", () => { +describe("when: source control provider uses merge requests", () => { + it("uses GitLab MR terminology in quick actions and menu items", () => { + const gitlabStatus = status({ + aheadCount: 2, + sourceControlProvider: { + kind: "gitlab", + name: "GitLab", + baseUrl: "https://gitlab.com", + }, + }); + + const quick = resolveQuickAction(gitlabStatus, false); + const items = buildMenuItems(gitlabStatus, false); + + assert.deepInclude(quick, { + kind: "run_action", + action: "create_pr", + label: "Push & create MR", + }); + assert.deepInclude(items[2], { + id: "pr", + label: "Create MR", + }); + }); +}); + +describe("when: ref is clean, up to date, and has no open PR", () => { + it("enables create PR when synced with upstream but ahead of default", () => { + const syncedFeature = status({ + aheadCount: 0, + behindCount: 0, + aheadOfDefaultCount: 1, + pr: null, + }); + + const quick = resolveQuickAction(syncedFeature, false); + assert.deepInclude(quick, { + label: "Create PR", + disabled: false, + kind: "run_action", + action: "create_pr", + }); + + const items = buildMenuItems(syncedFeature, false); + assert.equal(items.find((item) => item.id === "pr")?.disabled, false); + }); + it("resolveQuickAction returns disabled no-action state", () => { const quick = resolveQuickAction( status({ aheadCount: 0, behindCount: 0, 
hasWorkingTreeChanges: false, pr: null }), @@ -293,7 +339,7 @@ describe("when: branch is clean, up to date, and has no open PR", () => { }); }); -describe("when: branch is behind upstream", () => { +describe("when: ref is behind upstream", () => { it("resolveQuickAction returns pull", () => { const quick = resolveQuickAction(status({ behindCount: 2 }), false); assert.deepInclude(quick, { kind: "run_pull", label: "Pull", disabled: false }); @@ -330,11 +376,11 @@ describe("when: branch is behind upstream", () => { }); }); -describe("when: branch has diverged from upstream", () => { +describe("when: ref has diverged from upstream", () => { it("resolveQuickAction returns a disabled sync hint", () => { const quick = resolveQuickAction(status({ aheadCount: 2, behindCount: 1 }), false); assert.deepEqual(quick, { - label: "Sync branch", + label: "Sync ref", disabled: true, kind: "show_hint", hint: "Branch has diverged from upstream. Rebase/merge first.", @@ -375,8 +421,8 @@ describe("when: working tree has local changes", () => { number: 16, title: "Existing PR", url: "https://example.com/pr/16", - baseBranch: "main", - headBranch: "feature/test", + baseRef: "main", + headRef: "feature/test", state: "open", }, }), @@ -418,12 +464,54 @@ describe("when: working tree has local changes", () => { }, ]); }); + + it("buildMenuItems enables push for ahead commits while local changes remain uncommitted", () => { + const items = buildMenuItems( + status({ + refName: "feature/test", + hasWorkingTreeChanges: true, + aheadCount: 1, + workingTree: { + files: [{ path: ".vercel/project.json", insertions: 1, deletions: 0 }], + insertions: 1, + deletions: 0, + }, + }), + false, + ); + assert.deepEqual(items, [ + { + id: "commit", + label: "Commit", + disabled: false, + icon: "commit", + kind: "open_dialog", + dialogAction: "commit", + }, + { + id: "push", + label: "Push", + disabled: false, + icon: "push", + kind: "open_dialog", + dialogAction: "push", + }, + { + id: "pr", + label: "Create 
PR", + disabled: true, + icon: "pr", + kind: "open_dialog", + dialogAction: "create_pr", + }, + ]); + }); }); -describe("when: on default branch without open PR", () => { +describe("when: on default ref without open PR", () => { it("resolveQuickAction returns commit and push when local changes exist", () => { const quick = resolveQuickAction( - status({ branch: "main", hasWorkingTreeChanges: true }), + status({ refName: "main", hasWorkingTreeChanges: true }), false, true, ); @@ -435,9 +523,9 @@ describe("when: on default branch without open PR", () => { }); }); - it("resolveQuickAction returns push when branch is ahead", () => { + it("resolveQuickAction returns push when ref is ahead", () => { const quick = resolveQuickAction( - status({ branch: "main", aheadCount: 2, pr: null }), + status({ refName: "main", aheadCount: 2, pr: null }), false, true, ); @@ -450,7 +538,7 @@ describe("when: on default branch without open PR", () => { }); }); -describe("when: working tree has local changes and branch is behind upstream", () => { +describe("when: working tree has local changes and ref is behind upstream", () => { it("resolveQuickAction still prefers commit, push, and create PR", () => { const quick = resolveQuickAction( status({ hasWorkingTreeChanges: true, behindCount: 1 }), @@ -497,14 +585,14 @@ describe("when: working tree has local changes and branch is behind upstream", ( describe("when: HEAD is detached and there are no local changes", () => { it("resolveQuickAction shows detached head hint", () => { const quick = resolveQuickAction( - status({ branch: null, hasWorkingTreeChanges: false, hasUpstream: false }), + status({ refName: null, hasWorkingTreeChanges: false, hasUpstream: false }), false, ); assert.deepInclude(quick, { kind: "show_hint", label: "Commit", disabled: true }); }); it("buildMenuItems keeps commit, push, and PR disabled", () => { - const items = buildMenuItems(status({ branch: null, hasWorkingTreeChanges: false }), false); + const items = 
buildMenuItems(status({ refName: null, hasWorkingTreeChanges: false }), false); assert.deepEqual(items, [ { id: "commit", @@ -534,7 +622,7 @@ describe("when: HEAD is detached and there are no local changes", () => { }); }); -describe("when: branch has no upstream configured", () => { +describe("when: ref has no upstream configured", () => { it("resolveQuickAction is disabled when clean, no upstream, and no local commits are ahead", () => { const quick = resolveQuickAction( status({ hasUpstream: false, pr: null, aheadCount: 0 }), @@ -557,8 +645,8 @@ describe("when: branch has no upstream configured", () => { number: 14, title: "Existing PR", url: "https://example.com/pr/14", - baseBranch: "main", - headBranch: "feature/test", + baseRef: "main", + headRef: "feature/test", state: "open", }, }), @@ -580,8 +668,8 @@ describe("when: branch has no upstream configured", () => { number: 15, title: "Existing PR", url: "https://example.com/pr/15", - baseBranch: "main", - headBranch: "feature/test", + baseRef: "main", + headRef: "feature/test", state: "open", }, }), @@ -642,7 +730,7 @@ describe("when: branch has no upstream configured", () => { }); }); - it("resolveQuickAction disables push-and-pr flows when no origin remote exists", () => { + it("resolveQuickAction publishes when no origin remote exists", () => { const quick = resolveQuickAction( status({ hasUpstream: false, @@ -654,10 +742,9 @@ describe("when: branch has no upstream configured", () => { false, ); assert.deepEqual(quick, { - kind: "show_hint", - label: "Push", - hint: 'Add an "origin" remote before pushing or creating a PR.', - disabled: true, + kind: "open_publish", + label: "Publish repository", + disabled: false, }); }); @@ -691,7 +778,7 @@ describe("when: branch has no upstream configured", () => { ]); }); - it("buildMenuItems disables push and create PR when no origin remote exists", () => { + it("buildMenuItems hides push and create PR when no origin remote exists", () => { const items = buildMenuItems( 
status({ hasUpstream: false, pr: null, aheadCount: 2 }), false, @@ -706,29 +793,13 @@ describe("when: branch has no upstream configured", () => { kind: "open_dialog", dialogAction: "commit", }, - { - id: "push", - label: "Push", - disabled: true, - icon: "push", - kind: "open_dialog", - dialogAction: "push", - }, - { - id: "pr", - label: "Create PR", - disabled: true, - icon: "pr", - kind: "open_dialog", - dialogAction: "create_pr", - }, ]); }); - it("resolveQuickAction is disabled on default branch when no upstream exists and no commits are ahead", () => { + it("resolveQuickAction is disabled on default ref when no upstream exists and no commits are ahead", () => { const quick = resolveQuickAction( status({ - branch: "main", + refName: "main", hasUpstream: false, aheadCount: 0, pr: null, @@ -744,10 +815,10 @@ describe("when: branch has no upstream configured", () => { }); }); - it("resolveQuickAction uses push-only on default branch when no upstream exists and commits are ahead", () => { + it("resolveQuickAction uses push-only on default ref when no upstream exists and commits are ahead", () => { const quick = resolveQuickAction( status({ - branch: "main", + refName: "main", hasUpstream: false, aheadCount: 1, pr: null, @@ -763,7 +834,7 @@ describe("when: branch has no upstream configured", () => { }); }); - it("buildMenuItems still disables push and create PR when branch is behind", () => { + it("buildMenuItems still disables push and create PR when ref is behind", () => { const items = buildMenuItems( status({ hasUpstream: false, @@ -803,7 +874,7 @@ describe("when: branch has no upstream configured", () => { }); describe("requiresDefaultBranchConfirmation", () => { - it("requires confirmation for push actions on default branch", () => { + it("requires confirmation for push actions on default ref", () => { assert.isFalse(requiresDefaultBranchConfirmation("commit", true)); assert.isTrue(requiresDefaultBranchConfirmation("push", true)); 
assert.isTrue(requiresDefaultBranchConfirmation("create_pr", true)); @@ -823,9 +894,9 @@ describe("resolveDefaultBranchActionDialogCopy", () => { }); assert.deepEqual(copy, { - title: "Push to default branch?", + title: "Push to default ref?", description: - 'This action will push local commits on "main". You can continue on this branch or create a feature branch and run the same action there.', + 'This action will push local commits on "main". You can continue on this ref or create a feature ref and run the same action there.', continueLabel: "Push to main", }); }); @@ -838,9 +909,9 @@ describe("resolveDefaultBranchActionDialogCopy", () => { }); assert.deepEqual(copy, { - title: "Push & create PR from default branch?", + title: "Push & create PR from default ref?", description: - 'This action will push local commits and create a PR on "main". You can continue on this branch or create a feature branch and run the same action there.', + 'This action will push local commits and create a pull request on "main". You can continue on this ref or create a feature ref and run the same action there.', continueLabel: "Push & create PR", }); }); @@ -853,9 +924,9 @@ describe("resolveDefaultBranchActionDialogCopy", () => { }); assert.deepEqual(copy, { - title: "Commit, push & create PR from default branch?", + title: "Commit, push & create PR from default ref?", description: - 'This action will commit, push, and create a PR on "main". You can continue on this branch or create a feature branch and run the same action there.', + 'This action will commit, push, and create a pull request on "main". 
You can continue on this ref or create a feature ref and run the same action there.', continueLabel: "Commit, push & create PR", }); }); @@ -884,7 +955,7 @@ describe("buildGitActionProgressStages", () => { "Pushing to origin/feature/test...", "Preparing PR...", "Generating PR content...", - "Creating GitHub pull request...", + "Creating pull request...", ]); }); @@ -898,7 +969,7 @@ describe("buildGitActionProgressStages", () => { assert.deepEqual(stages, [ "Preparing PR...", "Generating PR content...", - "Creating GitHub pull request...", + "Creating pull request...", ]); }); @@ -928,7 +999,7 @@ describe("buildGitActionProgressStages", () => { "Pushing to origin/feature/test...", "Preparing PR...", "Generating PR content...", - "Creating GitHub pull request...", + "Creating pull request...", ]); }); }); @@ -944,7 +1015,7 @@ describe("resolveThreadBranchUpdate", () => { commit: { status: "created", commitSha: "89abcdef01234567", - subject: "feat: add branch sync", + subject: "feat: add ref sync", }, push: { status: "pushed", branch: "feature/fix-toast-copy" }, pr: { status: "skipped_not_requested" }, @@ -968,7 +1039,7 @@ describe("resolveThreadBranchUpdate", () => { commit: { status: "created", commitSha: "89abcdef01234567", - subject: "feat: add branch sync", + subject: "feat: add ref sync", }, push: { status: "pushed", branch: "feature/fix-toast-copy" }, pr: { status: "skipped_not_requested" }, @@ -985,8 +1056,8 @@ describe("resolveThreadBranchUpdate", () => { describe("resolveLiveThreadBranchUpdate", () => { it("returns a branch update when live git status differs from stored thread metadata", () => { const update = resolveLiveThreadBranchUpdate({ - threadBranch: "feature/old-branch", - gitStatus: status({ branch: "effect-atom" }), + threadBranch: "feature/old-ref", + gitStatus: status({ refName: "effect-atom" }), }); assert.deepEqual(update, { @@ -996,35 +1067,35 @@ describe("resolveLiveThreadBranchUpdate", () => { it("returns null when live git status is 
unavailable", () => { const update = resolveLiveThreadBranchUpdate({ - threadBranch: "feature/old-branch", + threadBranch: "feature/old-ref", gitStatus: null, }); assert.equal(update, null); }); - it("returns null when the stored thread branch already matches git status", () => { + it("returns null when the stored thread ref already matches git status", () => { const update = resolveLiveThreadBranchUpdate({ threadBranch: "effect-atom", - gitStatus: status({ branch: "effect-atom" }), + gitStatus: status({ refName: "effect-atom" }), }); assert.equal(update, null); }); - it("returns null when git status is detached HEAD but the thread already has a branch", () => { + it("returns null when git status is detached HEAD but the thread already has a ref", () => { const update = resolveLiveThreadBranchUpdate({ threadBranch: "effect-atom", - gitStatus: status({ branch: null }), + gitStatus: status({ refName: null }), }); assert.equal(update, null); }); - it("does not regress a semantic thread branch back to a temporary worktree branch", () => { + it("does not regress a semantic thread ref back to a temporary worktree ref", () => { const update = resolveLiveThreadBranchUpdate({ threadBranch: "t3code/github-query-rate-limit", - gitStatus: status({ branch: "t3code/bda76797" }), + gitStatus: status({ refName: "t3code/bda76797" }), }); assert.equal(update, null); @@ -1032,31 +1103,31 @@ describe("resolveLiveThreadBranchUpdate", () => { }); describe("resolveAutoFeatureBranchName", () => { - it("uses semantic preferred branch names when available", () => { - const branch = resolveAutoFeatureBranchName(["main", "feature/other"], "fix toast copy"); - assert.equal(branch, "feature/fix-toast-copy"); + it("uses semantic preferred ref names when available", () => { + const ref = resolveAutoFeatureBranchName(["main", "feature/other"], "fix toast copy"); + assert.equal(ref, "feature/fix-toast-copy"); }); - it("normalizes preferred names that already include a branch namespace", () => { - 
const branch = resolveAutoFeatureBranchName(["main"], "feature/refine-toolbar-actions"); - assert.equal(branch, "feature/refine-toolbar-actions"); + it("normalizes preferred names that already include a ref namespace", () => { + const ref = resolveAutoFeatureBranchName(["main"], "feature/refine-toolbar-actions"); + assert.equal(ref, "feature/refine-toolbar-actions"); }); - it("increments suffix when the preferred branch name already exists", () => { - const branch = resolveAutoFeatureBranchName( + it("increments suffix when the preferred ref name already exists", () => { + const ref = resolveAutoFeatureBranchName( ["main", "feature/fix-toast-copy", "feature/fix-toast-copy-2"], "fix toast copy", ); - assert.equal(branch, "feature/fix-toast-copy-3"); + assert.equal(ref, "feature/fix-toast-copy-3"); }); - it("treats existing branch names as case-insensitive for collision checks", () => { - const branch = resolveAutoFeatureBranchName(["Feature/Ticket-1"], "feature/ticket-1"); - assert.equal(branch, "feature/ticket-1-2"); + it("treats existing ref names as case-insensitive for collision checks", () => { + const ref = resolveAutoFeatureBranchName(["Feature/Ticket-1"], "feature/ticket-1"); + assert.equal(ref, "feature/ticket-1-2"); }); it("falls back to feature/update when no preferred name is provided", () => { - const branch = resolveAutoFeatureBranchName(["main"]); - assert.equal(branch, "feature/update"); + const ref = resolveAutoFeatureBranchName(["main"]); + assert.equal(ref, "feature/update"); }); }); diff --git a/apps/web/src/components/GitActionsControl.logic.ts b/apps/web/src/components/GitActionsControl.logic.ts index e4e611fb87c..3f6bae61cdd 100644 --- a/apps/web/src/components/GitActionsControl.logic.ts +++ b/apps/web/src/components/GitActionsControl.logic.ts @@ -1,9 +1,14 @@ import type { GitRunStackedActionResult, GitStackedAction, - GitStatusResult, + VcsStatusResult, } from "@t3tools/contracts"; import { isTemporaryWorktreeBranch } from 
"@t3tools/shared/git"; +import { + DEFAULT_CHANGE_REQUEST_TERMINOLOGY, + getChangeRequestTerminology, + type ChangeRequestTerminology, +} from "../sourceControlPresentation"; export type GitActionIconName = "commit" | "push" | "pr"; @@ -21,7 +26,7 @@ export interface GitActionMenuItem { export interface GitQuickAction { label: string; disabled: boolean; - kind: "run_action" | "run_pull" | "open_pr" | "show_hint"; + kind: "run_action" | "run_pull" | "open_pr" | "open_publish" | "show_hint"; action?: GitStackedAction; hint?: string; } @@ -38,6 +43,14 @@ export type DefaultBranchConfirmableAction = | "commit_push" | "commit_push_pr"; +function resolveChangeRequestTerminology( + gitStatus: VcsStatusResult | null, +): ChangeRequestTerminology { + return gitStatus?.sourceControlProvider + ? getChangeRequestTerminology(gitStatus.sourceControlProvider) + : DEFAULT_CHANGE_REQUEST_TERMINOLOGY; +} + export function buildGitActionProgressStages(input: { action: GitStackedAction; hasCustomCommitMessage: boolean; @@ -45,13 +58,15 @@ export function buildGitActionProgressStages(input: { pushTarget?: string; featureBranch?: boolean; shouldPushBeforePr?: boolean; + terminology?: ChangeRequestTerminology; }): string[] { - const branchStages = input.featureBranch ? ["Preparing feature branch..."] : []; + const terminology = input.terminology ?? DEFAULT_CHANGE_REQUEST_TERMINOLOGY; + const branchStages = input.featureBranch ? ["Preparing feature ref..."] : []; const pushStage = input.pushTarget ? 
`Pushing to ${input.pushTarget}...` : "Pushing..."; const prStages = [ - "Preparing PR...", - "Generating PR content...", - "Creating GitHub pull request...", + `Preparing ${terminology.shortLabel}...`, + `Generating ${terminology.shortLabel} content...`, + `Creating ${terminology.singular}...`, ]; if (input.action === "push") { @@ -77,22 +92,23 @@ export function buildGitActionProgressStages(input: { } export function buildMenuItems( - gitStatus: GitStatusResult | null, + gitStatus: VcsStatusResult | null, isBusy: boolean, - hasOriginRemote = true, + hasPrimaryRemote = true, ): GitActionMenuItem[] { if (!gitStatus) return []; + const terminology = resolveChangeRequestTerminology(gitStatus); - const hasBranch = gitStatus.branch !== null; + const hasBranch = gitStatus.refName !== null; const hasChanges = gitStatus.hasWorkingTreeChanges; const hasOpenPr = gitStatus.pr?.state === "open"; const isBehind = gitStatus.behindCount > 0; - const canPushWithoutUpstream = hasOriginRemote && !gitStatus.hasUpstream; + const hasDefaultBranchDelta = (gitStatus.aheadOfDefaultCount ?? 
gitStatus.aheadCount) > 0; + const canPushWithoutUpstream = hasPrimaryRemote && !gitStatus.hasUpstream; const canCommit = !isBusy && hasChanges; const canPush = !isBusy && hasBranch && - !hasChanges && !isBehind && gitStatus.aheadCount > 0 && (gitStatus.hasUpstream || canPushWithoutUpstream); @@ -101,20 +117,26 @@ export function buildMenuItems( hasBranch && !hasChanges && !hasOpenPr && - gitStatus.aheadCount > 0 && + hasDefaultBranchDelta && !isBehind && (gitStatus.hasUpstream || canPushWithoutUpstream); const canOpenPr = !isBusy && hasOpenPr; + const commitItem: GitActionMenuItem = { + id: "commit", + label: "Commit", + disabled: !canCommit, + icon: "commit", + kind: "open_dialog", + dialogAction: "commit", + }; + + if (!hasPrimaryRemote) { + return [commitItem]; + } + return [ - { - id: "commit", - label: "Commit", - disabled: !canCommit, - icon: "commit", - kind: "open_dialog", - dialogAction: "commit", - }, + commitItem, { id: "push", label: "Push", @@ -126,14 +148,14 @@ export function buildMenuItems( hasOpenPr ? { id: "pr", - label: "View PR", + label: `View ${terminology.shortLabel}`, disabled: !canOpenPr, icon: "pr", kind: "open_pr", } : { id: "pr", - label: "Create PR", + label: `Create ${terminology.shortLabel}`, disabled: !canCreatePr, icon: "pr", kind: "open_dialog", @@ -143,10 +165,10 @@ export function buildMenuItems( } export function resolveQuickAction( - gitStatus: GitStatusResult | null, + gitStatus: VcsStatusResult | null, isBusy: boolean, - isDefaultBranch = false, - hasOriginRemote = true, + isDefaultRef = false, + hasPrimaryRemote = true, ): GitQuickAction { if (isBusy) { return { label: "Commit", disabled: true, kind: "show_hint", hint: "Git action in progress." 
}; @@ -161,31 +183,33 @@ export function resolveQuickAction( }; } - const hasBranch = gitStatus.branch !== null; + const hasBranch = gitStatus.refName !== null; const hasChanges = gitStatus.hasWorkingTreeChanges; const hasOpenPr = gitStatus.pr?.state === "open"; const isAhead = gitStatus.aheadCount > 0; + const hasDefaultBranchDelta = (gitStatus.aheadOfDefaultCount ?? gitStatus.aheadCount) > 0; const isBehind = gitStatus.behindCount > 0; const isDiverged = isAhead && isBehind; + const terminology = resolveChangeRequestTerminology(gitStatus); if (!hasBranch) { return { label: "Commit", disabled: true, kind: "show_hint", - hint: "Create and checkout a branch before pushing or opening a PR.", + hint: `Create and checkout a ref before pushing or opening a ${terminology.singular}.`, }; } if (hasChanges) { - if (!gitStatus.hasUpstream && !hasOriginRemote) { + if (!gitStatus.hasUpstream && !hasPrimaryRemote) { return { label: "Commit", disabled: false, kind: "run_action", action: "commit" }; } - if (hasOpenPr || isDefaultBranch) { + if (hasOpenPr || isDefaultRef) { return { label: "Commit & push", disabled: false, kind: "run_action", action: "commit_push" }; } return { - label: "Commit, push & PR", + label: `Commit, push & ${terminology.shortLabel}`, disabled: false, kind: "run_action", action: "commit_push_pr", @@ -193,20 +217,19 @@ export function resolveQuickAction( } if (!gitStatus.hasUpstream) { - if (!hasOriginRemote) { + if (!hasPrimaryRemote) { if (hasOpenPr && !isAhead) { - return { label: "View PR", disabled: false, kind: "open_pr" }; + return { label: `View ${terminology.shortLabel}`, disabled: false, kind: "open_pr" }; } return { - label: "Push", - disabled: true, - kind: "show_hint", - hint: 'Add an "origin" remote before pushing or creating a PR.', + label: "Publish repository", + disabled: false, + kind: "open_publish", }; } if (!isAhead) { if (hasOpenPr) { - return { label: "View PR", disabled: false, kind: "open_pr" }; + return { label: `View 
${terminology.shortLabel}`, disabled: false, kind: "open_pr" }; } return { label: "Push", @@ -215,16 +238,16 @@ export function resolveQuickAction( hint: "No local commits to push.", }; } - if (hasOpenPr || isDefaultBranch) { + if (hasOpenPr || isDefaultRef) { return { label: "Push", disabled: false, kind: "run_action", - action: isDefaultBranch ? "commit_push" : "push", + action: isDefaultRef ? "commit_push" : "push", }; } return { - label: "Push & create PR", + label: `Push & create ${terminology.shortLabel}`, disabled: false, kind: "run_action", action: "create_pr", @@ -233,7 +256,7 @@ export function resolveQuickAction( if (isDiverged) { return { - label: "Sync branch", + label: "Sync ref", disabled: true, kind: "show_hint", hint: "Branch has diverged from upstream. Rebase/merge first.", @@ -249,16 +272,16 @@ export function resolveQuickAction( } if (isAhead) { - if (hasOpenPr || isDefaultBranch) { + if (hasOpenPr || isDefaultRef) { return { label: "Push", disabled: false, kind: "run_action", - action: isDefaultBranch ? "commit_push" : "push", + action: isDefaultRef ? 
"commit_push" : "push", }; } return { - label: "Push & create PR", + label: `Push & create ${terminology.shortLabel}`, disabled: false, kind: "run_action", action: "create_pr", @@ -266,7 +289,16 @@ export function resolveQuickAction( } if (hasOpenPr && gitStatus.hasUpstream) { - return { label: "View PR", disabled: false, kind: "open_pr" }; + return { label: `View ${terminology.shortLabel}`, disabled: false, kind: "open_pr" }; + } + + if (hasDefaultBranchDelta && !isDefaultRef) { + return { + label: `Create ${terminology.shortLabel}`, + disabled: false, + kind: "run_action", + action: "create_pr", + }; } return { @@ -279,9 +311,9 @@ export function resolveQuickAction( export function requiresDefaultBranchConfirmation( action: GitStackedAction, - isDefaultBranch: boolean, + isDefaultRef: boolean, ): boolean { - if (!isDefaultBranch) return false; + if (!isDefaultRef) return false; return ( action === "push" || action === "create_pr" || @@ -294,20 +326,22 @@ export function resolveDefaultBranchActionDialogCopy(input: { action: DefaultBranchConfirmableAction; branchName: string; includesCommit: boolean; + terminology?: ChangeRequestTerminology; }): DefaultBranchActionDialogCopy { const branchLabel = input.branchName; - const suffix = ` on "${branchLabel}". You can continue on this branch or create a feature branch and run the same action there.`; + const suffix = ` on "${branchLabel}". You can continue on this ref or create a feature ref and run the same action there.`; + const terminology = input.terminology ?? 
DEFAULT_CHANGE_REQUEST_TERMINOLOGY; if (input.action === "push" || input.action === "commit_push") { if (input.includesCommit) { return { - title: "Commit & push to default branch?", + title: "Commit & push to default ref?", description: `This action will commit and push changes${suffix}`, continueLabel: `Commit & push to ${branchLabel}`, }; } return { - title: "Push to default branch?", + title: "Push to default ref?", description: `This action will push local commits${suffix}`, continueLabel: `Push to ${branchLabel}`, }; @@ -315,15 +349,15 @@ export function resolveDefaultBranchActionDialogCopy(input: { if (input.includesCommit) { return { - title: "Commit, push & create PR from default branch?", - description: `This action will commit, push, and create a PR${suffix}`, - continueLabel: `Commit, push & create PR`, + title: `Commit, push & create ${terminology.shortLabel} from default ref?`, + description: `This action will commit, push, and create a ${terminology.singular}${suffix}`, + continueLabel: `Commit, push & create ${terminology.shortLabel}`, }; } return { - title: "Push & create PR from default branch?", - description: `This action will push local commits and create a PR${suffix}`, - continueLabel: "Push & create PR", + title: `Push & create ${terminology.shortLabel} from default ref?`, + description: `This action will push local commits and create a ${terminology.singular}${suffix}`, + continueLabel: `Push & create ${terminology.shortLabel}`, }; } @@ -341,31 +375,31 @@ export function resolveThreadBranchUpdate( export function resolveLiveThreadBranchUpdate(input: { threadBranch: string | null; - gitStatus: GitStatusResult | null; + gitStatus: VcsStatusResult | null; }): { branch: string | null } | null { if (!input.gitStatus) { return null; } - if (input.gitStatus.branch === null && input.threadBranch !== null) { + if (input.gitStatus.refName === null && input.threadBranch !== null) { return null; } - if (input.threadBranch === input.gitStatus.branch) { 
+ if (input.threadBranch === input.gitStatus.refName) { return null; } if ( input.threadBranch !== null && - input.gitStatus.branch !== null && + input.gitStatus.refName !== null && !isTemporaryWorktreeBranch(input.threadBranch) && - isTemporaryWorktreeBranch(input.gitStatus.branch) + isTemporaryWorktreeBranch(input.gitStatus.refName) ) { return null; } return { - branch: input.gitStatus.branch, + branch: input.gitStatus.refName, }; } diff --git a/apps/web/src/components/GitActionsControl.tsx b/apps/web/src/components/GitActionsControl.tsx index 31342d8406b..f8801f98d29 100644 --- a/apps/web/src/components/GitActionsControl.tsx +++ b/apps/web/src/components/GitActionsControl.tsx @@ -3,13 +3,34 @@ import type { GitActionProgressEvent, GitRunStackedActionResult, GitStackedAction, - GitStatusResult, - ProviderKind, + SourceControlCloneProtocol, + SourceControlProviderDiscoveryItem, + SourceControlProviderKind, + SourceControlPublishRepositoryResult, + SourceControlRepositoryVisibility, + VcsStatusResult, } from "@t3tools/contracts"; +import type { ProviderKind } from "../providerKind"; import { useIsMutating, useMutation, useQueryClient } from "@tanstack/react-query"; +import { useNavigate } from "@tanstack/react-router"; +import { Option } from "effect"; import { useCallback, useEffect, useEffectEvent, useMemo, useRef, useState } from "react"; -import { ChevronDownIcon, CloudUploadIcon, GitCommitIcon, InfoIcon } from "lucide-react"; -import { GitHubIcon } from "./Icons"; +import { flushSync } from "react-dom"; +import { + CheckIcon, + ChevronDownIcon, + CloudUploadIcon, + ExternalLinkIcon, + GitCommitIcon, + InfoIcon, + LockIcon, + GlobeIcon, +} from "lucide-react"; +import { Radio as RadioPrimitive } from "@base-ui/react/radio"; +import { AzureDevOpsIcon, BitbucketIcon, GitHubIcon, GitLabIcon } from "~/components/Icons"; +import { RadioGroup } from "~/components/ui/radio-group"; +import { Spinner } from "~/components/ui/spinner"; +import { cn } from "~/lib/utils"; 
import { buildGitActionProgressStages, buildMenuItems, @@ -23,7 +44,8 @@ import { resolveQuickAction, resolveThreadBranchUpdate, } from "./GitActionsControl.logic"; -import { resolveGitTextGenerationModelSelection, useAppSettings } from "~/appSettings"; +import { AnimatedHeight } from "./AnimatedHeight"; +import { useAppSettings } from "~/appSettings"; import { Button } from "~/components/ui/button"; import { Checkbox } from "~/components/ui/checkbox"; import { @@ -36,24 +58,29 @@ import { DialogTitle, } from "~/components/ui/dialog"; import { Group, GroupSeparator } from "~/components/ui/group"; +import { Input } from "~/components/ui/input"; import { Menu, MenuItem, MenuPopup, MenuTrigger } from "~/components/ui/menu"; import { Popover, PopoverPopup, PopoverTrigger } from "~/components/ui/popover"; import { ScrollArea } from "~/components/ui/scroll-area"; import { Textarea } from "~/components/ui/textarea"; import { stackedThreadToast, toastManager, type ThreadToastData } from "~/components/ui/toast"; +import { Tooltip, TooltipPopup, TooltipTrigger } from "~/components/ui/tooltip"; import { openInPreferredEditor } from "~/editorPreferences"; import { gitInitMutationOptions, gitMutationKeys, gitPullMutationOptions, gitRunStackedActionMutationOptions, + sourceControlPublishRepositoryMutationOptions, } from "~/lib/gitReactQuery"; import { refreshGitStatus, useGitStatus } from "~/lib/gitStatusState"; +import { useSourceControlDiscovery } from "~/lib/sourceControlDiscoveryState"; import { newCommandId, randomUUID } from "~/lib/utils"; import { resolvePathLinkTarget } from "~/terminal-links"; import { type DraftId, useComposerDraftStore } from "~/composerDraftStore"; import { readEnvironmentApi } from "~/environmentApi"; import { readLocalApi } from "~/localApi"; +import { getSourceControlPresentation } from "~/sourceControlPresentation"; import { useStore } from "~/store"; import { createThreadSelectorByRef } from "~/storeSelectors"; @@ -74,6 +101,11 @@ interface 
PendingDefaultBranchAction { filePaths?: string[]; } +type PublishProviderKind = Extract< + SourceControlProviderKind, + "github" | "gitlab" | "bitbucket" | "azure-devops" +>; + type GitActionToastId = ReturnType; interface ActiveGitActionProgress { @@ -93,7 +125,7 @@ interface RunGitActionWithToastInput { commitMessage?: string; onConfirmed?: () => void; skipDefaultBranchPrompt?: boolean; - statusOverride?: GitStatusResult | null; + statusOverride?: VcsStatusResult | null; featureBranch?: boolean; progressToastId?: GitActionToastId; filePaths?: string[]; @@ -101,6 +133,88 @@ interface RunGitActionWithToastInput { const GIT_STATUS_WINDOW_REFRESH_DEBOUNCE_MS = 250; +const PUBLISH_PROVIDER_OPTIONS = [ + { + value: "github", + label: "GitHub", + description: "github.com", + host: "github.com", + pathPlaceholder: "owner/repo", + Icon: GitHubIcon, + }, + { + value: "gitlab", + label: "GitLab", + description: "gitlab.com", + host: "gitlab.com", + pathPlaceholder: "group/project", + Icon: GitLabIcon, + }, + { + value: "bitbucket", + label: "Bitbucket", + description: "bitbucket.org", + host: "bitbucket.org", + pathPlaceholder: "workspace/repository", + Icon: BitbucketIcon, + }, + { + value: "azure-devops", + label: "Azure DevOps", + description: "dev.azure.com", + host: "dev.azure.com", + pathPlaceholder: "project/repository", + Icon: AzureDevOpsIcon, + }, +] as const satisfies ReadonlyArray<{ + readonly value: PublishProviderKind; + readonly label: string; + readonly description: string; + readonly host: string; + readonly pathPlaceholder: string; + readonly Icon: typeof GitHubIcon; +}>; + +function publishProviderOption(provider: PublishProviderKind) { + return ( + PUBLISH_PROVIDER_OPTIONS.find((option) => option.value === provider) ?? 
+ PUBLISH_PROVIDER_OPTIONS[0] + ); +} + +function isPublishProviderKind( + provider: SourceControlProviderKind, +): provider is PublishProviderKind { + return PUBLISH_PROVIDER_OPTIONS.some((option) => option.value === provider); +} + +function getPublishProviderReadiness(input: { + provider: PublishProviderKind; + sourceControlProviders: ReadonlyArray; +}): { readonly ready: boolean; readonly hint: string | null } { + const discovered = input.sourceControlProviders.find( + (provider) => provider.kind === input.provider, + ); + if (!discovered) { + return { + ready: false, + hint: "Provider status unavailable. Open Settings -> Source Control and rescan.", + }; + } + if (discovered.status !== "available") { + return { ready: false, hint: discovered.installHint }; + } + if (discovered.auth.status === "unauthenticated") { + return { + ready: false, + hint: + Option.getOrNull(discovered.auth.detail) ?? + `${discovered.label} is not authenticated. Open Settings -> Source Control for setup guidance.`, + }; + } + return { ready: true, hint: null }; +} + function formatElapsedDescription(startedAtMs: number | null): string | undefined { if (startedAtMs === null) { return undefined; @@ -125,22 +239,23 @@ function getMenuActionDisabledReason({ item, gitStatus, isBusy, - hasOriginRemote, + hasPrimaryRemote, }: { item: GitActionMenuItem; - gitStatus: GitStatusResult | null; + gitStatus: VcsStatusResult | null; isBusy: boolean; - hasOriginRemote: boolean; + hasPrimaryRemote: boolean; }): string | null { if (!item.disabled) return null; if (isBusy) return "Git action in progress."; if (!gitStatus) return "Git status is unavailable."; - const hasBranch = gitStatus.branch !== null; + const hasBranch = gitStatus.refName !== null; const hasChanges = gitStatus.hasWorkingTreeChanges; const hasOpenPr = gitStatus.pr?.state === "open"; const isAhead = gitStatus.aheadCount > 0; const isBehind = gitStatus.behindCount > 0; + const terminology = 
getSourceControlPresentation(gitStatus.sourceControlProvider).terminology; if (item.id === "commit") { if (!hasChanges) { @@ -151,7 +266,7 @@ function getMenuActionDisabledReason({ if (item.id === "push") { if (!hasBranch) { - return "Detached HEAD: checkout a branch before pushing."; + return "Detached HEAD: checkout a refName before pushing."; } if (hasChanges) { return "Commit or stash local changes before pushing."; @@ -159,7 +274,7 @@ function getMenuActionDisabledReason({ if (isBehind) { return "Branch is behind upstream. Pull/rebase before pushing."; } - if (!gitStatus.hasUpstream && !hasOriginRemote) { + if (!gitStatus.hasUpstream && !hasPrimaryRemote) { return 'Add an "origin" remote before pushing.'; } if (!isAhead) { @@ -169,51 +284,671 @@ function getMenuActionDisabledReason({ } if (hasOpenPr) { - return "View PR is currently unavailable."; + return `View ${terminology.singular} is currently unavailable.`; } if (!hasBranch) { - return "Detached HEAD: checkout a branch before creating a PR."; + return `Detached HEAD: checkout a refName before creating a ${terminology.singular}.`; } if (hasChanges) { - return "Commit local changes before creating a PR."; + return `Commit local changes before creating a ${terminology.singular}.`; } - if (!gitStatus.hasUpstream && !hasOriginRemote) { - return 'Add an "origin" remote before creating a PR.'; + if (!gitStatus.hasUpstream && !hasPrimaryRemote) { + return `Add an "origin" remote before creating a ${terminology.singular}.`; } if (!isAhead) { - return "No local commits to include in a PR."; + return `No local commits to include in a ${terminology.singular}.`; } if (isBehind) { - return "Branch is behind upstream. Pull/rebase before creating a PR."; + return `Branch is behind upstream. 
Pull/rebase before creating a ${terminology.singular}.`; } - return "Create PR is currently unavailable."; + return `Create ${terminology.singular} is currently unavailable.`; } const COMMIT_DIALOG_TITLE = "Commit changes"; const COMMIT_DIALOG_DESCRIPTION = "Review and confirm your commit. Leave the message blank to auto-generate one."; -function GitActionItemIcon({ icon }: { icon: GitActionIconName }) { +function GitActionItemIcon({ + icon, + SourceControlIcon, +}: { + icon: GitActionIconName; + SourceControlIcon: ReturnType["Icon"]; +}) { if (icon === "commit") return ; if (icon === "push") return ; - return ; + return ; } -function GitQuickActionIcon({ quickAction }: { quickAction: GitQuickAction }) { +function GitQuickActionIcon({ + quickAction, + SourceControlIcon, +}: { + quickAction: GitQuickAction; + SourceControlIcon: ReturnType["Icon"]; +}) { const iconClassName = "size-3.5"; - if (quickAction.kind === "open_pr") return ; + if (quickAction.kind === "open_pr") return ; + if (quickAction.kind === "open_publish") return ; if (quickAction.kind === "run_pull") return ; if (quickAction.kind === "run_action") { if (quickAction.action === "commit") return ; if (quickAction.action === "push" || quickAction.action === "commit_push") { return ; } - return ; + return ; } if (quickAction.label === "Commit") return ; return ; } +interface PublishRepositoryDialogProps { + readonly open: boolean; + readonly onOpenChange: (open: boolean) => void; + readonly environmentId: ScopedThreadRef["environmentId"] | null; + readonly gitCwd: string; +} + +function PublishRepositoryDialog(props: PublishRepositoryDialogProps) { + const queryClient = useQueryClient(); + const navigate = useNavigate(); + const sourceControlDiscovery = useSourceControlDiscovery(); + const [publishProvider, setPublishProvider] = useState("github"); + const [publishRepository, setPublishRepository] = useState(""); + const [publishVisibility, setPublishVisibility] = + useState("private"); + const 
[publishRemoteName, setPublishRemoteName] = useState("origin"); + const [publishProtocol, setPublishProtocol] = useState("ssh"); + const [publishWizardStep, setPublishWizardStep] = useState(0); + const [publishAdvancedOpen, setPublishAdvancedOpen] = useState(false); + const [publishError, setPublishError] = useState(null); + const [publishResult, setPublishResult] = useState( + null, + ); + const [hasUserEditedPublishRepository, setHasUserEditedPublishRepository] = useState(false); + const publishRepositoryMutation = useMutation( + sourceControlPublishRepositoryMutationOptions({ + environmentId: props.environmentId, + cwd: props.gitCwd, + queryClient, + }), + ); + const publishAccountByProvider = useMemo(() => { + const accounts: Record = { + github: null, + gitlab: null, + bitbucket: null, + "azure-devops": null, + }; + for (const provider of sourceControlDiscovery.data?.sourceControlProviders ?? []) { + if (isPublishProviderKind(provider.kind)) { + accounts[provider.kind] = Option.getOrNull(provider.auth.account); + } + } + return accounts; + }, [sourceControlDiscovery.data]); + const publishProviderReadiness = useMemo(() => { + const sourceControlProviders = sourceControlDiscovery.data?.sourceControlProviders ?? []; + return Object.fromEntries( + PUBLISH_PROVIDER_OPTIONS.map((option) => [ + option.value, + getPublishProviderReadiness({ + provider: option.value, + sourceControlProviders, + }), + ]), + ) as Record; + }, [sourceControlDiscovery.data]); + const hasReadyPublishProvider = useMemo( + () => PUBLISH_PROVIDER_OPTIONS.some((option) => publishProviderReadiness[option.value].ready), + [publishProviderReadiness], + ); + const sortedPublishProviderOptions = useMemo( + () => + PUBLISH_PROVIDER_OPTIONS.toSorted((left, right) => { + const leftReady = publishProviderReadiness[left.value].ready; + const rightReady = publishProviderReadiness[right.value].ready; + if (leftReady !== rightReady) { + return leftReady ? 
-1 : 1; + } + return left.label.localeCompare(right.label); + }), + [publishProviderReadiness], + ); + const selectedPublishProviderReadiness = publishProviderReadiness[publishProvider]; + const publishRepositoryPrefill = publishAccountByProvider[publishProvider] + ? `${publishAccountByProvider[publishProvider]}/` + : ""; + const currentPublishProvider = publishProviderOption(publishProvider); + const publishHost = currentPublishProvider.host; + const publishPathPlaceholder = currentPublishProvider.pathPlaceholder; + const publishProviderLabel = currentPublishProvider.label; + const publishWizardSteps = ["Provider", "Repository", "Summary"] as const; + const publishWizardStepSummaries = [ + publishProviderLabel, + publishResult?.repository.nameWithOwner ?? null, + null, + ] as const; + + useEffect(() => { + if (!props.open || hasUserEditedPublishRepository) { + return; + } + setPublishRepository(publishRepositoryPrefill); + }, [hasUserEditedPublishRepository, props.open, publishRepositoryPrefill]); + + const canSubmitPublishRepository = useMemo(() => { + if (!selectedPublishProviderReadiness.ready) return false; + if (publishRepositoryMutation.isPending) return false; + const repositoryParts = publishRepository.trim().split("/"); + const owner = repositoryParts[0]?.trim() ?? 
""; + const rest = repositoryParts.slice(1); + const name = rest.join("/").trim(); + return owner.length > 0 && name.length > 0; + }, [publishRepository, publishRepositoryMutation.isPending, selectedPublishProviderReadiness]); + + useEffect(() => { + if (!props.open) { + return; + } + if (publishProviderReadiness[publishProvider].ready) { + return; + } + const firstReadyProvider = PUBLISH_PROVIDER_OPTIONS.find( + (option) => publishProviderReadiness[option.value].ready, + ); + if (firstReadyProvider) { + setPublishProvider(firstReadyProvider.value); + } + }, [props.open, publishProvider, publishProviderReadiness]); + + const submitPublishRepository = useCallback(() => { + if (!canSubmitPublishRepository) { + return; + } + + setPublishError(null); + + void publishRepositoryMutation + .mutateAsync({ + provider: publishProvider, + repository: publishRepository.trim(), + visibility: publishVisibility, + remoteName: publishRemoteName.trim() || "origin", + protocol: publishProtocol, + }) + .then((result) => { + flushSync(() => { + setPublishResult(result); + setPublishWizardStep(2); + }); + void refreshGitStatus({ environmentId: props.environmentId, cwd: props.gitCwd }).catch( + () => undefined, + ); + }) + .catch((err: unknown) => { + setPublishError(err instanceof Error ? 
err.message : "An error occurred."); + }); + }, [ + canSubmitPublishRepository, + props.environmentId, + props.gitCwd, + publishProtocol, + publishProvider, + publishRemoteName, + publishRepository, + publishRepositoryMutation, + publishVisibility, + ]); + + const resetState = useCallback(() => { + setPublishRemoteName("origin"); + setPublishRepository(""); + setHasUserEditedPublishRepository(false); + setPublishWizardStep(0); + setPublishAdvancedOpen(false); + setPublishError(null); + setPublishResult(null); + }, []); + + const handleOpenChange = useCallback( + (open: boolean) => { + props.onOpenChange(open); + if (!open) { + resetState(); + } + }, + [props, resetState], + ); + + const openSourceControlSettings = useCallback(() => { + handleOpenChange(false); + void navigate({ to: "/settings/source-control" }); + }, [handleOpenChange, navigate]); + + return ( + + +
+ + Publish repository + + Pick where to host it, then point us at a repo to push to. + +
+ {publishWizardSteps.map((label, index) => { + const isComplete = index < publishWizardStep; + const isClickable = + publishWizardStep !== 2 && + index < publishWizardSteps.length - 1 && + index <= publishWizardStep; + return ( + + ); + })} +
+
+ + + +
+ + Provider + + setPublishProvider(value as PublishProviderKind)} + aria-labelledby="publish-provider-cards-label" + className="grid grid-cols-2 gap-2.5" + > + {sortedPublishProviderOptions.map((option) => { + const readiness = publishProviderReadiness[option.value]; + const isSelected = publishProvider === option.value && readiness.ready; + if (!readiness.ready) { + return ( +
+ + + {option.label} + + + { + event.preventDefault(); + event.stopPropagation(); + openSourceControlSettings(); + }} + > + Setup Required + + } + /> + + {readiness.hint ?? + "Open Settings -> Source Control to configure this provider."} + + +
+ ); + } + + return ( + + + + {option.label} + + + ); + })} +
+
+ +
+
+ +
+ + + {publishHost}/ + + { + setPublishRepository(event.target.value); + setHasUserEditedPublishRepository(true); + }} + onKeyDown={(event) => { + if (event.key === "Enter") { + event.preventDefault(); + submitPublishRepository(); + } + }} + placeholder={publishPathPlaceholder} + disabled={publishRepositoryMutation.isPending} + className="w-full bg-transparent px-3 py-2 font-mono text-sm placeholder:text-muted-foreground/60 focus:outline-none" + /> +
+
+ +
+ + Visibility + + + setPublishVisibility(value as SourceControlRepositoryVisibility) + } + aria-labelledby="publish-visibility-cards-label" + disabled={publishRepositoryMutation.isPending} + className="grid grid-cols-2 gap-2.5" + > + {[ + { + value: "private" as const, + label: "Private", + description: "Only invited people", + Icon: LockIcon, + }, + { + value: "public" as const, + label: "Public", + description: "Anyone on the web", + Icon: GlobeIcon, + }, + ].map((option) => { + const isSelected = publishVisibility === option.value; + return ( + + + + + {option.label} + + + {option.description} + + + + ); + })} + +
+ +
+ + {publishAdvancedOpen ? ( +
+ +
+ + Protocol + + + setPublishProtocol(value as SourceControlCloneProtocol) + } + aria-labelledby="publish-protocol-label" + disabled={publishRepositoryMutation.isPending} + className="grid grid-cols-2 gap-2" + > + {(["ssh", "https"] as const).map((value) => { + const isSelected = publishProtocol === value; + return ( + + {value === "ssh" ? "SSH" : "HTTPS"} + + ); + })} + +
+
+ ) : null} +
+ + {publishRepositoryMutation.isPending ? ( +
+ + Publishing repository to {publishProviderLabel}... +
+ ) : null} + {publishError && !publishRepositoryMutation.isPending ? ( +
+

Publish failed

+

{publishError}

+
+ ) : null} +
+ +
+ {publishResult ? ( + <> +
+ + + +

+ {publishResult.status === "pushed" + ? "Repository published" + : "Repository created"} +

+

+ {publishResult.status === "pushed" + ? `${publishResult.branch} is now live on ${publishProviderLabel}.` + : `Remote "${publishResult.remoteName}" is set up. Make a commit and push it to share your code.`} +

+
+
+ + + {publishResult.repository.nameWithOwner} + +
+ + + ) : ( +
+ Publish result unavailable. +
+ )} +
+
+
+ + + {publishWizardStep === 2 ? ( + + ) : ( + <> + + {publishWizardStep < 1 ? ( + + ) : ( + + )} + + )} + +
+
+
+ ); +} + export default function GitActionsControl({ gitCwd, activeThreadRef, @@ -246,15 +981,9 @@ export default function GitActionsControl({ const [dialogCommitMessage, setDialogCommitMessage] = useState(""); const [excludedFiles, setExcludedFiles] = useState>(new Set()); const [isEditingFiles, setIsEditingFiles] = useState(false); + const [isPublishDialogOpen, setIsPublishDialogOpen] = useState(false); const [pendingDefaultBranchAction, setPendingDefaultBranchAction] = useState(null); - const gitProvider = provider ?? activeServerThread?.modelSelection.provider ?? "codex"; - const gitModel = model ?? activeServerThread?.modelSelection.model ?? ""; - const gitTextGenerationModel = resolveGitTextGenerationModelSelection( - gitProvider, - settings, - gitModel, - ); const activeGitActionProgressRef = useRef(null); let runGitActionWithToast: (input: RunGitActionWithToastInput) => Promise; @@ -336,9 +1065,15 @@ export default function GitActionsControl({ environmentId: activeEnvironmentId, cwd: gitCwd, }); + const sourceControlPresentation = useMemo( + () => getSourceControlPresentation(gitStatus?.sourceControlProvider), + [gitStatus?.sourceControlProvider], + ); + const changeRequestTerminology = sourceControlPresentation.terminology; + const SourceControlIcon = sourceControlPresentation.Icon; // Default to true while loading so we don't flash init controls. const isRepo = gitStatus?.isRepo ?? true; - const hasOriginRemote = gitStatus?.hasOriginRemote ?? false; + const hasPrimaryRemote = gitStatus?.hasPrimaryRemote ?? false; const gitStatusForActions = gitStatus; const allFiles = gitStatusForActions?.workingTree.files ?? 
[]; @@ -367,7 +1102,11 @@ export default function GitActionsControl({ }) > 0; const isPullRunning = useIsMutating({ mutationKey: gitMutationKeys.pull(activeEnvironmentId, gitCwd) }) > 0; - const isGitActionRunning = isRunStackedActionRunning || isPullRunning; + const isPublishRunning = + useIsMutating({ + mutationKey: gitMutationKeys.publishRepository(activeEnvironmentId, gitCwd), + }) > 0; + const isGitActionRunning = isRunStackedActionRunning || isPullRunning || isPublishRunning; const isSelectingWorktreeBase = !activeServerThread && activeDraftThread?.envMode === "worktree" && @@ -396,18 +1135,18 @@ export default function GitActionsControl({ persistThreadBranchSync, ]); - const isDefaultBranch = useMemo(() => { - return gitStatusForActions?.isDefaultBranch ?? false; - }, [gitStatusForActions?.isDefaultBranch]); + const isDefaultRef = useMemo(() => { + return gitStatusForActions?.isDefaultRef ?? false; + }, [gitStatusForActions?.isDefaultRef]); const gitActionMenuItems = useMemo( - () => buildMenuItems(gitStatusForActions, isGitActionRunning, hasOriginRemote), - [gitStatusForActions, hasOriginRemote, isGitActionRunning], + () => buildMenuItems(gitStatusForActions, isGitActionRunning, hasPrimaryRemote), + [gitStatusForActions, hasPrimaryRemote, isGitActionRunning], ); const quickAction = useMemo( () => - resolveQuickAction(gitStatusForActions, isGitActionRunning, isDefaultBranch, hasOriginRemote), - [gitStatusForActions, hasOriginRemote, isDefaultBranch, isGitActionRunning], + resolveQuickAction(gitStatusForActions, isGitActionRunning, isDefaultRef, hasPrimaryRemote), + [gitStatusForActions, hasPrimaryRemote, isDefaultRef, isGitActionRunning], ); const quickActionDisabledReason = quickAction.disabled ? (quickAction.hint ?? 
"This action is currently unavailable.") @@ -417,6 +1156,7 @@ export default function GitActionsControl({ action: pendingDefaultBranchAction.action, branchName: pendingDefaultBranchAction.branchName, includesCommit: pendingDefaultBranchAction.includesCommit, + terminology: changeRequestTerminology, }) : null; @@ -482,7 +1222,7 @@ export default function GitActionsControl({ if (!prUrl) { toastManager.add({ type: "error", - title: "No open PR found.", + title: "No open pull request found.", data: threadToastData, }); return; @@ -491,7 +1231,7 @@ export default function GitActionsControl({ toastManager.add( stackedThreadToast({ type: "error", - title: "Unable to open PR link", + title: "Unable to open pull request link", description: err instanceof Error ? err.message : "An error occurred.", ...(threadToastData !== undefined ? { data: threadToastData } : {}), }), @@ -511,8 +1251,8 @@ export default function GitActionsControl({ filePaths, }: RunGitActionWithToastInput) => { const actionStatus = statusOverride ?? gitStatusForActions; - const actionBranch = actionStatus?.branch ?? null; - const actionIsDefaultBranch = featureBranch ? false : isDefaultBranch; + const actionBranch = actionStatus?.refName ?? null; + const actionIsDefaultBranch = featureBranch ? false : isDefaultRef; const actionCanCommit = action === "commit" || action === "commit_push" || action === "commit_push_pr"; const includesCommit = @@ -548,6 +1288,7 @@ export default function GitActionsControl({ hasCustomCommitMessage: !!commitMessage?.trim(), hasWorkingTreeChanges: !!actionStatus?.hasWorkingTreeChanges, featureBranch, + terminology: changeRequestTerminology, shouldPushBeforePr: action === "create_pr" && (!actionStatus?.hasUpstream || (actionStatus?.aheadCount ?? 
0) > 0), @@ -778,6 +1519,10 @@ export default function GitActionsControl({ void openExistingPr(); return; } + if (quickAction.kind === "open_publish") { + setIsPublishDialogOpen(true); + return; + } if (quickAction.kind === "run_pull") { const promise = pullMutation.mutateAsync(); void toastManager.promise< @@ -789,8 +1534,8 @@ export default function GitActionsControl({ title: result.status === "pulled" ? "Pulled" : "Already up to date", description: result.status === "pulled" - ? `Updated ${result.branch} from ${result.upstreamBranch ?? "upstream"}` - : `${result.branch} is already synchronized.`, + ? `Updated ${result.refName} from ${result.upstreamRef ?? "upstream"}` + : `${result.refName} is already synchronized.`, data: threadToastData, }), error: (err) => ({ @@ -875,6 +1620,8 @@ export default function GitActionsControl({ [gitCwd, threadToastData], ); + const canPublishRepository = isRepo && gitStatusForActions !== null && !hasPrimaryRemote; + if (!gitCwd) return null; return ( @@ -903,7 +1650,10 @@ export default function GitActionsControl({ /> } > - + {quickAction.label} @@ -919,7 +1669,7 @@ export default function GitActionsControl({ disabled={isGitActionRunning || quickAction.disabled} onClick={runQuickAction} > - + {quickAction.label} @@ -948,7 +1698,7 @@ export default function GitActionsControl({ item, gitStatus: gitStatusForActions, isBusy: isGitActionRunning, - hasOriginRemote, + hasPrimaryRemote, }); if (item.disabled && disabledReason) { return ( @@ -959,7 +1709,10 @@ export default function GitActionsControl({ render={} > - + {item.label} @@ -978,18 +1731,30 @@ export default function GitActionsControl({ openDialogForMenuItem(item); }} > - + {item.label} ); })} - {gitStatusForActions?.branch === null && ( + {canPublishRepository ? ( + { + setIsPublishDialogOpen(true); + }} + > + + Publish repository... + + ) : null} + {gitStatusForActions?.refName === null && (

- Detached HEAD: create and checkout a branch to enable push and PR actions. + Detached HEAD: create and checkout a refName to enable push and pull request + actions.

)} {gitStatusForActions && - gitStatusForActions.branch !== null && + gitStatusForActions.refName !== null && !gitStatusForActions.hasWorkingTreeChanges && gitStatusForActions.behindCount > 0 && gitStatusForActions.aheadCount === 0 && ( @@ -1027,10 +1792,12 @@ export default function GitActionsControl({ Branch - {gitStatusForActions?.branch ?? "(detached HEAD)"} + {gitStatusForActions?.refName ?? "(detached HEAD)"} - {isDefaultBranch && ( - Warning: default branch + {isDefaultRef && ( + + Warning: default refName + )}
@@ -1166,7 +1933,7 @@ export default function GitActionsControl({ disabled={noneSelected} onClick={runDialogActionOnNewBranch} > - Commit on new branch + Commit on new refName - - diff --git a/apps/web/src/components/Icons.tsx b/apps/web/src/components/Icons.tsx index 62a7043d855..cd583f0c2aa 100644 --- a/apps/web/src/components/Icons.tsx +++ b/apps/web/src/components/Icons.tsx @@ -19,6 +19,180 @@ export const GitHubIcon: Icon = (props) => ( ); +export const GitIcon: Icon = (props) => ( + + + +); + +export const JujutsuIcon: Icon = (props) => { + const groupId = `${useId().replaceAll(":", "")}-jj-a`; + + return ( + + + + + + + + + + + + + + + + + ); +}; + +export const GitLabIcon: Icon = (props) => ( + + + + + + +); + +export const AzureDevOpsIcon: Icon = (props) => { + const id = useId().replaceAll(":", ""); + const gradientA = `${id}-azure-a`; + const gradientB = `${id}-azure-b`; + const gradientC = `${id}-azure-c`; + + return ( + + + + + + + + + + + + + + + + + + + + + + + + ); +}; + +export const BitbucketIcon: Icon = (props) => { + const id = useId().replaceAll(":", ""); + const gradientId = `${id}-bitbucket-a`; + + return ( + + + + + + + + + + ); +}; + export const CursorIcon: Icon = ({ className, monochrome: _monochrome, ...props }) => ( ( ); +// Editor / IDE icons — restored from the fork for the editor-picker UI. +// Kept as simple `currentColor`-based marks so they can be tinted via CSS. +export const WindsurfIcon: Icon = (props) => ( + + + + +); + +export const PositronIcon: Icon = (props) => ( + + + + + +); + +export const SublimeTextIcon: Icon = (props) => ( + + + +); + +export const WebStormIcon: Icon = (props) => ( + + + +); + +// Simple fork-style IntelliJ mark — distinct from the multi-color +// `IntelliJIdeaIcon` defined later in this file. 
+export const IntelliJIcon: Icon = (props) => ( + + + +); + +export const FleetIcon: Icon = (props) => ( + + + +); + +export const GhosttyIcon: Icon = (props) => ( + + + +); + export const VisualStudioCode: Icon = (props) => { const id = useId(); const maskId = `${id}-vscode-a`; @@ -299,54 +525,6 @@ export const Zed: Icon = (props) => { ); }; -export const WindsurfIcon: Icon = (props) => ( - - - - -); - -export const PositronIcon: Icon = (props) => ( - - - - - -); - -export const SublimeTextIcon: Icon = (props) => ( - - - -); - -export const WebStormIcon: Icon = (props) => ( - - - -); - -export const IntelliJIcon: Icon = (props) => ( - - - -); - -export const FleetIcon: Icon = (props) => ( - - - -); - -export const GhosttyIcon: Icon = (props) => ( - - - -); - export const OpenAI: Icon = ({ className, ...props }) => ( { ); }; -export const AmpIcon: Icon = ({ monochrome, ...props }) => ( - - - - - - -); - const ANTIGRAVITY_ICON_DATA_URL = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAAAAXNSR0IArs4c6QAAAERlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAAgKADAAQAAAABAAAAgAAAAABIjgR3AAAjOElEQVR4Ae1dCYxkV3W9tfdW3T0z3bMvPcxge2zGy2BjY0MwxEAWEBACihSCEiMUSFCiRJEsRygIiIKySUlYAkmMQSxJQAKi4AQngDcMAe87nsF4xuPxrJ6ll+rqri3n3Pfv71e/f1XP4u7+Zdeb+XXvu+/9999/57z7lv+rOiVnF1ILnNYuvV3aAsW+JJIbbe6yXRpPWyh9XtFnCsZC+ePS42ysSCv7vEq2MbwQZbQp/rSTzrjhY0puVUacPc7mF7lQepj3TBqwVd44e9QWjbMCcbawYqeR7uftBH0hUOLSo7ZonPcdZ2tnb2qrhUBg5nZ5/DRfj57XLs0qFM1j9herjAMuavPjvs428eO+Hm2vdmltwWVBrUDx7S+U7lfcL9O3d7reCgzf/kLpflv5Zfr2lgAzUxwIUZvFo9I/v12an4+6BTvH4i8WGQeEb4vTzWaSbWF6VFo7md3i/jm+LRZkZogDwLeZfq4y7lpWZlNFX0SRKDh+3PRzlWwuK8Nvunm2uMZeyGbpvjwdnRXx8/nxqM44g+V3sc79nNfwuBXfZno76ae10tlClhbVGWfw0yXrbOFnXIObrZ1kmp++UJwX9PNbBcxmcZOt7JaeVNnU2F4lfbvpcZI2Hrx/01mMxam3Cv55fp6mc/2G9XU7wWxxkjbfbvGoZFmp3t6+wtDwim25XO6KdDp9JUyXpFKpMaT143gph6lGo7EX+D5Ur9d/XKlU7jl18sRT09OlGTQKQWQw8ONkNN3icZI2C1q2D6AlmIym+XHqCx4AOr127YZduXz+9wD2r1nBXblwC4AU36jMzn7m0KED94MYdZwRB34rGy+gAMdIplloGIhmMO
mDTZufz3Rfpr08KQK/bv2mX0Rv/wLsL/UejiY4pzAFr/DbB5/b/70YIrQiBi9o5DDdl9Q1GIgWp6SNwZdRnfEm0C2+es26rXD3X0KPv4iFdMML0wLwCI9hWPitI4cPPo0SCW4U/GicF16QBATSwOUJpvsyqvvAm65y46axd2ez2c+yoG5YnBaoVqsfeHb/3q+hdJ8EPvi+zkq0JUEGGXyAeQIDbb7d4gY449T1gMfPbNq89eOZTOajsHXDIrYA2votg0PDxYmJU3fAK/BKhpNdNRpvmyeOACzACjGd0sAPgadNwd809knI63mlblj8FsDwesXg4NAmkOA7AQnsoj5uZjNpaRZXaQSwRF9S949YAqDnf6wLflObLkkEJNhZHBwaOHXqxO1neEEfY4kjgA+66bHgb9y05V2ZTPajZ1iBbvYXqAXoCQaKxafHx089EVOkAR2TNGdiJoLLYGCbbqBbHsbDY3T1mq39/cV7mLkblrcFpqYmrjh65PDTqAUngNHDJoE2OWRlzdYEPhMYokSweCix0M/09Q180WXvfi53CxALYuJhF2IV2FhFs1FnYDwkgBksky/NE4S9f926Da+H+7mQJ3XD8rcAsSAmqEmIEXRiaNj5eJquFbcMGvE+LJNJy4drpTP5fOEmL29XTUALEBNig6rMw8yzRWuaIrAMdpLJOBvzptesWbcLsru9yxZKVugPsFGcUDXD0iRra7rJpiHAbscSfWkeIJ0vFH7XMnZlslogwCbECrXzMTS9qdLM7AdmYrDMviT4Baz53+aydD+T1gLEhhihXkYCHz8f27Dq0YxMiJ4UxoeGVmwLz+wqiWyBAKMQsxg8We8wPc4DMJHBMoUkwUTjlS6p+5nUFggwmoddUF+zh9X3CWDAM9F0O4H50njYc0V4Zgcq3P2o46OGLZEaJHWN0xboHXhbTVUOMFK8kGD4MY9h2qTzncBoJovPk1gD7uTZnRb40IxAF3tExlaKbMexoS8txXRKGpW0jJdSsn+iIXvG6/JMqS4lsAFJTS3WKfccYDQPO9Tft6E13O35L4UygwU/c6ij8M2WoVMkX6ZaMSDyhleIvGmHyAUrUjKMOVJ2Ni2pckZSM+gskNXptJycEnnkeF1uOTQrdxyflcmqI0Kn3CvrGWAUYkaTdzALA20kQexbwUxk8E80vePW/688PyXXvzEluzaK5Kvo8eWUVMtpqYEEqfBISQpdfmUhLW8Yyco1xYLcdawin3m2JI+XKjqldk3SEZ/EyPDyJStvcQWfBvMATPBDXDxq8/MnSufdZdCx33x1Rt73K2lZ3ScAPYVx3kGOHTO4eOjcB1Md8XRa6rDVcGQyKbluVY9sL+Tlr56ZkNtOlbXlEnWT7StjQPu5ovgx3rA5ADPaSZbRj5vuF5hYneP3L12bk+vfnpMBbI7yBWv2d2AbAk/w08ioZIC0eAaZ+KpsA3JLb1o+snlI0s+IfK+zSGB4+TKKsc4DzANEwfRPpM5g0sUS+snJ3tVX5OXd7+iVNLZESrMOfPR1RwLchjp/EIBgZxR82OAB6DUaOEgMOAElwmg+LTduGJaT1RNy7+QMiJLQG2+ultWS0j+acyHGdmGwE6K6xa0QxhMbOOHbsiUr7/z1ouT681Kq56XcwCF5mZaCHmXIciovM6kcDidn03mppLNSSWWlClnFM5UqCFHDQbmukJM/Xjck6/IZfdie2AaYq5jh5ePKVD+uuu8B7CTLGI3PFZ9AjUu93t6U/Orbh2TFuh51++zCKdjTerDnu7Gfj8zoBbLqATAR0h7fgMRyEUcWJzVwkhsKnH7JQI/8zuig/PXBE7pnkMAmiFbJwDYco3G0ytwkMHqyH/dP9O2J0kmAS68akPMuK2IdD/et4IMAwT+6OhIACz/n+nFXdPMkQY6gA/A6QeeRwYGxRHXEuYmQAineivXknRNluWt8Ws9NVAM0V+a0MfMngVaEncy46SYtT6IkwR9elZVXv2mF1LJ5mZ1Fdc
lvDOipOiiAQ8d93A5JwJsm+DkcJEAN4JIAuYAA9XQdZKirjSQQ6CREEUz5zZEheWhqRiYx3iS6UeKxi1YZg158YMZ5meOzJsN68dVDsnLzgExV09r76QGUAPQECrsjgXoAWIwAJEEVBCD4tQB46iRAo+kAERp1uXygD/sEffLfJyeT7AXisIvaFLgoAeIyxdmSgTpqwd4/uDInF75mlcxislcB4A0M5O4g3I4AKXiDNNIyONjrs3ARFdwZ3T8m+iBBzREgVQ+J0EjXQhI4L1CTApjzjlVDcvdEqZO8gI9XE55GABqbEvwzFkiLZF3aKL8Ysf2VwzK0gWM/Zu4NzNQBdkMPTPcgObNLKwE4B4D7BwlyJAF6fh5EqOHOqwC7FoBPT0Dw695Bb5DDikAaNXlF/4Dsgie4/dQEvEC7ZlvatohcrV3FmMZDN4Ii5zVF2xXSlHFZIuj9Pf1Z2X7ViHApNwv3X6uDADgaPAISsPenQAybB2SDeUAu8AIVEKFgHgBA16HX01UlgPMC9AQ8qlpmIZuRN64Ylh9OTIFwqESyQ1sMzQP4t9DqhFZ2/9wl1eto/LUvL8qKrcMyXc1JFSBXSQDwug5dSYA4pvUggPMCpAEHhixw483ncQBqDAHuqCkRHAEIOL2AAg8de8QoCxJe4NLikGztOSa7S9O6u7ikN77wxVphNc8eRwArfl5mS0iKTGM83nL5qNTzPTJbceBX61l4ARAARyMgAR2d8wIAHzYlAO4uhxvB86E58NH7awC6RtefIvjuUCJkKiiPcQ4vVRnExOHVg8NKgKS0R5t6tMSyFQH8E3y9zTWWNoljf3GkV1ZfOCLlWk4q6OnVOrxAQIAavQBJQA8AmYJMYS5AAnA+QA9AAjgSzPV+Hfc98EmCRprgo6kCEmD6CB9Sk1cNr5JvHjsqE7Va2wnU0rZM09V87Hw9zNSKAGGGxCrY9l19/grJrxqUchXbtyRBDRIkUA+AeA06vQAJICBAWsGn5CogpTByHoB+7SaAAJXjP72AAx5SQac3qbihBXEdXuAFNvUPy/a+otw3fiLJk8G2EHYsAdJYv63ZuQbuG+4f4FYANg/1AAC/Dt0Ogu/mAZj+2VwAm0M5EAH9G7DzIAkcAZzrB9AZEADAqwcg8KCMDiuYC9AL9KQysmt4RB6YONm2kZOc2JEEoPvvH+mToW2j6P05EAAPcvDgh16ARw06wVcPwGGAc4DAC3A1kCEJ1Im7/QDuBHIJqO4fE7xGPfAA9TnwG41ZLaeRQXnYccA+IbaHq3Lh0GoZPLhPxqscFjovdCQBgICs3DYimaEhmali8wegV2oggHqAvAJfBwlqsDd4EHydCHIe4OYA3AvgW0EcAgA9/mHvn74AXkAnfXDxbuxnr8ehwNMLgAiYRmLzGEdFRvtXyZb+IXn45FEQovMo0JEESOPB/Yod65z7x7iv4JMAetALYHsHh/MAJACmeuYBgk0h0EBXA5hK6BBQhwdo0AtwAqhDQUAAHSQIugOcPZ86iVDHTnoPHh/vGF6rBOi8/u+WwqdT7+RQG+4/P9wrA2NrZAYgzwJc5/4dAWoEHkedB7wAwacH4KErAbh/Lgk5kbNXwV3vhydg71ciYK2PiaA0uEagB8D50On6HREQB/iN2izeKMvJthUbpe/ZJ2W6lqhh4LQw6zgPAPxlYNMqyQyvxNqf7p9HQcf9qrp99nwHfl3dvyOAbnoCdM4BHAG4JMRzg+Af0MbmKPyBEgDPk7n8094PoINe38Ckj+6fPb8Bz6MS84SR4lq8dzgke8c7bxjoOAJwmB3cvh6Pffvx2JfLP/Z8EMCkjf1KAuf+dQ4AwNxKwBEAT/gBJjZ19B+SMBFs4GkfPYBgI0gwERQMBw24eDpK3QeAVD0FTxCSYFZ6erOyZXg9CHAE6afV8ZAvGaGzCIBOmuktSN/YRpnVnu+7/YKSga5fx35IrgTU/Sv4uFVMAFOcDBJ44FxXD6B9H2gEHoAJGOEFAO
swAC/gRkrECToPkILunzo3m1KYMI6t2ip3P/soyuT5nRM6igBc/hXwQkZ2dI1UAvdfC3q/Ss/1O/CdB1D3H+wFcB3PIYAeAM/22PUDH8AIgceBLWEgizj2D9QDkARw/xwCeKAsmwPUmY5VwujKLfjmUVFOlk521GqgowjAXtq3Zb00eoYC188JXwETPjcHcJM/BzoJIOoBnBeg+0fXxUECuLcECD5dNocBGKHbEZBAz5kDX8tQz4Bmg1dREtTx2jG8Ss/AWlkztE5OlE6gxM4ZBjqKACnswPWMjeGpXw8mftjoUXdfQI80InD3j0tAgMNZu5KAOm5TPQAJwN7vXg6xl7roCbgSwBpANZIhRW+gPoJeACRQ4kDHqoC67gVwdQCPwKePPXgcvX5kuzx5MO4X23BaQkPnEADuPzuI173Xb5JKFT0eO4AhAXS8tzEfJODyLRj/2Tv1gZD2ZrcEZK/n+M9nAoSZcbAEn84D0Ko6wOUugQ4WWCJiqxD8oSfB0pBDATeLVLIZa7J69HzJ5/4X9eNP/XdG6BgCcPzPr10jqcFRqVYIPlw/x3yAz00ft9530sDXnq/AEzQCBgAD9083TcgZ1BNwCMA1aKpxIse5AIcMfIGE3x5SPXgjSL0Iy9U5AsrlnAAEGFgxJkMDq+XoiX3uHC092R8dQwBusxY2b4UnL0oNBAg3ehT8ZgLosi90+wSeBAhAJPAggev1BMeRwPV9bguTBG5QcJ4AVGH+gAicQGpZBF9JBRl4g1zfiIyMbJMjJ/YGpSYbfNaucwiQz0tu03Zd83Pp5cZ6fPdLXb0RwI374SPgoNcTKAcagAzBdw4f6ClKJEA9mAhS0CvUAiI4b0ALwFdvwIkk5gWBTk/AnUXBN41G1u6UzFN3gGQsMfmhMwgAl5xZsUrSqzbq2K87fOj5c+AHOsd7r+eHwIe91iNApI86uBwpnBfAAMF5ATwPHxo5SnDGQMJwiKAksbhjyIkldNiHR3dIT8+wlErH9VwYEx06hAB4c2f9mDR6V8H1Y4IXuH28C4bxNxj3FXjcDryDAR91/WHvByQc/wmhAW8a4/yauObQ1QBzOiJU9Qyk6fyAebhzSHJw/QAigKj5oY0yiLlAqXQMdpIi2aEzCIDlX3bzBWjuXl3iEXg73ESPM/1m8I0EoetX8AIXDj3AWK0+RJwFMPBbxkqRYFhwBKFl7p8NDcxJLyBYKeQL+ILKuovl0IH7/GITqyefAHDD6f5Byazd7nq/ru2dy2dv53rfPeq1nu9m+44A7KXsocFRLUl98oDUTu2TRukwVnPTSMPInu/HNdbg/YIxyQ6sx44vfkwIkOpQoDmcR1CvwDjZE8wPOECwDJKBqwyqw+suk2zu3zFcufKRIbGhIwiQWb0Zyz/sANL9KwFMBqCHbt8Dn0go8LDBHVf2fVdmf/4/Ujv+pNTLeIULT/GAmAOGYGZyku5ZKbnRi6Sw7ZelZ/PrJYPezMlcSAQU6eYDPM8NC+ol1GtwOODcoCE9q14ufYMbZPz5PYmfBySfAGj0zKYLsZ8ziNZHdZUABD44FHwAaBM9zsYJPiX28yv7bpXyg/8s1aOP4XyATrDV/yOPSscBptUnD8nMxHMy88ydUl53uQzs+qAUNrwaJLDJIUvGeQq+I4FG+WFlYh6Q7RuVIawGxo/t5gmJDmiNJAf0skK/ZDbu1Mne3AOe6JgPsJuWeojD/ZYf+EeZuu1GqR5+EOnondzF80GP3jrTmAePgmf33y0nv/tHMvXYVzAf4I4fZ/puZ0B/Swg6vUENE70q0qqQFayq9UcmMgUZ2vgqPEfCUJXwkGwPwPEfS78UHrU2agSGHsD1fL7dwzGXL3gQfKeTz4gD/Ol7/l7Kj3wJ+TFG65buGSIBItQxdEz86C8xV5iS4sXvAznYXJwkslvz0JkiJK8bvFkQEKxv7SVSwAOi6VP7wbnk9rPk1gxNypDefLFIYQU6MIAF+G6TB7oCTzL44ON28FZv+a
GbHPjQ2/Z4d4nWn+z1mChO3vtpmXry6wq9rvkxJJAGulkEIlTpCZCXB3V+QzmDyeQAhgFUuHX5CUhJMAHQcPk+yWzehTZ0j3RtyacPeJQAbm8/7JEAYPZnt4AAn3c9X3vpObYyejQ9wAQ8ysyBu7FH4IAPhwNcg4+L+BN0KnVYABkyvTK4+Ro4H3qN5IbkEkDd/2ZJj5yHyRt7NhpSvQB7P6vtHbr8wncCj/1Upu/9lDRm8ZOf7cb6M8UDxKpPHZHxH/+tVDFJVBKAn+YF2Mf5+4LcKCIJKPkWQR8mknkMAzr/ONNrLlH+5BIADZDZgh8nh/t3z/Kdu3eun8MBScGDYzE8AUAv3/8ZqZ/aC/AX4bYwj6gcfkgmH/oXLC648xesDIC+9v7QA5hHwIOWwU0ysOEKeDBSJJlhEVrqhbhRNC9er0pvuRKF5TDRI/gEO+j90PXBTAA+CTD71C0yu/f7AB95Fi2kpPTkN2Vm/52BFwAJQk8ATpIEJARqp6+cYzVQ3Pp6zB35NxySGZJJAP4A05oLAvePHk7A6f6VBKwyx35K9n7M1sf3yczDX0DL4/WsxQycD8yckknsK9SmTzQNAdw65mHg61AAQy+GgcKKrag7B4zkhWQSAMut7PbXSipXDJd5oevn2I+ephsySoiazGCtXju+Z3FcfxQzeJjZg/fK9O5vcBHYTALGSYTAA/DXQ9J9a/AllmujpSQmnjwCoKek8I59evNV6PUEmwfdOt0+e3xw0P0DjNrh+2V2938E9iVqV8wBph77qlRPPqUk4DAQzglQBSOBeYSBl70Jr7PjjxQkcC6QPAKgATNbr5F0cQMaDJADfL6Fo2/lkASh7pZnZbj+xvTzIAOJsUQBk8zayb0gwZd1QqirAfZ8EkE9gEcCGHOrdkjfxiuRlrxhIFkEQOul+lZK9rzr0M85+QP4dPnB4UgQeAD0/uq+70kF+/aLMutfkEspKe/+T6kcuiccCugFQjLgfJ0QkhScDJ73Njx1xO/Wa+4FC1+yDMkiAJovs/VqyYxcoI/abdLn3uNnVekRnOtvTOHBzSNf0G1f2pc8wOPUy8dlCruOdSxBreeHQ4F5AlSMP2ZVWH8lJoTc1EqWF0gOAdj7e1dIbsdb0aHxPN5cv3kAAs+DYCPvzE+/LtUjjyK6mMu+BWjFCeH+H0j557do1ZQEBB71jHoCKQxKcce7sCTkuwbJCckhAHv/y16Hv02L/XO0Hl2/ewM36PVoVP7Tid/Rh2X28X8DEZa/N/E7gqWHb5bqqWcc6MAWHABH5zaKlBioas+ma6VnA/Y2+IwiISEZBGDvH1gjuZ3vxFyuJ5josWoBCSgJPr3B7ITMPPhP2Jo9BDIkoPqoQ/X53VJ65PPgI18QDQgAqZ6AHkF1PC0sFKW48734QxZ4tyEhc4EEtCDaAuNp7qK3Y+zfAfAR1aUfQWf1Ag9AAiDf7J5vyuy+26Avo+tHrZoC6lXGDuEs3jriKOWWgUYEDAdGAjChsOEa6cMbRzppaCpkeSKnSwDcwiIFuMMMXqLMXYTejy3fOfDRkmxN7fkEH2v+Iw/j7Z6bML3Gmz2JCnD3s5Mydf+n8b7hM24+gPqx0bT34x7c/ABeAN8hHLjkfZJd/N3B08LsdAmwOM1N148NkvwV79cdM53hc52vPd/r/XCzXOuX7/07qU88CzIsb7VjG4NDwdHHpXT/J/FC0XQwD3A5jQgqcc+Z4e1SvOyD2OnsRYbTwin2ki+EcRlbEjeOt27yl71Hshuvcl3FwNchwFw/qogeX37wc1LBa1qJcv1RBDgU7Pm2lB//ivZ4QsueT8n5gHoB1TEh3P5W6cOqYLnD8hEArZK74C2Sf8VvoMc71689X0lg4HMIwLzvp1/DrP9fobEpkxxQX5C1dP9nZWbvrXNDgZEgGAr08TCHgl2/L4XNr8NtcaBYnrA8BMANZ7HkK1z5IX3gw2erDnxKNKKN/Rj3K09/By95/EP4Dv/yNNMZXB
VeoFE+IaUffUIqB38yRwIUQfpyo4gPi7g5lOoZkcGrP4xvPe9CwvIsDZeeAAR/6y9Iz2tv1HGfs6S5no/qeOBX998h0z/8BMb/48kc91vxAvOB2vh+mbzrz/ASyQO8QQ2OAEYEDgt4Wji0VYq/8DHJrca7j8vgCVoRgHW14OtmOzuJG8ysu0R6XnODpAfwsIfg63hv63y2FA5OqPZ9X0p3fQTv6h/U+NldcBnPIgmO75bJO/4Uzwvui/UEOifAR2blDim+9qOYHI690CTwsfP1sGFaEYAZYk8IzzxThS4PD3p6rvwDSQ+O6V6/gm+bPNrzWR38AOueb0npzg/jRY+EzvhP9965dMU3kSZvvwEPre7Qs6xRKTkc6EESjF4ifZe8H49CF+W7BHbZeTWPI0CrzK3s8wqNN+Bv7oxdK9m1l4cPetyIjx5P8PnjS3ioMvPA56T0g4/rS5iJXO7F31xrK0lw8mklwcwTX8VQj5+XQW72fpO2Y5gfuw77A9uQcM6TwlZYzbOj1duGeSe0zd0uES4xg+/dSQYPQ/QPLBjwcP/46RW+0VO+71P4/t6tGBr4Pn8cN9tdIMFpuJd66ahM/fAvpAqP0HvpBzAErlMS6JePrer5QcyLRkGAx3H/Zjxn2RZDIwAztctoaSXk40PtMw+gfO3oE9rLU1m86sU7xMy3Pn0MW6jfl5mHbpbaiacc8JhJv+gCSNColaX86FewYfSI9Fx8PX7z4BrsDQ+D6xgKsF9cPXQvCMLvE54T+YkRg2HmYs2fId5GAEuOOym0Yf26H1+NPt8yn5HETVV2fxvf1D2OYeAy3CR+5mViP276AQD/M7e9e243fkbVWZ7MIDb+V/l6+W03SHblyyWDt4VS+SJWOvgG83M/xtB3GHnOvgMQI+/eQuxa2aIEsHwhQ8wA2ajX649mMpmzIwALwKPTyl68xcPXtxk4EPJmCfyLHnx3y/rJe0VbVPFsg2Rwwdrh7MFnOcQIIgp8HJ56WfqauMya6KVpnlqter8lnLXEpCgEXL+te07u7qyrkYgT2Rb8wqkebJdzA5/35GFkuJpksq9r/HRa305qzJTL+J51NyS5BQKMQswWqqtPAJ7knxiNy/g41jPdkOgW8DBqh6Wl6XNX3lBoiOiWxvRGBQHfi/svGrsheS1AbIgRaqZ4BdKv6DycfQ8QzRgtROPTpdLNfsaunpwWCLAx3Fgx003OqywJwEQGyxQXD9OOHcPUVcTWmnpi9yMRLVAKsAmxQq18nZWcFzcPwAQ/xMX1ZKwz69PTpT/0M3f15W8BYkJsUJMoyFa5OEzDOYCfyTL6BTXpRw4fvAsXw5ZVNyShBYgFMUFdmnCKxFlVS6euwTwAI0y0YBmjUhlGpp06eeJDlrkrl7cFiIXX+6NewMfQKhpijd2H0AvYLgTlQke6XJ6e6O3t25/NZt9opXbl0rcA1v03HDt2+Ce4MoHna0WUcUeUCBo3AkTBRxlKDCMC49TpMcyWmpwc3zNQHCym0+lLmaEblrYFqtXqF5878MxNuGoUcCMCQWaaSVbQiECdP6rXBCptIcCBHmezPOmJiVP/VywOrQcJLmDGbliaFqjVat868Oy+P8fVCLABHiWCgW+gmwwrSQIQTAumG8BRyXxmM28gE+Onbh8YKPan05muJ7CWXERZrVa+SPAx7vvA+7r1+DgCsGZGhOCnN+ZAZWIrEliaEcDy0Y5t4lM/6untxZwgd50auh+L0gKYe9343IH9N6NwAm5Hq54fJQDrRPDDEB0CfFCjQFua2VmI6SonJyf2gJW39PT0vgrvDawKr9JVzrkF0K57Tp44/t7njx2xCZ8Put/7DXSTYW9HJZrAZ6UIHEnA4INp7p0yejC/ESdWB/iZ0dE11/T1D/wN8p7dG0Q4sRu0BUqlqck/OXr08N2ey/d7fivdJ4iRwaSRQkE3sH0CmG7gG1Es7hOAttg4iJBeuXJkJ4jwHrxI8uYuoKffApjk3Qrgv3z8+L
FHALyBaT19Icn8zEOg7VwDvUkSWCMA1CYv4KcZ8CYN8DjwfRt1LQf7BXksGbdgeLgY84RLM5n0DvADXw7A34F5aYdp4HugVqs/gcndgxjjH56cGN+HJR5/9NB6rIFowBvAvozqdo6RoAn4oMnDl/EZJ1A8GHzwTTfw46RPCJ8A/rmmU/qHXc+XUZ3xVoFlLWdgw55O8POZ7kvqdhA86lEQfQK0AtzOMRktk3U1m/7dQEbYiJQM1qAWp406C2wXmMc/h/lJBjuXOm0++KbDHF7Xru/bqHdy8NvFdF9Sjx5sK9oofT1KAkuPymh5KEaD2Rnhj/DOC8wQDSzcAIymxcUtv12MoJrNQDfJ8w30qIwr28/fKn257HFt59fF0uOktRUl28ri1O2wNIu3kpbPyvClXx/1ADQwAxvfpNkoLTCNgZIXtsB49LCyLC/JQxvPo4weMKnNl9QtMH8nBt5/NJjNl9TjDrYX7Qa0r8fZ4sowm9WDcQaV7TwAG90y8wRe0LyAD4jpdiFKO9d0O5d2/0B0HvBWHtMYonFn7ZxPvw1Za4vHSdrsYJv5usUpfZ15fJvF7VxKBou7WPDpE4AZ/Mb241YIL0QSMFBnsHNMMi/zUFp+plGnjB4wzSuDNgYr08U699Paz+7A4r6kHnew3cxueivJfJZG3YKv0xbGDQzLSGmN7suozri5dep+vJXd8pm0azFueqCGdbB4K2nntkpfKnvYoAtc0M9nOmVUNxulD6gfb2W3c1mVqG42Sg0+GGajtIb1ZVS3cymjoFtanN3KtzwWp2SgPRribNE8SYwTgGjwbaYbUHFx2qJgW/5Wdl7T8pjuS+oaOAQwY7SBzeZLd0b8JytigPrSzvdtLMHipvuSOgPzvJgC28IPFvdlVGf8bA5exy/Lj1O3oBtBFolrcLPFSdp8u8WjkuX7Nj9uOiWDledi7jPO5qcnXTcg/Hr6NtMpo3pc3PKZZLmm+/nN7kvqFjRvtHGjcWY2WzvJND99oXhcub6Nuh+sbN/WCboBEq2rbzc9TtLm2xeK8zp+fj9O3YLlCUGzBMq4xvZtpvvydHS/bD+/XdtsFvfz+7ZO1MMG9yrv20xvJ/20VjqLt7Sobpf202PBZsaFwLD0c5Vx17IyrcIvNtkEAG7Oj5t+rpJtZmX47TfP9v9tVpxWeBtrbgAAAABJRU5ErkJggg=="; @@ -659,29 +816,38 @@ export const IntelliJIdeaIcon: Icon = (props) => { ); }; -export const OpenCodeIcon: Icon = ({ monochrome, ...props }) => ( - - - - - - - - - - - -); - -export const KiloIcon: Icon = (props) => ( - - - -); +export const OpenCodeIcon: Icon = ({ monochrome, ...props }) => { + if (monochrome) { + return ( + + + + + + + + + + + + ); + } + return ( + + + + + + + + + + + + + + ); +}; export const GithubCopilotIcon: Icon = ({ className, ...props }) => ( ( ); + +export const ACPRegistryIcon: Icon = ({ className, ...props }) => ( + + + +); + +export const PiAgentIcon: Icon = ({ className, ...props }) => ( + + + + + +); + +// Amp / Sourcegraph — orange ampersand-inspired wedge mark. 
+export const AmpIcon: Icon = ({ className, monochrome, ...props }) => ( + + + + +); + +// Kilo Code — orange wedge / triangle mark with stylized "K". +export const KiloIcon: Icon = ({ className, ...props }) => ( + + + + +); + +// Re-export the existing Google Gemini mark under the GeminiCli driver +// alias so providerDriverMeta can import a name that matches the driver +// kind. The underlying SVG is the same multicolor crystal mark. +export const GeminiCliIcon: Icon = Gemini; + +// Re-export GithubCopilotIcon under the shorter "CopilotIcon" alias used +// by providerDriverMeta to keep the meta entries terse. +export const CopilotIcon: Icon = GithubCopilotIcon; diff --git a/apps/web/src/components/KeybindingsToast.browser.tsx b/apps/web/src/components/KeybindingsToast.browser.tsx index 79600215d21..2772da10276 100644 --- a/apps/web/src/components/KeybindingsToast.browser.tsx +++ b/apps/web/src/components/KeybindingsToast.browser.tsx @@ -7,6 +7,8 @@ import { type MessageId, type OrchestrationReadModel, type ProjectId, + ProviderDriverKind, + ProviderInstanceId, type ServerConfig, type ServerLifecycleWelcomePayload, type ThreadId, @@ -72,7 +74,8 @@ function createBaseServerConfig(): ServerConfig { issues: [], providers: [ { - provider: "codex", + driver: ProviderDriverKind.make("codex"), + instanceId: ProviderInstanceId.make("codex"), enabled: true, installed: true, version: "0.116.0", @@ -95,18 +98,24 @@ function createBaseServerConfig(): ServerConfig { ...DEFAULT_SERVER_SETTINGS, enableAssistantStreaming: false, defaultThreadEnvMode: "local" as const, - textGenerationModelSelection: { provider: "codex" as const, model: "gpt-5.4-mini" }, + textGenerationModelSelection: { + instanceId: ProviderInstanceId.make("codex"), + model: "gpt-5.4-mini", + }, providers: { ...DEFAULT_SERVER_SETTINGS.providers, - codex: { enabled: true, binaryPath: "", homePath: "", customModels: [] }, - claudeAgent: { enabled: true, binaryPath: "", customModels: [], launchArgs: "" }, - cursor: 
{ enabled: true, binaryPath: "", apiEndpoint: "", customModels: [] }, + codex: { + ...DEFAULT_SERVER_SETTINGS.providers.codex, + enabled: true, + }, + claudeAgent: { + ...DEFAULT_SERVER_SETTINGS.providers.claudeAgent, + enabled: true, + }, + cursor: { ...DEFAULT_SERVER_SETTINGS.providers.cursor, enabled: true }, opencode: { + ...DEFAULT_SERVER_SETTINGS.providers.opencode, enabled: true, - binaryPath: "", - serverUrl: "", - serverPassword: "", - customModels: [], }, }, }, @@ -122,7 +131,7 @@ function createMinimalSnapshot(): OrchestrationReadModel { title: "Project", workspaceRoot: "/repo/project", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5", }, scripts: [], @@ -137,7 +146,7 @@ function createMinimalSnapshot(): OrchestrationReadModel { projectId: PROJECT_ID, title: "Test thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5", }, interactionMode: "default", @@ -239,13 +248,13 @@ function resolveWsRpc(tag: string): unknown { if (tag === WS_METHODS.serverGetConfig) { return fixture.serverConfig; } - if (tag === WS_METHODS.gitListBranches) { + if (tag === WS_METHODS.vcsListRefs) { return { isRepo: true, - hasOriginRemote: true, + hasPrimaryRemote: true, nextCursor: null, totalCount: 1, - branches: [{ name: "main", current: true, isDefault: true, worktreePath: null }], + refs: [{ name: "main", current: true, isDefault: true, worktreePath: null }], }; } if (tag === WS_METHODS.projectsSearchEntries) { @@ -272,7 +281,7 @@ function sendServerConfigUpdatedPush(issues: ServerConfig["issues"]) { rpcHarness.emitStreamValue(WS_METHODS.subscribeServerConfig, { version: 1, type: "keybindingsUpdated", - payload: { issues }, + payload: { keybindings: fixture.serverConfig.keybindings, issues }, }); } diff --git a/apps/web/src/components/ProjectFavicon.tsx b/apps/web/src/components/ProjectFavicon.tsx index 38e07f59ced..ad47e01bb11 100644 --- 
a/apps/web/src/components/ProjectFavicon.tsx +++ b/apps/web/src/components/ProjectFavicon.tsx @@ -10,15 +10,29 @@ export function ProjectFavicon(input: { cwd: string; className?: string; }) { - const src = resolveEnvironmentHttpUrl({ - environmentId: input.environmentId, - pathname: "/api/project-favicon", - searchParams: { cwd: input.cwd }, - }); + const src = (() => { + try { + return resolveEnvironmentHttpUrl({ + environmentId: input.environmentId, + pathname: "/api/project-favicon", + searchParams: { cwd: input.cwd }, + }); + } catch { + return null; + } + })(); const [status, setStatus] = useState<"loading" | "loaded" | "error">(() => - loadedProjectFaviconSrcs.has(src) ? "loaded" : "loading", + src && loadedProjectFaviconSrcs.has(src) ? "loaded" : "loading", ); + if (!src) { + return ( + + ); + } + return ( <> {status !== "loaded" ? ( diff --git a/apps/web/src/components/ProviderLogo.tsx b/apps/web/src/components/ProviderLogo.tsx index a3bb15259c3..282fd56c6f1 100644 --- a/apps/web/src/components/ProviderLogo.tsx +++ b/apps/web/src/components/ProviderLogo.tsx @@ -1,8 +1,8 @@ import { type ComponentProps } from "react"; -import { type ProviderKind } from "@t3tools/contracts"; import { useAppSettings } from "../appSettings"; import { cn } from "../lib/utils"; +import type { ProviderKind } from "../providerKind"; import { type Icon, AmpIcon, @@ -26,7 +26,7 @@ const PROVIDER_ICON_BY_PROVIDER: Record = { kilo: KiloIcon, }; -export type ProviderLogoProps = Omit, "monochrome"> & { +export type ProviderLogoProps = ComponentProps & { provider: ProviderKind; }; @@ -39,8 +39,11 @@ export function ProviderLogo({ provider, className, style, ...props }: ProviderL return ( ); diff --git a/apps/web/src/components/PullRequestThreadDialog.tsx b/apps/web/src/components/PullRequestThreadDialog.tsx index ef5998e990f..ee1b212e976 100644 --- a/apps/web/src/components/PullRequestThreadDialog.tsx +++ b/apps/web/src/components/PullRequestThreadDialog.tsx @@ -7,8 +7,10 @@ import { 
gitPreparePullRequestThreadMutationOptions, gitResolvePullRequestQueryOptions, } from "~/lib/gitReactQuery"; +import { useGitStatus } from "~/lib/gitStatusState"; import { cn } from "~/lib/utils"; import { parsePullRequestReference } from "~/pullRequestReference"; +import { getSourceControlPresentation } from "~/sourceControlPresentation"; import { Button } from "./ui/button"; import { Dialog, @@ -52,6 +54,13 @@ export function PullRequestThreadDialog({ { wait: 450 }, (debouncerState) => ({ isPending: debouncerState.isPending }), ); + const { data: gitStatus = null } = useGitStatus({ environmentId, cwd }); + const sourceControlPresentation = useMemo( + () => getSourceControlPresentation(gitStatus?.sourceControlProvider), + [gitStatus?.sourceControlProvider], + ); + const terminology = sourceControlPresentation.terminology; + const SourceControlIcon = sourceControlPresentation.Icon; useEffect(() => { if (!open) return; @@ -168,9 +177,9 @@ export function PullRequestThreadDialog({ const validationMessage = !referenceDirty ? null : reference.trim().length === 0 - ? "Paste a GitHub pull request URL, `gh pr checkout 123`, or enter 123 / #123." + ? `Paste a ${terminology.singular} URL, checkout command, or enter 123 / #123.` : parsedReference === null - ? "Use a GitHub pull request URL, `gh pr checkout 123`, 123, or #123." + ? `Use a ${terminology.singular} URL, checkout command, 123, or #123.` : null; const errorMessage = validationMessage ?? @@ -178,11 +187,11 @@ export function PullRequestThreadDialog({ (resolvedPullRequest === null && resolvePullRequestQuery.isError ? resolvePullRequestQuery.error instanceof Error ? resolvePullRequestQuery.error.message - : "Failed to resolve pull request." + : `Failed to resolve ${terminology.singular}.` : preparePullRequestThreadMutation.error instanceof Error ? preparePullRequestThreadMutation.error.message : preparePullRequestThreadMutation.error - ? "Failed to prepare pull request thread." + ? 
`Failed to prepare ${terminology.singular} thread.` : null); return ( @@ -196,18 +205,23 @@ export function PullRequestThreadDialog({ > - Checkout Pull Request + + + Checkout {terminology.singular} + - Resolve a GitHub pull request, then create the draft thread in the main repo or in a - dedicated worktree. + Resolve a {sourceControlPresentation.providerName} {terminology.singular}, then create + the draft thread in the main repo or in a dedicated worktree. Pull request + + {terminology.singular} + { setReferenceDirty(true); @@ -245,7 +259,7 @@ export function PullRequestThreadDialog({ {isResolving ? (
- Resolving pull request... + Resolving {terminology.singular}...
) : null} diff --git a/apps/web/src/components/Sidebar.logic.test.ts b/apps/web/src/components/Sidebar.logic.test.ts index f92f2f628c3..926c117c1c0 100644 --- a/apps/web/src/components/Sidebar.logic.test.ts +++ b/apps/web/src/components/Sidebar.logic.test.ts @@ -1,4 +1,5 @@ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { ProviderDriverKind } from "@t3tools/contracts"; import { createThreadJumpHintVisibilityController, @@ -20,7 +21,13 @@ import { sortProjectsForSidebar, THREAD_JUMP_HINT_SHOW_DELAY_MS, } from "./Sidebar.logic"; -import { EnvironmentId, OrchestrationLatestTurn, ProjectId, ThreadId } from "@t3tools/contracts"; +import { + EnvironmentId, + OrchestrationLatestTurn, + ProjectId, + ProviderInstanceId, + ThreadId, +} from "@t3tools/contracts"; import { DEFAULT_INTERACTION_MODE, DEFAULT_RUNTIME_MODE, @@ -474,7 +481,7 @@ describe("resolveThreadStatusPill", () => { latestTurn: null, lastVisitedAt: undefined, session: { - provider: "codex" as const, + provider: ProviderDriverKind.make("codex"), status: "running" as const, createdAt: "2026-03-09T10:00:00.000Z", updatedAt: "2026-03-09T10:00:00.000Z", @@ -698,7 +705,7 @@ function makeProject(overrides: Partial = {}): Project { name: "Project", cwd: "/tmp/project", defaultModelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4", ...defaultModelSelection, }, @@ -717,7 +724,7 @@ function makeThread(overrides: Partial = {}): Thread { projectId: ProjectId.make("project-1"), title: "Thread", modelSelection: { - provider: "codex", + instanceId: ProviderInstanceId.make("codex"), model: "gpt-5.4", ...overrides?.modelSelection, }, diff --git a/apps/web/src/components/Sidebar.tsx b/apps/web/src/components/Sidebar.tsx index 586c7ed1df8..f617472c99e 100644 --- a/apps/web/src/components/Sidebar.tsx +++ b/apps/web/src/components/Sidebar.tsx @@ -3,8 +3,7 @@ import { ArrowUpDownIcon, ChevronRightIcon, CloudIcon, - GitPullRequestIcon, - PlusIcon, + 
FolderPlusIcon, SearchIcon, SettingsIcon, SquarePenIcon, @@ -12,6 +11,7 @@ import { TriangleAlertIcon, } from "lucide-react"; import { + ChangeRequestStatusIcon, prStatusIndicator, resolveThreadPr, terminalStatusFromRunningIds, @@ -143,6 +143,7 @@ import { SidebarMenuSubItem, SidebarSeparator, SidebarTrigger, + useSidebar, } from "./ui/sidebar"; import { useThreadSelectionStore } from "../threadSelectionStore"; import { useCommandPaletteStore } from "../commandPaletteStore"; @@ -366,13 +367,13 @@ const SidebarThreadRow = memo(function SidebarThreadRow(props: SidebarThreadRowP }, }); const pr = resolveThreadPr(thread.branch, gitStatus.data); - const prStatus = prStatusIndicator(pr); + const prStatus = prStatusIndicator(pr, gitStatus.data?.sourceControlProvider); const terminalStatus = terminalStatusFromRunningIds(runningTerminalIds); const isConfirmingArchive = confirmingArchiveThreadKey === threadKey && !isThreadRunning; const threadMetaClassName = isConfirmingArchive ? "pointer-events-none opacity-0" : !isThreadRunning - ? "pointer-events-none transition-opacity duration-150 group-hover/menu-sub-item:opacity-0 group-focus-within/menu-sub-item:opacity-0" + ? "pointer-events-none transition-opacity duration-150 max-sm:pr-6 group-hover/menu-sub-item:opacity-0 group-focus-within/menu-sub-item:opacity-0" : "pointer-events-none"; const clearConfirmingArchive = useCallback(() => { setConfirmingArchiveThreadKey((current) => (current === threadKey ? null : current)); @@ -557,7 +558,7 @@ const SidebarThreadRow = memo(function SidebarThreadRow(props: SidebarThreadRowP className={`inline-flex items-center justify-center ${prStatus.colorClass} cursor-pointer rounded-sm outline-hidden focus-visible:ring-1 focus-visible:ring-ring`} onClick={handlePrClick} > - + } /> @@ -568,7 +569,7 @@ const SidebarThreadRow = memo(function SidebarThreadRow(props: SidebarThreadRowP {renamingThreadKey === threadKey ? ( )} -
+
{isConfirmingArchive ? ( + ) : canRetry ? ( + + ) : null} + {status === "paired" ? ( + + ) : null} +
+ +
+ ); +} + function errorMessageFromUnknown(error: unknown): string { if (error instanceof Error && error.message.trim().length > 0) { return error.message; diff --git a/apps/web/src/components/chat/ChatComposer.tsx b/apps/web/src/components/chat/ChatComposer.tsx index 3033872544e..d64d0316684 100644 --- a/apps/web/src/components/chat/ChatComposer.tsx +++ b/apps/web/src/components/chat/ChatComposer.tsx @@ -5,7 +5,6 @@ import type { ProjectEntry, ProviderApprovalDecision, ProviderInteractionMode, - ProviderKind, ResolvedKeybindingsConfig, RuntimeMode, ScopedThreadRef, @@ -14,6 +13,8 @@ import type { TurnId, } from "@t3tools/contracts"; import { + ProviderDriverKind, + ProviderInstanceId, PROVIDER_SEND_TURN_MAX_ATTACHMENTS, PROVIDER_SEND_TURN_MAX_IMAGE_BYTES, } from "@t3tools/contracts"; @@ -60,7 +61,6 @@ import { shouldUseCompactComposerFooter, } from "../composerFooterLayout"; import { type ComposerPromptEditorHandle, ComposerPromptEditor } from "../ComposerPromptEditor"; -import { type ModelOptionEntry } from "../../providerModelOptions"; import { ProviderModelPicker } from "./ProviderModelPicker"; import { type ComposerCommandItem, ComposerCommandMenu } from "./ComposerCommandMenu"; import { ComposerPendingApprovalActions } from "./ComposerPendingApprovalActions"; @@ -72,11 +72,10 @@ import { ComposerPlanFollowUpBanner } from "./ComposerPlanFollowUpBanner"; import { resolveComposerMenuActiveItemId } from "./composerMenuHighlight"; import { searchSlashCommandItems } from "./composerSlashCommandSearch"; import { - getComposerProviderControls, getComposerProviderState, renderProviderTraitsMenuContent, renderProviderTraitsPicker, -} from "./composerProviderRegistry"; +} from "./composerProviderState"; import { ContextWindowMeter } from "./ContextWindowMeter"; import { buildExpandedImagePreview, type ExpandedImagePreview } from "./ExpandedImagePreview"; import { basenameOfPath } from "../../vscode-icons"; @@ -97,7 +96,14 @@ import { XIcon, } from "lucide-react"; import 
{ proposedPlanTitle } from "../../proposedPlan"; -import { resolveSelectableProvider, getProviderModels } from "../../providerModels"; +import { getProviderInteractionModeToggle } from "../../providerModels"; +import { + deriveProviderInstanceEntries, + resolveProviderDriverKindForInstanceSelection, + sortProviderInstanceEntries, + type ProviderInstanceEntry, +} from "../../providerInstances"; +import { type AppModelOption, getAppModelOptionsForInstance } from "../../modelSelection"; import type { UnifiedSettings } from "@t3tools/contracts/settings"; import type { SessionPhase, Thread } from "../../types"; import type { PendingUserInputDraftAnswer } from "../../pendingUserInput"; @@ -105,6 +111,7 @@ import type { PendingApproval, PendingUserInput } from "../../session-logic"; import { deriveLatestContextWindowSnapshot } from "../../lib/contextWindow"; import { formatProviderSkillDisplayName } from "../../providerSkillPresentation"; import { searchProviderSkills } from "../../providerSkillSearch"; +import { useMediaQuery } from "../../hooks/useMediaQuery"; const IMAGE_SIZE_LIMIT_LABEL = `${Math.round(PROVIDER_SEND_TURN_MAX_IMAGE_BYTES / (1024 * 1024))}MB`; @@ -132,6 +139,13 @@ const runtimeModeConfig: Record< const runtimeModeOptions = Object.keys(runtimeModeConfig) as RuntimeMode[]; const COMPOSER_PATH_QUERY_DEBOUNCE_MS = 120; const EMPTY_PROJECT_ENTRIES: ProjectEntry[] = []; +const COMPOSER_FLOATING_LAYER_SELECTOR = [ + '[data-slot="popover-popup"]', + '[data-slot="menu-popup"]', + '[data-slot="select-popup"]', + '[data-slot="combobox-popup"]', + '[data-slot="autocomplete-popup"]', +].join(","); const extendReplacementRangeForTrailingSpace = ( text: string, @@ -161,6 +175,10 @@ const terminalContextIdListsEqual = ( ): boolean => contexts.length === ids.length && contexts.every((context, index) => context.id === ids[index]); +function isInsideComposerFloatingLayer(element: Element): boolean { + return element.closest(COMPOSER_FLOATING_LAYER_SELECTOR) !== null; +} + 
const ComposerFooterModeControls = memo(function ComposerFooterModeControls(props: { showInteractionModeToggle: boolean; interactionMode: ProviderInteractionMode; @@ -283,7 +301,9 @@ const ComposerFooterPrimaryActions = memo(function ComposerFooterPrimaryActions( promptHasText: boolean; isSendBusy: boolean; isConnecting: boolean; + isEnvironmentUnavailable: boolean; hasSendableContent: boolean; + preserveComposerFocusOnPointerDown?: boolean; onPreviousPendingQuestion: () => void; onInterrupt: () => void; onImplementPlanInNewThread: () => void; @@ -302,8 +322,10 @@ const ComposerFooterPrimaryActions = memo(function ComposerFooterPrimaryActions( promptHasText={props.promptHasText} isSendBusy={props.isSendBusy} isConnecting={props.isConnecting} + isEnvironmentUnavailable={props.isEnvironmentUnavailable} isPreparingWorktree={props.isPreparingWorktree} hasSendableContent={props.hasSendableContent} + preserveComposerFocusOnPointerDown={props.preserveComposerFocusOnPointerDown ?? false} onPreviousPendingQuestion={props.onPreviousPendingQuestion} onInterrupt={props.onInterrupt} onImplementPlanInNewThread={props.onImplementPlanInNewThread} @@ -344,7 +366,7 @@ export interface ChatComposerHandle { selectedPromptEffort: string | null; selectedModelOptionsForDispatch: unknown; selectedModelSelection: ModelSelection; - selectedProvider: ProviderKind; + selectedProvider: ProviderDriverKind; selectedModel: string; selectedProviderModels: ReadonlyArray; }; @@ -373,6 +395,10 @@ export interface ChatComposerProps { isConnecting: boolean; isSendBusy: boolean; isPreparingWorktree: boolean; + environmentUnavailable: { + readonly label: string; + readonly connectionState: "connecting" | "disconnected" | "error"; + } | null; // Pending approvals / inputs activePendingApproval: PendingApproval | null; @@ -383,7 +409,7 @@ export interface ChatComposerProps { isLastQuestion: boolean; canAdvance: boolean; customAnswer: string; - activeQuestion: { id: string } | null; + activeQuestion: { id: 
string; multiSelect?: boolean | undefined } | null; } | null; activePendingResolvedAnswers: Record | null; activePendingIsResponding: boolean; @@ -404,7 +430,7 @@ export interface ChatComposerProps { interactionMode: ProviderInteractionMode; // Provider / model - lockedProvider: ProviderKind | null; + lockedProvider: ProviderDriverKind | null; providerStatuses: ServerProvider[]; activeProjectDefaultModelSelection: ModelSelection | null | undefined; activeThreadModelSelection: ModelSelection | null | undefined; @@ -447,7 +473,7 @@ export interface ChatComposerProps { cursorAdjacentToMention: boolean, ) => void; - onProviderModelSelect: (provider: ProviderKind, model: string) => void; + onProviderModelSelect: (instanceId: ProviderInstanceId, model: string) => void; toggleInteractionMode: () => void; handleRuntimeModeChange: (mode: RuntimeMode) => void; handleInteractionModeChange: (mode: ProviderInteractionMode) => void; @@ -480,6 +506,7 @@ export const ChatComposer = memo( isConnecting, isSendBusy, isPreparingWorktree, + environmentUnavailable, activePendingApproval, pendingApprovals, pendingUserInputs, @@ -564,29 +591,134 @@ export const ChatComposer = memo( // ------------------------------------------------------------------ // Model state // ------------------------------------------------------------------ + // Instance-aware projection of the wire provider list. One entry per + // configured instance (default built-in + any custom `providerInstances.*`), + // sorted default-first per driver kind for a stable picker order. + const providerInstanceEntries = useMemo>( + () => sortProviderInstanceEntries(deriveProviderInstanceEntries(providerStatuses)), + [providerStatuses], + ); const selectedProviderByThreadId = composerDraft.activeProvider ?? null; const threadProvider = - activeThreadModelSelection?.provider ?? activeProjectDefaultModelSelection?.provider ?? null; + activeThread?.session?.providerInstanceId ?? + activeThreadModelSelection?.instanceId ?? 
+ activeProjectDefaultModelSelection?.instanceId ?? + null; + const explicitSelectedInstanceId = selectedProviderByThreadId ?? threadProvider; + + const unlockedSelectedProvider = + resolveProviderDriverKindForInstanceSelection( + providerInstanceEntries, + providerStatuses, + explicitSelectedInstanceId, + ) ?? ProviderDriverKind.make("codex"); + const selectedProvider: ProviderDriverKind = lockedProvider ?? unlockedSelectedProvider; + const lockedContinuationGroupKey = useMemo((): string | null => { + if (!lockedProvider || !activeThread) return null; + const lockedInstanceId = + activeThread.session?.providerInstanceId ?? activeThreadModelSelection?.instanceId; + if (!lockedInstanceId) return null; + return ( + providerInstanceEntries.find((entry) => entry.instanceId === lockedInstanceId) + ?.continuationGroupKey ?? null + ); + }, [ + activeThread, + activeThreadModelSelection?.instanceId, + lockedProvider, + providerInstanceEntries, + ]); - const unlockedSelectedProvider = resolveSelectableProvider( - providerStatuses, - selectedProviderByThreadId ?? threadProvider ?? "codex", - ); - const selectedProvider: ProviderKind = lockedProvider ?? unlockedSelectedProvider; + // Resolve which configured instance the composer is currently targeting. + // Priority: + // 1. The composer draft's `activeProvider` — the user's unsaved pick + // from the model picker (must win, otherwise the UI appears to + // ignore picker selections). + // 2. Thread's persisted instance id (server-side saved selection). + // 3. Project default's instance id. + // 4. First enabled entry matching the current driver kind. + // 5. First enabled entry overall / default instance for the kind. 
+ // + const selectedInstanceId = useMemo(() => { + const candidates: Array = [ + composerDraft.activeProvider, + activeThread?.session?.providerInstanceId, + activeThreadModelSelection?.instanceId, + activeProjectDefaultModelSelection?.instanceId, + ]; + for (const candidate of candidates) { + if (!candidate) continue; + const match = providerInstanceEntries.find( + (entry) => entry.instanceId === candidate && entry.enabled, + ); + if (match) { + // When locked to a specific driver kind, ignore persisted instance + // ids from a different kind or continuation group. + if (lockedProvider && match.driverKind !== lockedProvider) continue; + if ( + lockedContinuationGroupKey && + match.continuationGroupKey !== lockedContinuationGroupKey + ) { + continue; + } + return match.instanceId; + } + } + if (explicitSelectedInstanceId) { + return ProviderInstanceId.make(explicitSelectedInstanceId); + } + const byKind = providerInstanceEntries.find( + (entry) => + entry.enabled && + entry.driverKind === selectedProvider && + (!lockedContinuationGroupKey || + entry.continuationGroupKey === lockedContinuationGroupKey), + ); + if (byKind) return byKind.instanceId; + const anyEnabled = providerInstanceEntries.find((entry) => entry.enabled); + return ( + anyEnabled?.instanceId ?? + providerInstanceEntries[0]?.instanceId ?? + activeThreadModelSelection?.instanceId ?? + activeProjectDefaultModelSelection?.instanceId ?? 
+ ProviderInstanceId.make("codex") + ); + }, [ + activeProjectDefaultModelSelection?.instanceId, + activeThread?.session?.providerInstanceId, + activeThreadModelSelection?.instanceId, + composerDraft.activeProvider, + explicitSelectedInstanceId, + lockedContinuationGroupKey, + lockedProvider, + providerInstanceEntries, + selectedProvider, + ]); const { modelOptions: composerModelOptions, selectedModel } = useEffectiveComposerModelState({ threadRef: composerDraftTarget, providers: providerStatuses, selectedProvider, + selectedInstanceId, threadModelSelection: activeThreadModelSelection, projectModelSelection: activeProjectDefaultModelSelection, settings, }); - const selectedProviderModels = getProviderModels(providerStatuses, selectedProvider); + // Resolve the active instance's snapshot by `instanceId` so a custom + // instance gets its own slash commands, skills, and model list — not + // the first snapshot for the same driver kind. + const selectedProviderEntry = useMemo( + () => providerInstanceEntries.find((entry) => entry.instanceId === selectedInstanceId), + [providerInstanceEntries, selectedInstanceId], + ); const selectedProviderStatus = useMemo( - () => providerStatuses.find((provider) => provider.provider === selectedProvider), - [providerStatuses, selectedProvider], + () => selectedProviderEntry?.snapshot ?? null, + [selectedProviderEntry], + ); + const selectedProviderModels = useMemo>( + () => selectedProviderEntry?.models ?? 
[], + [selectedProviderEntry], ); const composerProviderState = useMemo( @@ -596,7 +728,7 @@ export const ChatComposer = memo( model: selectedModel, models: selectedProviderModels, prompt, - modelOptions: composerModelOptions, + modelOptions: composerModelOptions?.[selectedProvider], }), [composerModelOptions, prompt, selectedModel, selectedProvider, selectedProviderModels], ); @@ -604,38 +736,39 @@ export const ChatComposer = memo( const selectedPromptEffort = composerProviderState.promptEffort; const selectedModelOptionsForDispatch = composerProviderState.modelOptionsForDispatch; const composerProviderControls = useMemo( - () => getComposerProviderControls(selectedProvider), - [selectedProvider], + () => ({ + showInteractionModeToggle: getProviderInteractionModeToggle( + providerStatuses, + selectedProvider, + ), + }), + [providerStatuses, selectedProvider], ); const selectedModelSelection = useMemo( - () => createModelSelection(selectedProvider, selectedModel, selectedModelOptionsForDispatch), - [selectedModel, selectedModelOptionsForDispatch, selectedProvider], + () => + createModelSelection(selectedInstanceId, selectedModel, selectedModelOptionsForDispatch), + [selectedInstanceId, selectedModel, selectedModelOptionsForDispatch], ); const selectedModelForPicker = selectedModel; - const modelOptionsByProvider = useMemo< - Record> + // Instance-keyed option list so the picker can show each configured + // instance (built-in + custom) as a first-class sidebar entry. The + // options are server-reported models plus that exact instance's + // configured custom models; selected slugs are not injected into lists. + const modelOptionsByInstance = useMemo< + ReadonlyMap> >(() => { - const modelsFor = (kind: ProviderKind): ReadonlyArray => - (providerStatuses.find((p) => p.provider === kind)?.models ?? 
[]).map( - ({ slug, name, isCustom }) => ({ slug, name, isCustom }), - ); - return { - codex: modelsFor("codex"), - claudeAgent: modelsFor("claudeAgent"), - copilot: modelsFor("copilot"), - cursor: modelsFor("cursor"), - opencode: modelsFor("opencode"), - geminiCli: modelsFor("geminiCli"), - amp: modelsFor("amp"), - kilo: modelsFor("kilo"), - }; - }, [providerStatuses]); + const out = new Map>(); + for (const entry of providerInstanceEntries) { + out.set(entry.instanceId, getAppModelOptionsForInstance(settings, entry)); + } + return out; + }, [providerInstanceEntries, settings]); const selectedModelForPickerWithCustomFallback = useMemo(() => { - const currentOptions = modelOptionsByProvider[selectedProvider]; + const currentOptions = modelOptionsByInstance.get(selectedInstanceId) ?? []; return currentOptions.some((option) => option.slug === selectedModelForPicker) ? selectedModelForPicker : (normalizeModelSlug(selectedModelForPicker, selectedProvider) ?? selectedModelForPicker); - }, [modelOptionsByProvider, selectedModelForPicker, selectedProvider]); + }, [modelOptionsByInstance, selectedInstanceId, selectedModelForPicker, selectedProvider]); // ------------------------------------------------------------------ // Context window @@ -662,17 +795,25 @@ export const ChatComposer = memo( const [isComposerFooterCompact, setIsComposerFooterCompact] = useState(false); const [isComposerPrimaryActionsCompact, setIsComposerPrimaryActionsCompact] = useState(false); const [isComposerModelPickerOpen, setIsComposerModelPickerOpen] = useState(false); + const [isComposerFocused, setIsComposerFocused] = useState(false); + const isMobileViewport = useMediaQuery("max-sm"); + const isComposerCollapsedMobile = isMobileViewport && !isComposerFocused; // ------------------------------------------------------------------ // Refs // ------------------------------------------------------------------ const composerEditorRef = useRef(null); const composerFormRef = useRef(null); + const 
composerSurfaceRef = useRef(null); const composerFormHeightRef = useRef(0); const composerSelectLockRef = useRef(false); const composerMenuOpenRef = useRef(false); const composerMenuItemsRef = useRef([]); const activeComposerMenuItemRef = useRef(null); + const composerBlurFrameRef = useRef(null); + const mobileComposerExpandFrameRef = useRef(null); + const mobileComposerExpandReleaseFrameRef = useRef(null); + const mobileComposerExpandInFlightRef = useRef(false); const dragDepthRef = useRef(0); // ------------------------------------------------------------------ @@ -817,6 +958,8 @@ export const ChatComposer = memo( isComposerApprovalState || pendingUserInputs.length > 0 || (showPlanFollowUpPrompt && activeProposedPlan !== null); + const showCollapsedMobilePromptRow = + isComposerCollapsedMobile && !isComposerApprovalState && pendingUserInputs.length === 0; const composerFooterHasWideActions = showPlanFollowUpPrompt || activePendingProgress !== null; const showPlanSidebarToggle = Boolean(activePlan || sidebarProposedPlan || planSidebarOpen); @@ -909,6 +1052,11 @@ export const ChatComposer = memo( : null, [activePendingIsResponding, activePendingProgress, activePendingResolvedAnswers], ); + const collapsedComposerPrimaryActionDisabled = + phase === "running" || isSendBusy || isConnecting || !composerSendState.hasSendableContent; + const collapsedComposerPrimaryActionLabel = "Send message"; + const showMobilePendingAnswerActions = + isMobileViewport && !isComposerCollapsedMobile && pendingPrimaryAction !== null; // ------------------------------------------------------------------ // Prompt helpers @@ -1438,6 +1586,69 @@ export const ChatComposer = memo( [composerHighlightedItemId, composerMenuItems], ); + const blurMobileComposerAfterSend = useCallback(() => { + if (!isMobileViewport) return; + if (composerBlurFrameRef.current !== null) { + window.cancelAnimationFrame(composerBlurFrameRef.current); + composerBlurFrameRef.current = null; + } + const activeElement = 
document.activeElement; + if (activeElement instanceof HTMLElement) { + activeElement.blur(); + } + setIsComposerFocused(false); + }, [isMobileViewport]); + + const shouldBlurMobileComposerOnSubmit = useCallback(() => { + if (!isMobileViewport) return false; + if (isSendBusy || isConnecting || phase === "running") return false; + if (activePendingProgress) { + return activePendingProgress.isLastQuestion && Boolean(activePendingResolvedAnswers); + } + return showPlanFollowUpPrompt || composerSendState.hasSendableContent; + }, [ + activePendingProgress, + activePendingResolvedAnswers, + composerSendState.hasSendableContent, + isConnecting, + isMobileViewport, + isSendBusy, + phase, + showPlanFollowUpPrompt, + ]); + + const submitComposer = useCallback( + (event?: { preventDefault: () => void }) => { + onSend(event); + if (shouldBlurMobileComposerOnSubmit()) { + blurMobileComposerAfterSend(); + } + }, + [blurMobileComposerAfterSend, onSend, shouldBlurMobileComposerOnSubmit], + ); + const expandMobileComposer = useCallback(() => { + if (composerBlurFrameRef.current !== null) { + window.cancelAnimationFrame(composerBlurFrameRef.current); + composerBlurFrameRef.current = null; + } + if (mobileComposerExpandFrameRef.current !== null) { + window.cancelAnimationFrame(mobileComposerExpandFrameRef.current); + } + if (mobileComposerExpandReleaseFrameRef.current !== null) { + window.cancelAnimationFrame(mobileComposerExpandReleaseFrameRef.current); + } + mobileComposerExpandInFlightRef.current = true; + setIsComposerFocused(true); + mobileComposerExpandFrameRef.current = window.requestAnimationFrame(() => { + mobileComposerExpandFrameRef.current = null; + composerEditorRef.current?.focusAtEnd(); + mobileComposerExpandReleaseFrameRef.current = window.requestAnimationFrame(() => { + mobileComposerExpandReleaseFrameRef.current = null; + mobileComposerExpandInFlightRef.current = false; + }); + }); + }, []); + // ------------------------------------------------------------------ // 
Callbacks: command key // ------------------------------------------------------------------ @@ -1468,7 +1679,7 @@ export const ChatComposer = memo( } } if (key === "Enter" && !event.shiftKey) { - void onSend(); + submitComposer(); return true; } return false; @@ -1578,6 +1789,50 @@ export const ChatComposer = memo( const handleImplementPlanInNewThreadPrimaryAction = useCallback(() => { void onImplementPlanInNewThread(); }, [onImplementPlanInNewThread]); + const scheduleComposerCollapseCheck = useCallback(() => { + if (!isMobileViewport) { + return; + } + if (mobileComposerExpandInFlightRef.current) { + return; + } + if (composerBlurFrameRef.current !== null) { + window.cancelAnimationFrame(composerBlurFrameRef.current); + } + composerBlurFrameRef.current = window.requestAnimationFrame(() => { + composerBlurFrameRef.current = null; + if (mobileComposerExpandInFlightRef.current) { + return; + } + const composerSurface = composerSurfaceRef.current; + const activeElement = document.activeElement; + if (activeElement instanceof Element && isInsideComposerFloatingLayer(activeElement)) { + return; + } + if ( + composerSurface && + activeElement instanceof Node && + composerSurface.contains(activeElement) + ) { + return; + } + setIsComposerFocused(false); + }); + }, [isMobileViewport]); + + useEffect(() => { + return () => { + if (composerBlurFrameRef.current !== null) { + window.cancelAnimationFrame(composerBlurFrameRef.current); + } + if (mobileComposerExpandFrameRef.current !== null) { + window.cancelAnimationFrame(mobileComposerExpandFrameRef.current); + } + if (mobileComposerExpandReleaseFrameRef.current !== null) { + window.cancelAnimationFrame(mobileComposerExpandReleaseFrameRef.current); + } + }; + }, []); // ------------------------------------------------------------------ // Imperative handle @@ -1691,7 +1946,7 @@ export const ChatComposer = memo( return (
@@ -1706,21 +1961,83 @@ export const ChatComposer = memo( onDrop={onComposerDrop} >
{ + const activeElement = event.target; + if ( + isComposerCollapsedMobile && + activeElement instanceof HTMLElement && + activeElement.closest('[data-chat-composer-collapsed-controls="true"]') + ) { + return; + } + if (composerBlurFrameRef.current !== null) { + window.cancelAnimationFrame(composerBlurFrameRef.current); + composerBlurFrameRef.current = null; + } + setIsComposerFocused(true); + }} + onBlurCapture={() => { + scheduleComposerCollapseCheck(); + }} > - {activePendingApproval ? ( -
+ {!isComposerCollapsedMobile && + (activePendingApproval ? ( +
+ +
+ ) : pendingUserInputs.length > 0 ? ( +
+ +
+ ) : showPlanFollowUpPrompt && activeProposedPlan ? ( +
+ +
+ ) : null)} + + {isComposerCollapsedMobile && activePendingApproval ? ( +
+
+ +
- ) : pendingUserInputs.length > 0 ? ( -
+ ) : isComposerCollapsedMobile && pendingUserInputs.length > 0 ? ( +
+
+
+ + {activePendingProgress?.activeQuestion?.multiSelect ? ( + + ) : null} +
+
- ) : showPlanFollowUpPrompt && activeProposedPlan ? ( -
- + ) : null} + + {showCollapsedMobilePromptRow ? ( +
+ +
) : null} @@ -1743,6 +2139,7 @@ export const ChatComposer = memo( className={cn( "relative px-3 pb-2 sm:px-4", hasComposerHeader ? "pt-2.5 sm:pt-3" : "pt-3.5 sm:pt-4", + isComposerCollapsedMobile && "hidden", )} > {composerMenuOpen && !isComposerApprovalState && ( @@ -1764,7 +2161,8 @@ export const ChatComposer = memo(
)} - {!isComposerApprovalState && + {!isComposerCollapsedMobile && + !isComposerApprovalState && pendingUserInputs.length === 0 && composerImages.length > 0 && (
@@ -1831,43 +2229,80 @@ export const ChatComposer = memo(
)} - +
+ + {showMobilePendingAnswerActions ? ( +
+ +
+ ) : null} +
{/* Bottom toolbar */} - {activePendingApproval ? ( + {isComposerCollapsedMobile ? null : activePendingApproval ? (
{ setIsComposerModelPickerOpen(open); }} - onProviderModelChange={onProviderModelSelect} + onInstanceModelChange={onProviderModelSelect} /> {isComposerFooterCompact ? ( @@ -1967,8 +2404,10 @@ export const ChatComposer = memo( promptHasText={prompt.trim().length > 0} isSendBusy={isSendBusy} isConnecting={isConnecting} + isEnvironmentUnavailable={environmentUnavailable !== null} isPreparingWorktree={isPreparingWorktree} hasSendableContent={composerSendState.hasSendableContent} + preserveComposerFocusOnPointerDown={isMobileViewport} onPreviousPendingQuestion={onPreviousActivePendingUserInputQuestion} onInterrupt={handleInterruptPrimaryAction} onImplementPlanInNewThread={handleImplementPlanInNewThreadPrimaryAction} diff --git a/apps/web/src/components/chat/ChatHeader.test.ts b/apps/web/src/components/chat/ChatHeader.test.ts new file mode 100644 index 00000000000..1b7b219bdb4 --- /dev/null +++ b/apps/web/src/components/chat/ChatHeader.test.ts @@ -0,0 +1,48 @@ +import { EnvironmentId } from "@t3tools/contracts"; +import { describe, expect, it } from "vitest"; + +import { shouldShowOpenInPicker } from "./ChatHeader"; + +describe("shouldShowOpenInPicker", () => { + const primaryEnvironmentId = EnvironmentId.make("environment-primary"); + + it("shows the picker for projects in the primary environment", () => { + expect( + shouldShowOpenInPicker({ + activeProjectName: "codething-mvp", + activeThreadEnvironmentId: primaryEnvironmentId, + primaryEnvironmentId, + }), + ).toBe(true); + }); + + it("hides the picker when hosted static mode has no primary environment", () => { + expect( + shouldShowOpenInPicker({ + activeProjectName: "codething-mvp", + activeThreadEnvironmentId: EnvironmentId.make("environment-remote"), + primaryEnvironmentId: null, + }), + ).toBe(false); + }); + + it("hides the picker for remote environments", () => { + expect( + shouldShowOpenInPicker({ + activeProjectName: "codething-mvp", + activeThreadEnvironmentId: EnvironmentId.make("environment-remote"), + 
primaryEnvironmentId, + }), + ).toBe(false); + }); + + it("hides the picker when there is no active project", () => { + expect( + shouldShowOpenInPicker({ + activeProjectName: undefined, + activeThreadEnvironmentId: primaryEnvironmentId, + primaryEnvironmentId, + }), + ).toBe(false); + }); +}); diff --git a/apps/web/src/components/chat/ChatHeader.tsx b/apps/web/src/components/chat/ChatHeader.tsx index 390457f01d1..aedcc3fb288 100644 --- a/apps/web/src/components/chat/ChatHeader.tsx +++ b/apps/web/src/components/chat/ChatHeader.tsx @@ -2,10 +2,10 @@ import { type EnvironmentId, type EditorId, type ProjectScript, - type ProviderKind, type ResolvedKeybindingsConfig, type ThreadId, } from "@t3tools/contracts"; +import type { ProviderKind } from "../../providerKind"; import { scopeThreadRef } from "@t3tools/client-runtime"; import { memo } from "react"; import GitActionsControl from "../GitActionsControl"; @@ -17,6 +17,7 @@ import ProjectScriptsControl, { type NewProjectScriptInput } from "../ProjectScr import { Toggle } from "../ui/toggle"; import { SidebarTrigger } from "../ui/sidebar"; import { OpenInPicker } from "./OpenInPicker"; +import { usePrimaryEnvironmentId } from "../../environments/primary"; interface ChatHeaderProps { activeThreadEnvironmentId: EnvironmentId; @@ -46,6 +47,18 @@ interface ChatHeaderProps { onToggleDiff: () => void; } +export function shouldShowOpenInPicker(input: { + readonly activeProjectName: string | undefined; + readonly activeThreadEnvironmentId: EnvironmentId; + readonly primaryEnvironmentId: EnvironmentId | null; +}): boolean { + return ( + Boolean(input.activeProjectName) && + input.primaryEnvironmentId !== null && + input.activeThreadEnvironmentId === input.primaryEnvironmentId + ); +} + export const ChatHeader = memo(function ChatHeader({ activeThreadEnvironmentId, activeThreadId, @@ -73,6 +86,13 @@ export const ChatHeader = memo(function ChatHeader({ onToggleTerminal, onToggleDiff, }: ChatHeaderProps) { + const 
primaryEnvironmentId = usePrimaryEnvironmentId(); + const showOpenInPicker = shouldShowOpenInPicker({ + activeProjectName, + activeThreadEnvironmentId, + primaryEnvironmentId, + }); + return (
@@ -106,7 +126,7 @@ export const ChatHeader = memo(function ChatHeader({ onDeleteScript={onDeleteProjectScript} /> )} - {activeProjectName && ( + {showOpenInPicker && ( } /> - {!isGitRepo + {!isGitRepo && !diffOpen ? "Diff panel is unavailable because this project is not a git repository." : diffToggleShortcutLabel ? `Toggle diff panel (${diffToggleShortcutLabel})` diff --git a/apps/web/src/components/chat/CompactComposerControlsMenu.browser.tsx b/apps/web/src/components/chat/CompactComposerControlsMenu.browser.tsx index 54181caa02e..49eb5fbb94b 100644 --- a/apps/web/src/components/chat/CompactComposerControlsMenu.browser.tsx +++ b/apps/web/src/components/chat/CompactComposerControlsMenu.browser.tsx @@ -1,7 +1,10 @@ import { + DEFAULT_MODEL, DEFAULT_MODEL_BY_PROVIDER, EnvironmentId, ModelSelection, + ProviderInstanceId, + ProviderDriverKind, ThreadId, } from "@t3tools/contracts"; import { scopedThreadKey, scopeThreadRef } from "@t3tools/client-runtime"; @@ -10,6 +13,7 @@ import "../../index.css"; import { page } from "vitest/browser"; import { afterEach, describe, expect, it, vi } from "vitest"; import { render } from "vitest-browser-react"; +import { createModelCapabilities, createModelSelection } from "@t3tools/shared/model"; import { CompactComposerControlsMenu } from "./CompactComposerControlsMenu"; import { TraitsMenuContent } from "./TraitsPicker"; @@ -17,12 +21,42 @@ import { useComposerDraftStore } from "../../composerDraftStore"; const LOCAL_ENVIRONMENT_ID = EnvironmentId.make("environment-local"); +function selectDescriptor( + id: string, + label: string, + options: ReadonlyArray<{ id: string; label: string; isDefault?: boolean }>, + promptInjectedValues?: ReadonlyArray, +) { + return { + id, + label, + type: "select" as const, + options: [...options], + ...(options.find((option) => option.isDefault)?.id + ? { currentValue: options.find((option) => option.isDefault)?.id } + : {}), + ...(promptInjectedValues && promptInjectedValues.length > 0 + ? 
{ promptInjectedValues: [...promptInjectedValues] } + : {}), + }; +} + +function booleanDescriptor(id: string, label: string) { + return { + id, + label, + type: "boolean" as const, + }; +} + async function mountMenu(props?: { modelSelection?: ModelSelection; prompt?: string }) { const threadId = ThreadId.make("thread-compact-menu"); const threadRef = scopeThreadRef(LOCAL_ENVIRONMENT_ID, threadId); const threadKey = scopedThreadKey(threadRef); - const provider = props?.modelSelection?.provider ?? "claudeAgent"; - const model = props?.modelSelection?.model ?? DEFAULT_MODEL_BY_PROVIDER[provider]; + const provider = ProviderDriverKind.make("claudeAgent"); + const instanceId = ProviderInstanceId.make(props?.modelSelection?.instanceId ?? provider); + const model = + props?.modelSelection?.model ?? DEFAULT_MODEL_BY_PROVIDER[provider] ?? DEFAULT_MODEL; useComposerDraftStore.setState({ draftsByThreadKey: { @@ -33,13 +67,9 @@ async function mountMenu(props?: { modelSelection?: ModelSelection; prompt?: str persistedAttachments: [], terminalContexts: [], modelSelectionByProvider: { - [provider]: { - provider, - model, - ...(props?.modelSelection?.options ? { options: props.modelSelection.options } : {}), - }, + [instanceId]: createModelSelection(instanceId, model, props?.modelSelection?.options), }, - activeProvider: provider, + activeProvider: instanceId, runtimeMode: null, interactionMode: null, }, @@ -51,74 +81,58 @@ async function mountMenu(props?: { modelSelection?: ModelSelection; prompt?: str document.body.append(host); const onPromptChange = vi.fn(); const providerOptions = props?.modelSelection?.options; - const models = - provider === "claudeAgent" - ? 
[ - { - slug: "claude-opus-4-6", - name: "Claude Opus 4.6", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High", isDefault: true }, - { value: "max", label: "Max" }, - { value: "ultrathink", label: "Ultrathink" }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: ["ultrathink"], - }, - }, - { - slug: "claude-haiku-4-5", - name: "Claude Haiku 4.5", - isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, - }, - { - slug: "claude-sonnet-4-6", - name: "Claude Sonnet 4.6", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High", isDefault: true }, - { value: "ultrathink", label: "Ultrathink" }, - ], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: ["ultrathink"], - }, - }, - ] - : [ - { - slug: "gpt-5.4", - name: "GPT-5.4", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "xhigh", label: "Extra High" }, - { value: "high", label: "High", isDefault: true }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, - }, - ]; + const models = [ + { + slug: "claude-opus-4-6", + name: "Claude Opus 4.6", + isCustom: false, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor( + "effort", + "Reasoning", + [ + { id: "low", label: "Low" }, + { id: "medium", label: "Medium" }, + { id: "high", label: "High", isDefault: true }, + { id: "max", label: "Max" }, + { id: "ultrathink", label: "Ultrathink" }, + ], + ["ultrathink"], + ), + 
booleanDescriptor("fastMode", "Fast Mode"), + ], + }), + }, + { + slug: "claude-haiku-4-5", + name: "Claude Haiku 4.5", + isCustom: false, + capabilities: createModelCapabilities({ + optionDescriptors: [booleanDescriptor("thinking", "Thinking")], + }), + }, + { + slug: "claude-sonnet-4-6", + name: "Claude Sonnet 4.6", + isCustom: false, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor( + "effort", + "Reasoning", + [ + { id: "low", label: "Low" }, + { id: "medium", label: "Medium" }, + { id: "high", label: "High", isDefault: true }, + { id: "ultrathink", label: "Ultrathink" }, + ], + ["ultrathink"], + ), + ], + }), + }, + ]; const screen = await render( { it("shows fast mode controls for Opus", async () => { await using _ = await mountMenu({ - modelSelection: { provider: "claudeAgent", model: "claude-opus-4-6" }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + ), }); await page.getByLabelText("More composer controls").click(); @@ -177,14 +194,17 @@ describe("CompactComposerControlsMenu", () => { await vi.waitFor(() => { const text = document.body.textContent ?? 
""; expect(text).toContain("Fast Mode"); - expect(text).toContain("off"); - expect(text).toContain("on"); + expect(text).toContain("On"); + expect(text).toContain("Off"); }); }); it("hides fast mode controls for non-Opus Claude models", async () => { await using _ = await mountMenu({ - modelSelection: { provider: "claudeAgent", model: "claude-sonnet-4-6" }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + ), }); await page.getByLabelText("More composer controls").click(); @@ -196,7 +216,10 @@ describe("CompactComposerControlsMenu", () => { it("shows only the provided effort options", async () => { await using _ = await mountMenu({ - modelSelection: { provider: "claudeAgent", model: "claude-sonnet-4-6" }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-sonnet-4-6", + ), }); await page.getByLabelText("More composer controls").click(); @@ -213,11 +236,11 @@ describe("CompactComposerControlsMenu", () => { it("shows a Claude thinking on/off section for Haiku", async () => { await using _ = await mountMenu({ - modelSelection: { - provider: "claudeAgent", - model: "claude-haiku-4-5", - options: { thinking: true }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-haiku-4-5", + [{ id: "thinking", value: true }], + ), }); await page.getByLabelText("More composer controls").click(); @@ -225,18 +248,18 @@ describe("CompactComposerControlsMenu", () => { await vi.waitFor(() => { const text = document.body.textContent ?? 
""; expect(text).toContain("Thinking"); - expect(text).toContain("On (default)"); + expect(text).toContain("On"); expect(text).toContain("Off"); }); }); it("shows prompt-controlled Ultrathink state with selectable effort controls", async () => { await using _ = await mountMenu({ - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { effort: "high" }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [{ id: "effort", value: "high" }], + ), prompt: "Ultrathink:\nInvestigate this", }); @@ -244,18 +267,18 @@ describe("CompactComposerControlsMenu", () => { await vi.waitFor(() => { const text = document.body.textContent ?? ""; - expect(text).toContain("Effort"); + expect(text).toContain("Reasoning"); expect(text).not.toContain("ultrathink"); }); }); it("warns when ultrathink appears in prompt body text", async () => { await using _ = await mountMenu({ - modelSelection: { - provider: "claudeAgent", - model: "claude-opus-4-6", - options: { effort: "high" }, - }, + modelSelection: createModelSelection( + ProviderInstanceId.make("claudeAgent"), + "claude-opus-4-6", + [{ id: "effort", value: "high" }], + ), prompt: "Ultrathink:\nplease ultrathink about this problem", }); @@ -263,7 +286,9 @@ describe("CompactComposerControlsMenu", () => { await vi.waitFor(() => { const text = document.body.textContent ?? ""; - expect(text).toContain("Remove Ultrathink from the prompt to change effort."); + expect(text).toContain( + 'Your prompt contains "ultrathink" in the text. 
Remove it to change this option.', + ); }); }); diff --git a/apps/web/src/components/chat/ComposerBannerStack.tsx b/apps/web/src/components/chat/ComposerBannerStack.tsx new file mode 100644 index 00000000000..9901237fdf0 --- /dev/null +++ b/apps/web/src/components/chat/ComposerBannerStack.tsx @@ -0,0 +1,195 @@ +import { useEffect, useRef, useState, type CSSProperties, type ReactNode } from "react"; +import { XIcon } from "lucide-react"; + +import { cn } from "~/lib/utils"; +import { Alert, AlertAction, AlertDescription, AlertTitle } from "../ui/alert"; +import { Button } from "../ui/button"; + +const DISMISS_TRANSITION_MS = 220; +const frontExitStyle = { + opacity: 0, + transform: "translate3d(0, 4rem, 0)", +} satisfies CSSProperties; +const stackedExitStyle = { + opacity: 0, + transform: "translate3d(0, 7rem, 0)", +} satisfies CSSProperties; +const restingStyle = { + opacity: 1, + transform: "translate3d(0, 0, 0)", +} satisfies CSSProperties; +const exitTransitionStyle = { + transition: `transform ${DISMISS_TRANSITION_MS}ms ease-in, opacity ${DISMISS_TRANSITION_MS}ms ease-in`, + willChange: "transform, opacity", +} satisfies CSSProperties; + +export interface ComposerBannerStackItem { + readonly id: string; + readonly variant: "error" | "info" | "success" | "warning"; + readonly icon: ReactNode; + readonly title: ReactNode; + readonly description?: ReactNode; + readonly actions?: ReactNode; + readonly dismissLabel?: string; + readonly onDismiss?: () => void; +} + +interface ComposerBannerStackProps { + readonly className?: string; + readonly items: ReadonlyArray; +} + +export function ComposerBannerStack({ className, items }: ComposerBannerStackProps) { + const [exitingItemId, setExitingItemId] = useState(null); + const dismissTimeoutRef = useRef | null>(null); + + useEffect(() => { + if (exitingItemId && !items.some((item) => item.id === exitingItemId)) { + setExitingItemId(null); + } + }, [exitingItemId, items]); + + useEffect(() => { + return () => { + if 
(dismissTimeoutRef.current) { + clearTimeout(dismissTimeoutRef.current); + } + }; + }, []); + + if (items.length === 0) { + return null; + } + + const frontItem = items[0]; + if (!frontItem) { + return null; + } + const stackedItems = items.slice(1); + const hasStack = stackedItems.length > 0; + const showCollapsedStackCap = hasStack && exitingItemId !== frontItem.id; + + const requestDismiss = (item: ComposerBannerStackItem) => { + if (!item.onDismiss || exitingItemId) { + return; + } + setExitingItemId(item.id); + if (dismissTimeoutRef.current) { + clearTimeout(dismissTimeoutRef.current); + } + dismissTimeoutRef.current = setTimeout(() => { + dismissTimeoutRef.current = null; + item.onDismiss?.(); + }, DISMISS_TRANSITION_MS); + }; + + return ( +
+
+ {showCollapsedStackCap ? ( + +
+ ); +} + +function ComposerBannerStackAlert({ + item, + exiting, + onDismissRequest, +}: { + readonly item: ComposerBannerStackItem; + readonly exiting: boolean; + readonly onDismissRequest: () => void; +}) { + const dismissOnly = item.onDismiss && !item.actions; + + return ( + + {item.icon} + {item.title} + {item.description ? {item.description} : null} + {item.actions || item.onDismiss ? ( + + {item.actions} + {item.onDismiss ? ( + + ) : null} + + ) : null} + + ); +} diff --git a/apps/web/src/components/chat/ComposerCommandMenu.tsx b/apps/web/src/components/chat/ComposerCommandMenu.tsx index 5d13e6593b4..f687ec7ba23 100644 --- a/apps/web/src/components/chat/ComposerCommandMenu.tsx +++ b/apps/web/src/components/chat/ComposerCommandMenu.tsx @@ -1,6 +1,6 @@ import { type ProjectEntry, - type ProviderKind, + type ProviderDriverKind, type ServerProviderSkill, type ServerProviderSlashCommand, } from "@t3tools/contracts"; @@ -39,7 +39,7 @@ export type ComposerCommandItem = | { id: string; type: "provider-slash-command"; - provider: ProviderKind; + provider: ProviderDriverKind; command: ServerProviderSlashCommand; label: string; description: string; @@ -47,7 +47,7 @@ export type ComposerCommandItem = | { id: string; type: "skill"; - provider: ProviderKind; + provider: ProviderDriverKind; skill: ServerProviderSkill; label: string; description: string; diff --git a/apps/web/src/components/chat/ComposerPrimaryActions.tsx b/apps/web/src/components/chat/ComposerPrimaryActions.tsx index 2bc40fe6ae8..fbeb9de30b8 100644 --- a/apps/web/src/components/chat/ComposerPrimaryActions.tsx +++ b/apps/web/src/components/chat/ComposerPrimaryActions.tsx @@ -1,4 +1,4 @@ -import { memo } from "react"; +import { memo, type PointerEventHandler } from "react"; import { ChevronDownIcon, ChevronLeftIcon } from "lucide-react"; import { cn } from "~/lib/utils"; import { Button } from "../ui/button"; @@ -20,8 +20,10 @@ interface ComposerPrimaryActionsProps { promptHasText: boolean; isSendBusy: 
boolean; isConnecting: boolean; + isEnvironmentUnavailable: boolean; isPreparingWorktree: boolean; hasSendableContent: boolean; + preserveComposerFocusOnPointerDown?: boolean; onPreviousPendingQuestion: () => void; onInterrupt: () => void; onImplementPlanInNewThread: () => void; @@ -45,6 +47,10 @@ export const formatPendingPrimaryActionLabel = (input: { return input.questionIndex > 0 ? "Submit answers" : "Submit answer"; }; +const preventPointerFocus: PointerEventHandler = (event) => { + event.preventDefault(); +}; + export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ compact, pendingAction, @@ -53,12 +59,18 @@ export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ promptHasText, isSendBusy, isConnecting, + isEnvironmentUnavailable, isPreparingWorktree, hasSendableContent, + preserveComposerFocusOnPointerDown = false, onPreviousPendingQuestion, onInterrupt, onImplementPlanInNewThread, }: ComposerPrimaryActionsProps) { + const pointerFocusProps = preserveComposerFocusOnPointerDown + ? { onPointerDown: preventPointerFocus } + : undefined; + if (pendingAction) { return (
@@ -68,6 +80,7 @@ export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ size="icon-sm" variant="outline" className="rounded-full" + {...pointerFocusProps} onClick={onPreviousPendingQuestion} disabled={pendingAction.isResponding} aria-label="Previous question" @@ -79,6 +92,7 @@ export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ size="sm" variant="outline" className="rounded-full" + {...pointerFocusProps} onClick={onPreviousPendingQuestion} disabled={pendingAction.isResponding} > @@ -90,7 +104,9 @@ export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ type="submit" size="sm" className={cn("rounded-full", compact ? "px-3" : "px-4")} + {...pointerFocusProps} disabled={ + isEnvironmentUnavailable || pendingAction.isResponding || (pendingAction.isLastQuestion ? !pendingAction.isComplete : !pendingAction.canAdvance) } @@ -111,6 +127,7 @@ export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ @@ -141,7 +159,8 @@ export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ type="submit" size="sm" className="h-9 rounded-l-full rounded-r-none px-4 sm:h-8" - disabled={isSendBusy || isConnecting} + {...pointerFocusProps} + disabled={isSendBusy || isConnecting || isEnvironmentUnavailable} > {isConnecting || isSendBusy ? "Sending..." 
: "Implement"} @@ -153,7 +172,8 @@ export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ variant="default" className="h-9 rounded-l-none rounded-r-full border-l-white/12 px-2 sm:h-8" aria-label="Implementation actions" - disabled={isSendBusy || isConnecting} + {...pointerFocusProps} + disabled={isSendBusy || isConnecting || isEnvironmentUnavailable} /> } > @@ -161,7 +181,7 @@ export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ void onImplementPlanInNewThread()} > Implement in a new thread @@ -176,15 +196,18 @@ export const ComposerPrimaryActions = memo(function ComposerPrimaryActions({ + } + /> + - - - } - /> - - Favorites - - -
-
+ Favorites +
+ +
+
+ ) : null} - {/* Provider buttons */} - {AVAILABLE_PROVIDER_OPTIONS.map((option) => { - const OptionIcon = PROVIDER_ICON_BY_PROVIDER[option.value]; - const liveProvider = props.providers - ? getProviderSnapshot(props.providers, option.value) - : undefined; + {/* Instance buttons (one per configured instance — built-in + custom) */} + {props.instanceEntries.map((entry) => { + const isDisabled = !entry.isAvailable || entry.status !== "ready"; + const isSelected = props.selectedInstanceId === entry.instanceId; + const showNewBadge = props.newBadgeInstanceIds?.has(entry.instanceId) ?? false; + const showInstanceBadge = + Boolean(entry.accentColor) || (duplicateDriverCounts.get(entry.driverKind) ?? 0) > 1; - const isDisabled = !liveProvider || liveProvider.status !== "ready"; - const isSelected = props.selectedProvider === option.value; - const badge = option.pickerSidebarBadge; + const tooltip = isDisabled + ? describeUnavailableInstance(entry) + : showNewBadge + ? `${entry.displayName} — New` + : entry.displayName; - const providerTooltip = isDisabled - ? describeUnavailableProvider(option.label, liveProvider) - : badge === "new" - ? `${option.label} — New` - : option.label; + const button = ( + + ); - const button = ( - - ); + const trigger = isDisabled ? ( + {button} + ) : ( + button + ); - const trigger = isDisabled ? ( - {button} - ) : ( - button - ); + return ( +
+ {isSelected &&
} + + + + {tooltip} + + +
+ ); + })} - return ( -
- {isSelected &&
} + {showComingSoon ? ( + <> + {/* Gemini button (coming soon) */} - + + + + } + /> - {providerTooltip} + Gemini — Coming soon -
- ); - })} -
+ {/* Github Copilot button (coming soon) */} + + + + + } + /> + + Github Copilot — Coming soon + + + + ) : null} +
+ ); }); diff --git a/apps/web/src/components/chat/ProviderInstanceIcon.tsx b/apps/web/src/components/chat/ProviderInstanceIcon.tsx new file mode 100644 index 00000000000..154cada19aa --- /dev/null +++ b/apps/web/src/components/chat/ProviderInstanceIcon.tsx @@ -0,0 +1,73 @@ +import { type CSSProperties, memo } from "react"; +import { type ProviderDriverKind } from "@t3tools/contracts"; + +import { PROVIDER_ICON_BY_PROVIDER } from "./providerIconUtils"; +import { cn } from "~/lib/utils"; + +export function providerInstanceInitials(label: string): string { + const words = label.replace(/[_-]+/g, " ").split(/\s+/u).filter(Boolean); + if (words.length === 0) return ""; + if (words.length === 1) return words[0]!.slice(0, 2).toUpperCase(); + return words + .slice(0, 2) + .map((word) => word[0]?.toUpperCase() ?? "") + .join(""); +} + +export const ProviderInstanceIcon = memo(function ProviderInstanceIcon(props: { + driverKind: ProviderDriverKind; + displayName: string; + accentColor?: string | undefined; + showBadge?: boolean; + className?: string; + iconClassName?: string; + badgeClassName?: string; + statusDotClassName?: string; +}) { + const Icon = PROVIDER_ICON_BY_PROVIDER[props.driverKind] ?? null; + const accentStyle = props.accentColor + ? ({ "--provider-accent": props.accentColor } as CSSProperties) + : undefined; + + return ( + + {Icon ? ( + + ) : ( + + {providerInstanceInitials(props.displayName)} + + )} + {props.statusDotClassName ? ( + + ) : null} + {props.showBadge ? 
( + + {providerInstanceInitials(props.displayName)} + + ) : null} + + ); +}); diff --git a/apps/web/src/components/chat/ProviderModelPicker.browser.tsx b/apps/web/src/components/chat/ProviderModelPicker.browser.tsx index c827febf042..d3b168876a6 100644 --- a/apps/web/src/components/chat/ProviderModelPicker.browser.tsx +++ b/apps/web/src/components/chat/ProviderModelPicker.browser.tsx @@ -1,12 +1,22 @@ -import { type ProviderKind, type ServerProvider } from "@t3tools/contracts"; +import { ProviderDriverKind, ProviderInstanceId, type ServerProvider } from "@t3tools/contracts"; import { EnvironmentId } from "@t3tools/contracts"; +import { createModelCapabilities } from "@t3tools/shared/model"; import { page, userEvent } from "vitest/browser"; import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; import { render } from "vitest-browser-react"; import { ProviderModelPicker } from "./ProviderModelPicker"; -import { getCustomModelOptionsByProvider } from "../../modelSelection"; -import { DEFAULT_CLIENT_SETTINGS, DEFAULT_UNIFIED_SETTINGS } from "@t3tools/contracts/settings"; +import { getCustomModelOptionsByInstance } from "../../modelSelection"; +import { + deriveProviderInstanceEntries, + sortProviderInstanceEntries, +} from "../../providerInstances"; +import type { ModelEsque } from "./providerIconUtils"; +import { + DEFAULT_CLIENT_SETTINGS, + DEFAULT_UNIFIED_SETTINGS, + type UnifiedSettings, +} from "@t3tools/contracts/settings"; import { __resetLocalApiForTests } from "../../localApi"; // Mock the environments/runtime module to provide a mock primary environment connection @@ -66,17 +76,35 @@ vi.mock("../../environments/runtime", () => { }; }); -function effort(value: string, isDefault = false) { +function selectDescriptor( + id: string, + label: string, + options: ReadonlyArray<{ id: string; label: string; isDefault?: boolean }>, +) { return { - value, - label: value, - ...(isDefault ? 
{ isDefault: true } : {}), + id, + label, + type: "select" as const, + options: [...options], + ...(options.find((option) => option.isDefault)?.id + ? { currentValue: options.find((option) => option.isDefault)?.id } + : {}), + }; +} + +function booleanDescriptor(id: string, label: string) { + return { + id, + label, + type: "boolean" as const, }; } const TEST_PROVIDERS: ReadonlyArray = [ { - provider: "codex", + driver: ProviderDriverKind.make("codex"), + instanceId: ProviderInstanceId.make("codex"), + displayName: "Codex", enabled: true, installed: true, version: "0.116.0", @@ -90,30 +118,38 @@ const TEST_PROVIDERS: ReadonlyArray = [ slug: "gpt-5-codex", name: "GPT-5 Codex", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + ]), + booleanDescriptor("fastMode", "Fast Mode"), + ], + }), }, { slug: "gpt-5.3-codex", name: "GPT-5.3 Codex", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + ]), + booleanDescriptor("fastMode", "Fast Mode"), + ], + }), }, ], }, { - provider: "claudeAgent", + driver: ProviderDriverKind.make("claudeAgent"), + instanceId: ProviderInstanceId.make("claudeAgent"), + displayName: "Claude", enabled: 
true, installed: true, version: "1.0.0", @@ -127,55 +163,62 @@ const TEST_PROVIDERS: ReadonlyArray = [ slug: "claude-opus-4-6", name: "Claude Opus 4.6", isCustom: false, - capabilities: { - reasoningEffortLevels: [ - effort("low"), - effort("medium", true), - effort("high"), - effort("max"), + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("effort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + { id: "max", label: "max" }, + ]), + booleanDescriptor("thinking", "Thinking"), ], - supportsFastMode: false, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + }), }, { slug: "claude-sonnet-4-6", name: "Claude Sonnet 4.6", isCustom: false, - capabilities: { - reasoningEffortLevels: [ - effort("low"), - effort("medium", true), - effort("high"), - effort("max"), + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("effort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + { id: "max", label: "max" }, + ]), + booleanDescriptor("thinking", "Thinking"), ], - supportsFastMode: false, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + }), }, { slug: "claude-haiku-4-5", name: "Claude Haiku 4.5", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: false, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("effort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + ]), + booleanDescriptor("thinking", "Thinking"), + ], + }), }, ], }, ]; +const 
CODEX_INSTANCE_ID = ProviderInstanceId.make("codex"); +const CLAUDE_INSTANCE_ID = ProviderInstanceId.make("claudeAgent"); +const OPENCODE_INSTANCE_ID = ProviderInstanceId.make("opencode"); + function buildCodexProvider(models: ServerProvider["models"]): ServerProvider { return { - provider: "codex", + driver: ProviderDriverKind.make("codex"), + instanceId: ProviderInstanceId.make("codex"), + displayName: "Codex", enabled: true, installed: true, version: "0.116.0", @@ -190,7 +233,8 @@ function buildCodexProvider(models: ServerProvider["models"]): ServerProvider { function buildOpenCodeProvider(models: ServerProvider["models"]): ServerProvider { return { - provider: "opencode", + driver: ProviderDriverKind.make("opencode"), + instanceId: ProviderInstanceId.make("opencode"), enabled: true, installed: true, version: "1.0.0", @@ -204,37 +248,47 @@ function buildOpenCodeProvider(models: ServerProvider["models"]): ServerProvider } async function mountPicker(props: { - provider: ProviderKind; + activeInstanceId?: ProviderInstanceId; model: string; - lockedProvider: ProviderKind | null; + lockedProvider: ProviderDriverKind | null; + lockedContinuationGroupKey?: string | null; providers?: ReadonlyArray; + settings?: UnifiedSettings; triggerVariant?: "ghost" | "outline"; }) { const host = document.createElement("div"); document.body.append(host); - const onProviderModelChange = vi.fn(); + const onInstanceModelChange = vi.fn(); const providers = props.providers ?? TEST_PROVIDERS; - const modelOptionsByProvider = getCustomModelOptionsByProvider( - DEFAULT_UNIFIED_SETTINGS, + const instanceEntries = sortProviderInstanceEntries(deriveProviderInstanceEntries(providers)); + const activeInstanceId = props.activeInstanceId ?? CODEX_INSTANCE_ID; + const modelOptionsByInstance = getCustomModelOptionsByInstance( + props.settings ?? 
DEFAULT_UNIFIED_SETTINGS, providers, - props.provider, + activeInstanceId, props.model, ); const screen = await render( , { container: host }, ); return { - onProviderModelChange, + onInstanceModelChange, + // Back-compat alias used by callers that still assert on the old callback + // name. Delegates to the instance-aware mock so existing expectations work. + get onProviderModelChange() { + return onInstanceModelChange; + }, cleanup: async () => { await screen.unmount(); host.remove(); @@ -277,7 +331,7 @@ describe("ProviderModelPicker", () => { it("shows provider sidebar in unlocked mode", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -298,7 +352,7 @@ describe("ProviderModelPicker", () => { it("shows favorites first in the provider sidebar", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -307,11 +361,9 @@ describe("ProviderModelPicker", () => { await page.getByRole("button").click(); await vi.waitFor(() => { - // Fork: sidebar order includes copilot between codex and claudeAgent. - expect(getSidebarProviderOrder().slice(0, 4)).toEqual([ + expect(getSidebarProviderOrder().slice(0, 3)).toEqual([ "favorites", "codex", - "copilot", "claudeAgent", ]); }); @@ -322,7 +374,7 @@ describe("ProviderModelPicker", () => { it("filters models by selected provider in sidebar", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -333,7 +385,7 @@ describe("ProviderModelPicker", () => { // Start with Claude models visible await vi.waitFor(() => { const text = document.body.textContent ?? 
""; - expect(text).not.toContain("GPT-5.3 Codex"); + expect(text).not.toContain("GPT-5 Codex"); expect(text).toContain("Claude Opus 4.6"); }); @@ -346,7 +398,7 @@ describe("ProviderModelPicker", () => { // Now should only show Codex models await vi.waitFor(() => { const listText = getModelPickerListText(); - expect(listText).toContain("GPT-5.3 Codex"); + expect(listText).toContain("GPT-5 Codex"); expect(listText).not.toContain("Claude Opus 4.6"); }); } finally { @@ -354,9 +406,37 @@ describe("ProviderModelPicker", () => { } }); + it("uses client model visibility and ordering preferences", async () => { + const mounted = await mountPicker({ + activeInstanceId: CLAUDE_INSTANCE_ID, + model: "claude-opus-4-6", + lockedProvider: null, + settings: { + ...DEFAULT_UNIFIED_SETTINGS, + providerModelPreferences: { + [CLAUDE_INSTANCE_ID]: { + hiddenModels: ["claude-opus-4-6"], + modelOrder: ["claude-haiku-4-5", "claude-sonnet-4-6"], + }, + }, + }, + }); + + try { + await page.getByRole("button").click(); + + await vi.waitFor(() => { + expect(getVisibleModelNames()).toEqual(["Claude Haiku 4.5", "Claude Sonnet 4.6"]); + expect(getModelPickerListText()).not.toContain("Claude Opus 4.6"); + }); + } finally { + await mounted.cleanup(); + } + }); + it("focuses the search input after selecting a sidebar provider", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -394,9 +474,9 @@ describe("ProviderModelPicker", () => { ); const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", - lockedProvider: "claudeAgent", + lockedProvider: ProviderDriverKind.make("claudeAgent"), }); try { @@ -406,14 +486,9 @@ describe("ProviderModelPicker", () => { const text = document.body.textContent ?? 
""; // Should show locked provider label expect(text).toContain("Claude"); - // Fork: production claudeAgent model list now includes Opus 4.7 and 4.5 - // in addition to Opus 4.6, Sonnet 4.6 and Haiku 4.5. Favorite (Sonnet) - // surfaces first, then remaining models in production order. expect(getVisibleModelNames()).toEqual([ "Claude Sonnet 4.6", - "Claude Opus 4.7", "Claude Opus 4.6", - "Claude Opus 4.5", "Claude Haiku 4.5", ]); }); @@ -423,31 +498,115 @@ describe("ProviderModelPicker", () => { } }); + it("keeps an instance sidebar in locked mode when that provider has multiple instances", async () => { + const defaultCodexModels: ServerProvider["models"] = [ + { + slug: "gpt-work", + name: "GPT Work", + isCustom: false, + capabilities: createModelCapabilities({ optionDescriptors: [] }), + }, + ]; + const personalCodexModels: ServerProvider["models"] = [ + { + slug: "gpt-personal", + name: "GPT Personal", + isCustom: false, + capabilities: createModelCapabilities({ optionDescriptors: [] }), + }, + ]; + const isolatedCodexModels: ServerProvider["models"] = [ + { + slug: "gpt-isolated", + name: "GPT Isolated", + isCustom: false, + capabilities: createModelCapabilities({ optionDescriptors: [] }), + }, + ]; + const providers: ReadonlyArray = [ + { + ...buildCodexProvider(defaultCodexModels), + instanceId: "codex" as ProviderInstanceId, + displayName: "Codex Work", + accentColor: "#2563eb", + continuation: { groupKey: "codex:home:/Users/julius/.codex" }, + }, + { + ...buildCodexProvider(personalCodexModels), + instanceId: "codex_personal" as ProviderInstanceId, + displayName: "Codex Personal", + accentColor: "#dc2626", + continuation: { groupKey: "codex:home:/Users/julius/.codex" }, + }, + { + ...buildCodexProvider(isolatedCodexModels), + instanceId: "codex_isolated" as ProviderInstanceId, + displayName: "Codex Isolated", + accentColor: "#16a34a", + continuation: { groupKey: "codex:home:/Users/julius/.codex_isolated" }, + }, + TEST_PROVIDERS[1]!, + ]; + const mounted 
= await mountPicker({ + activeInstanceId: "codex" as ProviderInstanceId, + model: "gpt-work", + lockedProvider: ProviderDriverKind.make("codex"), + lockedContinuationGroupKey: "codex:home:/Users/julius/.codex", + providers, + }); + + try { + await page.getByRole("button").click(); + + await vi.waitFor(() => { + expect(getSidebarProviderOrder()).toEqual(["codex", "codex_personal"]); + expect(getModelPickerListText()).not.toContain("Codex Isolated"); + expect( + document.querySelector('[data-model-picker-provider="codex_personal"]') + ?.dataset.providerAccentColor, + ).toBe("#dc2626"); + expect(getModelPickerListText()).toContain("Codex Work"); + expect(getVisibleModelNames()).toEqual(["GPT Work"]); + }); + + await page.getByRole("button", { name: "Codex Personal" }).click(); + + await vi.waitFor(() => { + expect(getModelPickerListText()).toContain("Codex Personal"); + expect(getVisibleModelNames()).toEqual(["GPT Personal"]); + }); + } finally { + await mounted.cleanup(); + } + }); + it("falls back to the active provider's first model when props.model belongs to another provider (#1982)", async () => { const host = document.createElement("div"); document.body.append(host); - const onProviderModelChange = vi.fn(); - const modelOptionsByProvider = { - claudeAgent: [ - { slug: "claude-opus-4-6", name: "Claude Opus 4.6" }, - { slug: "claude-sonnet-4-6", name: "Claude Sonnet 4.6" }, + const onInstanceModelChange = vi.fn(); + const modelOptionsByInstance = new Map>([ + [ + "claudeAgent" as ProviderInstanceId, + [ + { slug: "claude-opus-4-6", name: "Claude Opus 4.6" }, + { slug: "claude-sonnet-4-6", name: "Claude Sonnet 4.6" }, + ], ], - codex: [{ slug: "gpt-5-codex", name: "GPT-5 Codex" }], - cursor: [], - opencode: [], - copilot: [], - geminiCli: [], - amp: [], - kilo: [], - } as const; + ["codex" as ProviderInstanceId, [{ slug: "gpt-5-codex", name: "GPT-5 Codex" }]], + ["cursor" as ProviderInstanceId, []], + ["opencode" as ProviderInstanceId, []], + ]); + const 
instanceEntries = sortProviderInstanceEntries( + deriveProviderInstanceEntries(TEST_PROVIDERS), + ); const screen = await render( , { container: host }, ); @@ -466,13 +625,7 @@ describe("ProviderModelPicker", () => { } }); - // Fork: getCustomModelOptionsByProvider reads from the static MODEL_OPTIONS_BY_PROVIDER - // list (keyed on settings.providers[x].customModels) and does not merge live provider - // `models` — so subProvider / shortName metadata supplied via server-reported model - // entries never reaches the picker's trigger. Testing that enrichment path would - // require changing production wiring; see ChatComposer.modelOptionsByProvider which - // uses providerStatuses.models directly in the real app. - it.skip("uses the trigger label for locked opencode rows", async () => { + it("uses the trigger label for locked opencode rows", async () => { const providers: ReadonlyArray = [ buildOpenCodeProvider([ { @@ -481,20 +634,22 @@ describe("ProviderModelPicker", () => { subProvider: "GitHub Copilot", shortName: "Opus 4.5", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + ]), + ], + }), }, ]), ]; const mounted = await mountPicker({ - provider: "opencode", + activeInstanceId: OPENCODE_INSTANCE_ID, model: "github-copilot/claude-opus-4.5", - lockedProvider: "opencode", + lockedProvider: ProviderDriverKind.make("opencode"), providers, }); @@ -519,7 +674,7 @@ describe("ProviderModelPicker", () => { it("searches models by name in flat list", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: 
CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -549,9 +704,9 @@ describe("ProviderModelPicker", () => { it("supports arrow-key navigation in the model picker", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", - lockedProvider: "claudeAgent", + lockedProvider: ProviderDriverKind.make("claudeAgent"), }); try { @@ -560,13 +715,12 @@ describe("ProviderModelPicker", () => { const searchInput = page.getByPlaceholder("Search models..."); await userEvent.click(searchInput); await userEvent.keyboard("{ArrowDown}"); - // Fork: production claudeAgent list now starts with Claude Opus 4.7. await vi.waitFor(() => { const highlightedItem = document.querySelector( '[data-slot="combobox-item"][data-highlighted]', ); expect(highlightedItem).not.toBeNull(); - expect(highlightedItem?.textContent).toContain("Claude Opus 4.7"); + expect(highlightedItem?.textContent).toContain("Claude Opus 4.6"); }); await userEvent.keyboard("{ArrowDown}"); await vi.waitFor(() => { @@ -574,11 +728,14 @@ describe("ProviderModelPicker", () => { '[data-slot="combobox-item"][data-highlighted]', ); expect(highlightedItem).not.toBeNull(); - expect(highlightedItem?.textContent).toContain("Claude Opus 4.6"); + expect(highlightedItem?.textContent).toContain("Claude Sonnet 4.6"); }); await userEvent.keyboard("{Enter}"); - expect(mounted.onProviderModelChange).toHaveBeenCalledWith("claudeAgent", "claude-opus-4-6"); + expect(mounted.onProviderModelChange).toHaveBeenCalledWith( + "claudeAgent", + "claude-sonnet-4-6", + ); } finally { await mounted.cleanup(); } @@ -586,7 +743,7 @@ describe("ProviderModelPicker", () => { it("hides the provider sidebar while searching", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -610,7 +767,7 @@ describe("ProviderModelPicker", () => { 
it("closes the picker when escape is pressed in search", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -638,7 +795,7 @@ describe("ProviderModelPicker", () => { it("searches models by provider name", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -649,7 +806,7 @@ describe("ProviderModelPicker", () => { await vi.waitFor(() => { const text = document.body.textContent ?? ""; expect(text).toContain("Claude Opus 4.6"); - expect(text).not.toContain("GPT-5.3 Codex"); + expect(text).not.toContain("GPT-5 Codex"); }); // Search by provider name @@ -658,7 +815,7 @@ describe("ProviderModelPicker", () => { await vi.waitFor(() => { const listText = getModelPickerListText(); - expect(listText).toContain("GPT-5.3 Codex"); + expect(listText).toContain("GPT-5 Codex"); expect(listText).not.toContain("Claude Opus 4.6"); }); } finally { @@ -667,56 +824,57 @@ describe("ProviderModelPicker", () => { }); it("matches fuzzy multi-token queries across provider and model text", async () => { - // Fork: getCustomModelOptionsByProvider reads from the static - // MODEL_OPTIONS_BY_PROVIDER list and does not merge server-reported model - // names, so we assert against the static opencode option "Anthropic / - // Claude Opus 4.7" using a query that fuzzily matches across the combined - // provider/model search fields. 
const providers: ReadonlyArray = [ buildCodexProvider([ { - slug: "gpt-5.3-codex", - name: "GPT-5.3 Codex", + slug: "gpt-5-codex", + name: "GPT-5 Codex", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + ]), + booleanDescriptor("fastMode", "Fast Mode"), + ], + }), }, ]), buildOpenCodeProvider([ { - slug: "anthropic/claude-opus-4-7", - name: "Anthropic / Claude Opus 4.7", + slug: "github-copilot/claude-opus-4.7", + name: "Claude Opus 4.7", + subProvider: "GitHub Copilot", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + ]), + ], + }), }, ]), ]; const mounted = await mountPicker({ - provider: "opencode", - model: "anthropic/claude-opus-4-7", + activeInstanceId: OPENCODE_INSTANCE_ID, + model: "github-copilot/claude-opus-4.7", lockedProvider: null, providers, }); try { await page.getByRole("button").click(); - await page.getByPlaceholder("Search models...").fill("opcd anth op"); + await page.getByPlaceholder("Search models...").fill("coplt op"); await vi.waitFor(() => { const listText = getModelPickerListText(); - expect(listText).toContain("Anthropic / Claude Opus 4.7"); - expect(listText).not.toContain("GPT-5.3 Codex"); 
+ expect(listText).toContain("Claude Opus 4.7"); + expect(listText).not.toContain("GPT-5 Codex"); }); } finally { await mounted.cleanup(); @@ -724,24 +882,22 @@ describe("ProviderModelPicker", () => { }); it("renders each search result with its own provider branding", async () => { - // Fork: getCustomModelOptionsByProvider reads static MODEL_OPTIONS_BY_PROVIDER - // and does not merge subProvider metadata from server-reported models. - // Instead of asserting the "OpenCode · GitHub Copilot" combined label, we - // assert each matching row carries its own provider label alongside its - // model name (Claude for claudeAgent rows, OpenCode for opencode rows). const providers: ReadonlyArray = [ buildOpenCodeProvider([ { - slug: "anthropic/claude-opus-4-7", - name: "Anthropic / Claude Opus 4.7", + slug: "github-copilot/claude-opus-4.7", + name: "Claude Opus 4.7", + subProvider: "GitHub Copilot", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + ]), + ], + }), }, ]), { @@ -751,25 +907,24 @@ describe("ProviderModelPicker", () => { slug: "claude-opus-4-6", name: "Claude Opus 4.6", isCustom: false, - capabilities: { - reasoningEffortLevels: [ - effort("low"), - effort("medium", true), - effort("high"), - effort("max"), + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("effort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + { id: "max", label: "max" }, + ]), + booleanDescriptor("thinking", "Thinking"), ], - supportsFastMode: 
false, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + }), }, ], }, ]; const mounted = await mountPicker({ - provider: "opencode", - model: "anthropic/claude-opus-4-7", + activeInstanceId: OPENCODE_INSTANCE_ID, + model: "github-copilot/claude-opus-4.7", lockedProvider: null, providers, }); @@ -780,14 +935,8 @@ describe("ProviderModelPicker", () => { await vi.waitFor(() => { const listText = getModelPickerListText(); - // Both the claudeAgent and opencode results show in search. - expect(listText).toContain("Claude Opus 4.6"); - expect(listText).toContain("Anthropic / Claude Opus 4.7"); - // Each row carries its own provider label rather than sharing one. - expect(listText).toContain("OpenCode"); + expect(listText).toContain("OpenCode · GitHub Copilot"); expect(listText).toContain("Claude"); - // Anti-assertion: rows should not be conflated — "OpenCode" should - // never appear directly abutting the claudeAgent row's model name. expect(listText).not.toContain("OpenCodeClaude Opus 4.6"); }); } finally { @@ -799,7 +948,7 @@ describe("ProviderModelPicker", () => { localStorage.removeItem("t3code:client-settings:v1"); const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -844,7 +993,7 @@ describe("ProviderModelPicker", () => { localStorage.removeItem("t3code:client-settings:v1"); const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", lockedProvider: null, }); @@ -875,8 +1024,6 @@ describe("ProviderModelPicker", () => { }); it("shows favorited models first within the selected provider list", async () => { - // Fork: static codex model list starts with GPT-5.4, GPT-5.4 Mini, so - // after favoriting GPT-5.3 Codex it should float to the top of the list. 
localStorage.setItem( "t3code:client-settings:v1", JSON.stringify({ @@ -886,8 +1033,7 @@ describe("ProviderModelPicker", () => { ); const mounted = await mountPicker({ - provider: "codex", - model: "gpt-5.3-codex", + model: "gpt-5-codex", lockedProvider: null, }); @@ -896,7 +1042,7 @@ describe("ProviderModelPicker", () => { await page.getByRole("button", { name: "Codex", exact: true }).click(); await vi.waitFor(() => { - expect(getVisibleModelNames().slice(0, 2)).toEqual(["GPT-5.3 Codex", "GPT-5.4"]); + expect(getVisibleModelNames().slice(0, 2)).toEqual(["GPT-5.3 Codex", "GPT-5 Codex"]); }); } finally { await mounted.cleanup(); @@ -906,9 +1052,9 @@ describe("ProviderModelPicker", () => { it("dispatches callback with correct provider and model when selected", async () => { const mounted = await mountPicker({ - provider: "claudeAgent", + activeInstanceId: CLAUDE_INSTANCE_ID, model: "claude-opus-4-6", - lockedProvider: "claudeAgent", + lockedProvider: ProviderDriverKind.make("claudeAgent"), }); try { @@ -933,25 +1079,23 @@ describe("ProviderModelPicker", () => { } }); - // TODO: Fork's getCustomModelOptionsByProvider reads the static - // MODEL_OPTIONS_BY_PROVIDER list (which always contains gpt-5.3-codex-spark) - // and does not filter by server-reported models, so the "hidden" assertion - // cannot hold without rewiring production code. Re-enable once the picker - // sources model options from providers[x].models instead. 
- it.skip("only shows codex spark when the server reports it", async () => { + it("only shows codex spark when the server reports it", async () => { const providersWithoutSpark: ReadonlyArray = [ buildCodexProvider([ { slug: "gpt-5.3-codex", name: "GPT-5.3 Codex", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + ]), + booleanDescriptor("fastMode", "Fast Mode"), + ], + }), }, ]), TEST_PROVIDERS[1]!, @@ -962,32 +1106,37 @@ describe("ProviderModelPicker", () => { slug: "gpt-5.3-codex", name: "GPT-5.3 Codex", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { id: "high", label: "high" }, + ]), + booleanDescriptor("fastMode", "Fast Mode"), + ], + }), }, { slug: "gpt-5.3-codex-spark", name: "GPT-5.3 Codex Spark", isCustom: false, - capabilities: { - reasoningEffortLevels: [effort("low"), effort("medium", true), effort("high")], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, + capabilities: createModelCapabilities({ + optionDescriptors: [ + selectDescriptor("reasoningEffort", "Reasoning", [ + { id: "low", label: "low" }, + { id: "medium", label: "medium", isDefault: true }, + { 
id: "high", label: "high" }, + ]), + booleanDescriptor("fastMode", "Fast Mode"), + ], + }), }, ]), TEST_PROVIDERS[1]!, ]; const hidden = await mountPicker({ - provider: "codex", model: "gpt-5.3-codex", lockedProvider: null, providers: providersWithoutSpark, @@ -1006,7 +1155,6 @@ describe("ProviderModelPicker", () => { } const visible = await mountPicker({ - provider: "codex", model: "gpt-5.3-codex", lockedProvider: null, providers: providersWithSpark, @@ -1026,7 +1174,7 @@ describe("ProviderModelPicker", () => { it("shows disabled providers grayed out in sidebar", async () => { const disabledProviders = TEST_PROVIDERS.slice(); const claudeIndex = disabledProviders.findIndex( - (provider) => provider.provider === "claudeAgent", + (provider) => provider.instanceId === ProviderInstanceId.make("claudeAgent"), ); if (claudeIndex >= 0) { const claudeProvider = disabledProviders[claudeIndex]!; @@ -1038,8 +1186,7 @@ describe("ProviderModelPicker", () => { } const mounted = await mountPicker({ - provider: "codex", - model: "gpt-5.3-codex", + model: "gpt-5-codex", lockedProvider: null, providers: disabledProviders, }); @@ -1048,11 +1195,10 @@ describe("ProviderModelPicker", () => { await page.getByRole("button").click(); await vi.waitFor(() => { - const listText = getModelPickerListText(); - // Codex (enabled, selected) models are visible. - expect(listText).toContain("GPT-5.3 Codex"); - // Disabled claudeAgent models should not appear in the list. - expect(listText).not.toContain("Claude Opus 4.6"); + const text = document.body.textContent ?? 
""; + expect(text).toContain("GPT-5 Codex"); + // Disabled provider should not have its models shown + expect(text).not.toContain("Claude Opus 4.6"); }); } finally { await mounted.cleanup(); @@ -1061,7 +1207,6 @@ describe("ProviderModelPicker", () => { it("accepts outline trigger styling", async () => { const mounted = await mountPicker({ - provider: "codex", model: "gpt-5-codex", lockedProvider: null, triggerVariant: "outline", diff --git a/apps/web/src/components/chat/ProviderModelPicker.logic.test.ts b/apps/web/src/components/chat/ProviderModelPicker.logic.test.ts deleted file mode 100644 index 60a44bc4590..00000000000 --- a/apps/web/src/components/chat/ProviderModelPicker.logic.test.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { describe, expect, it } from "vitest"; -import { MODEL_OPTIONS_BY_PROVIDER } from "@t3tools/contracts"; - -import { resolveModelOptionsByProvider } from "../../providerModelOptions"; - -const EMPTY_CUSTOM_MODELS = { - customCodexModels: [], - customCopilotModels: [], - customClaudeModels: [], - customCursorModels: [], - customOpencodeModels: [], - customGeminiCliModels: [], - customAmpModels: [], - customKiloModels: [], -} as const; - -describe("resolveModelOptionsByProvider", () => { - it("keeps built-in model catalogs when the server has no snapshot for a provider", () => { - const modelOptions = resolveModelOptionsByProvider(EMPTY_CUSTOM_MODELS); - - expect(modelOptions.copilot.length).toBeGreaterThan(0); - expect(modelOptions.cursor.length).toBeGreaterThan(0); - expect(modelOptions.opencode.length).toBeGreaterThan(0); - expect(modelOptions.geminiCli.length).toBeGreaterThan(0); - expect(modelOptions.amp.length).toBeGreaterThan(0); - expect(modelOptions.kilo.length).toBeGreaterThan(0); - for (const provider of ["copilot", "cursor", "opencode", "geminiCli", "amp", "kilo"] as const) { - expect( - modelOptions[provider].some( - (option) => option.slug === MODEL_OPTIONS_BY_PROVIDER[provider][0]?.slug, - ), - ).toBe(true); - } - }); - - 
it("merges discovered provider models on top of the built-in fallback list", () => { - const modelOptions = resolveModelOptionsByProvider({ - ...EMPTY_CUSTOM_MODELS, - discovered: { - opencode: [ - { slug: "openai/gpt-5", name: "OpenAI / GPT-5", connected: true }, - { slug: "anthropic/sonnet", name: "Anthropic / Sonnet", connected: false }, - ], - kilo: [{ slug: "openai/gpt-5", name: "OpenAI / GPT-5", connected: true }], - copilot: [{ slug: "gpt-5.4", name: "GPT-5.4", pricingTier: "1x" }], - }, - }); - - expect(modelOptions.opencode[0]).toEqual({ - slug: "anthropic/sonnet", - name: "Anthropic / Sonnet", - connected: false, - }); - expect(modelOptions.opencode).toContainEqual({ - slug: "openai/gpt-5", - name: "OpenAI / GPT-5", - connected: true, - isCustom: false, - }); - expect(modelOptions.kilo).toContainEqual({ - slug: "openai/gpt-5", - name: "OpenAI / GPT-5", - connected: true, - isCustom: false, - }); - expect(modelOptions.copilot.find((option) => option.slug === "gpt-5.4")?.pricingTier).toBe( - "1x", - ); - }); - - it("retains copilot fallback models when discovery only returns a partial snapshot", () => { - const baseCopilotModels = MODEL_OPTIONS_BY_PROVIDER.copilot.map((option) => option.slug); - const firstFallbackSlug = baseCopilotModels[0]; - const secondFallbackSlug = baseCopilotModels.find((slug) => slug !== firstFallbackSlug); - - expect(firstFallbackSlug).toBeTruthy(); - expect(secondFallbackSlug).toBeTruthy(); - - const modelOptions = resolveModelOptionsByProvider({ - ...EMPTY_CUSTOM_MODELS, - discovered: { - copilot: [{ slug: firstFallbackSlug!, name: "Refreshed Copilot Model", pricingTier: "1x" }], - }, - }); - - expect(modelOptions.copilot.find((option) => option.slug === firstFallbackSlug)).toMatchObject({ - slug: firstFallbackSlug, - name: "Refreshed Copilot Model", - pricingTier: "1x", - }); - expect(modelOptions.copilot.some((option) => option.slug === secondFallbackSlug)).toBe(true); - }); -}); diff --git 
a/apps/web/src/components/chat/ProviderModelPicker.tsx b/apps/web/src/components/chat/ProviderModelPicker.tsx index 4f4140f834d..4a2860ee469 100644 --- a/apps/web/src/components/chat/ProviderModelPicker.tsx +++ b/apps/web/src/components/chat/ProviderModelPicker.tsx @@ -1,9 +1,9 @@ import { - type ProviderKind, + type ProviderInstanceId, + type ProviderDriverKind, type ResolvedKeybindingsConfig, - type ServerProvider, } from "@t3tools/contracts"; -import { memo, useEffect, useState } from "react"; +import { memo, useEffect, useMemo, useState } from "react"; import type { VariantProps } from "class-variance-authority"; import { ChevronDownIcon } from "lucide-react"; import { Button, buttonVariants } from "../ui/button"; @@ -11,21 +11,28 @@ import { Popover, PopoverPopup, PopoverTrigger } from "../ui/popover"; import { Tooltip, TooltipPopup, TooltipTrigger } from "../ui/tooltip"; import { cn } from "~/lib/utils"; import { ModelPickerContent } from "./ModelPickerContent"; +import { ProviderInstanceIcon } from "./ProviderInstanceIcon"; import { ModelEsque, - PROVIDER_ICON_BY_PROVIDER, getTriggerDisplayModelLabel, getTriggerDisplayModelName, } from "./providerIconUtils"; import { setModelPickerOpen } from "../../modelPickerOpenState"; +import type { ProviderInstanceEntry } from "../../providerInstances"; export const ProviderModelPicker = memo(function ProviderModelPicker(props: { - provider: ProviderKind; + /** + * The instance currently selected in the composer. Drives the trigger + * icon, label and the default-highlighted combobox row. + */ + activeInstanceId: ProviderInstanceId; model: string; - lockedProvider: ProviderKind | null; - providers?: ReadonlyArray; + lockedProvider: ProviderDriverKind | null; + lockedContinuationGroupKey?: string | null; + /** Instance entries rendered in the sidebar + used to resolve display name. 
*/ + instanceEntries: ReadonlyArray; keybindings?: ResolvedKeybindingsConfig; - modelOptionsByProvider: Record>; + modelOptionsByInstance: ReadonlyMap>; activeProviderIconClassName?: string; compact?: boolean; disabled?: boolean; @@ -34,22 +41,36 @@ export const ProviderModelPicker = memo(function ProviderModelPicker(props: { triggerVariant?: VariantProps["variant"]; triggerClassName?: string; onOpenChange?: (open: boolean) => void; - onProviderModelChange: (provider: ProviderKind, model: string) => void; + onInstanceModelChange: (instanceId: ProviderInstanceId, model: string) => void; }) { const [uncontrolledIsMenuOpen, setUncontrolledIsMenuOpen] = useState(false); - const activeProvider = props.lockedProvider ?? props.provider; const isMenuOpen = props.open ?? uncontrolledIsMenuOpen; - const selectedProviderOptions = props.modelOptionsByProvider[activeProvider]; - // If the current slug belongs to a different provider (for example after a provider - // switch or disable), prefer the active provider's first option so the trigger icon - // and label stay in sync instead of showing a stale foreign slug. + + // Resolve the active instance entry by exact routing key. The composer + // resolves fallbacks before rendering this component; if the selected + // instance disappears, do not infer a replacement from its driver kind. + const activeEntry = useMemo(() => { + return ( + props.instanceEntries.find((entry) => entry.instanceId === props.activeInstanceId) ?? null + ); + }, [props.activeInstanceId, props.instanceEntries]); + + const activeInstanceId = props.activeInstanceId; + const selectedInstanceOptions = props.modelOptionsByInstance.get(activeInstanceId) ?? []; + // If the current slug belongs to a different instance (for example after + // a provider switch or disable), prefer the active instance's first + // option so the trigger icon and label stay in sync instead of showing + // a stale foreign slug. 
const selectedModel = - selectedProviderOptions.find((option) => option.slug === props.model) ?? - selectedProviderOptions[0]; - const ProviderIcon = PROVIDER_ICON_BY_PROVIDER[activeProvider]; + selectedInstanceOptions.find((option) => option.slug === props.model) ?? + selectedInstanceOptions[0]; const triggerTitle = selectedModel ? getTriggerDisplayModelName(selectedModel) : props.model; const triggerSubtitle = selectedModel?.subProvider; const triggerLabel = selectedModel ? getTriggerDisplayModelLabel(selectedModel) : props.model; + const duplicateDriverCount = props.instanceEntries.filter( + (entry) => activeEntry !== null && entry.driverKind === activeEntry.driverKind, + ).length; + const showInstanceBadge = Boolean(activeEntry?.accentColor) || duplicateDriverCount > 1; const setIsMenuOpen = (open: boolean) => { props.onOpenChange?.(open); @@ -65,9 +86,9 @@ export const ProviderModelPicker = memo(function ProviderModelPicker(props: { }; }, [isMenuOpen]); - const handleProviderModelChange = (provider: ProviderKind, model: string) => { + const handleInstanceModelChange = (instanceId: ProviderInstanceId, model: string) => { if (props.disabled) return; - props.onProviderModelChange(provider, model); + props.onInstanceModelChange(instanceId, model); setIsMenuOpen(false); }; @@ -103,10 +124,17 @@ export const ProviderModelPicker = memo(function ProviderModelPicker(props: { props.compact ? "max-w-36 sm:pl-1" : undefined, )} > -