diff --git a/.env.development.example b/.env.development.example new file mode 100644 index 000000000..eab671044 --- /dev/null +++ b/.env.development.example @@ -0,0 +1,45 @@ +# Development environment +# Copy to .env.development (or run: deno task dev:setup) + +# Runtime environment +NODE_ENV=development + +# Zzz app directory (config, state, cache, runtime) +PUBLIC_ZZZ_DIR="./.zzz" + +# Comma-separated filesystem paths Zzz can access (e.g., "./projects,~/code") +PUBLIC_ZZZ_SCOPED_DIRS="./src/test/fs1,./src/test/fs2" + +# Server (BaseServerEnv) +PORT=8999 +HOST=localhost + +# Frontend SvelteKit env vars (used by $env/static/public in constants.ts) +PUBLIC_SERVER_PROTOCOL=http +PUBLIC_SERVER_HOST=localhost +PUBLIC_SERVER_PORT=5173 +PUBLIC_SERVER_API_PATH="/api" +PUBLIC_SERVER_PROXIED_PORT=8999 +PUBLIC_WEBSOCKET_URL=ws://localhost:8999/api/ws + +# Debug delay in milliseconds for API responses (0 = no delay) +PUBLIC_BACKEND_ARTIFICIAL_RESPONSE_DELAY=0 + +# Database (PGlite in-memory for development) +DATABASE_URL=postgres://localhost/zzz + +# Auth - cookie signing key (generate with: openssl rand -base64 32) +SECRET_COOKIE_KEYS=dev-only-not-for-production-use-000 + +# Security - allowed origins for API requests +# Patterns: https://example.com, https://*.example.com, http://localhost:* +ALLOWED_ORIGINS=http://localhost:* + +# Bootstrap token path (for initial admin account creation) +BOOTSTRAP_TOKEN_PATH=.zzz/bootstrap_token + +# AI provider API keys (optional, for remote providers) +SECRET_OPENAI_API_KEY= +SECRET_ANTHROPIC_API_KEY= +SECRET_GOOGLE_API_KEY= +SECRET_GITHUB_API_TOKEN= diff --git a/.env.production.example b/.env.production.example new file mode 100644 index 000000000..38a6a400e --- /dev/null +++ b/.env.production.example @@ -0,0 +1,39 @@ +# Production environment +# Copy to .env.production (or run: deno task prod:setup) + +# Zzz app directory (config, state, cache, runtime) +PUBLIC_ZZZ_DIR="./.zzz" + +# Comma-separated filesystem paths Zzz can access 
+PUBLIC_ZZZ_SCOPED_DIRS= + +# Server (BaseServerEnv) +PORT=8999 +HOST=localhost +DATABASE_URL=postgres://localhost/zzz + +# Auth - cookie signing key (generate with: openssl rand -base64 32) +SECRET_COOKIE_KEYS=CHANGE_ME_generate_with_openssl_rand_base64_32 + +# Security - allowed origins for API requests +ALLOWED_ORIGINS=http://localhost:* + +# Bootstrap token path (for initial admin account creation) +# BOOTSTRAP_TOKEN_PATH=.zzz/bootstrap_token + +# Frontend SvelteKit env vars (used by $env/static/public in constants.ts) +PUBLIC_SERVER_PROTOCOL=http +PUBLIC_SERVER_HOST=localhost +PUBLIC_SERVER_PORT=8999 +PUBLIC_SERVER_API_PATH="/api" +PUBLIC_WEBSOCKET_URL=ws://localhost:8999/api/ws +PUBLIC_SERVER_PROXIED_PORT=8999 + +# Debug delay in milliseconds for API responses (0 = no delay) +PUBLIC_BACKEND_ARTIFICIAL_RESPONSE_DELAY=0 + +# AI provider API keys (optional, for remote providers) +SECRET_OPENAI_API_KEY= +SECRET_ANTHROPIC_API_KEY= +SECRET_GOOGLE_API_KEY= +SECRET_GITHUB_API_TOKEN= diff --git a/CLAUDE.md b/CLAUDE.md index 0a5e65b8b..f32f6432f 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -2,9 +2,9 @@ > nice web things for the tired -`@fuzdev/zzz` — local-first AI forge: chat + files + prompts in one app. -SvelteKit frontend, Hono/Node.js backend, Svelte 5 runes, Zod schemas. -v0.0.1, no auth, no database yet. 26 cell classes, 20 action specs, 4 AI providers. +`@fuzdev/zzz` — local-first AI forge: chat + files + prompts + terminals in one app. +SvelteKit frontend, Hono/Deno backend, Svelte 5 runes, Zod schemas. +v0.0.1. fuz_app auth stack (sessions, bearer tokens, bootstrap), PGlite DB. 32 cell classes, 29 action specs, 4 AI providers. For coding conventions, see [`fuz-stack`](../fuz-stack/CLAUDE.md). @@ -14,7 +14,8 @@ For coding conventions, see [`fuz-stack`](../fuz-stack/CLAUDE.md). 2. **Edit files** on disk — scoped filesystem, syntax highlighting, multi-tab editor 3. **Build prompts** — reusable content templates composed from text parts and file references 4. 
**Manage models** — Ollama local models + Claude/ChatGPT/Gemini via BYOK API keys -5. **Symmetric actions** — JSON-RPC 2.0 between frontend and backend, same ActionPeer on both sides +5. **Run terminals** — interactive PTY terminals via xterm.js with preset commands, contextmenu copy, and restart +6. **Symmetric actions** — JSON-RPC 2.0 between frontend and backend, same ActionPeer on both sides ## Key Principles @@ -25,36 +26,80 @@ For coding conventions, see [`fuz-stack`](../fuz-stack/CLAUDE.md). ## Development Stage -Early development, v0.0.1. Breaking changes are expected and welcome. No authentication — development use only. All state is in-memory (no database yet). The Hono/Node.js backend is a reference implementation that may be replaced by a Rust daemon (`fuzd`). +Early development, v0.0.1. Breaking changes are expected and welcome. fuz_app auth stack on both RPC and WebSocket endpoints (cookie sessions, bearer tokens, daemon tokens, bootstrap flow); WebSocket upgrade requires authentication with event-driven session revocation. PostgreSQL DB for auth; domain state (files, terminals) still in-memory. The Hono/Deno backend is the reference implementation. A Rust backend (`crates/zzz_server`) is in development — Phase 4 (AI provider system: Anthropic fully implemented with SSE streaming, OpenAI/Gemini/Ollama stubs) in progress atop Phase 3 (full auth stack, filesystem, terminals, PostgreSQL, bootstrap) with 79 integration tests verifying parity. Long-term the CLI and daemon migrate to Rust fuz/fuzd. See [GitHub issues](https://github.com/fuzdev/zzz/issues) for planned work. +## CLI + +zzz has a Deno-compiled CLI binary for daemon management and browser launching. +See [src/lib/zzz/CLAUDE.md](src/lib/zzz/CLAUDE.md) for full CLI architecture. 
+ +```bash +zzz # start daemon if needed, open browser +zzz ~/dev/ # open workspace at ~/dev/ +zzz daemon start # start daemon (foreground) +zzz daemon status # show daemon info +zzz init # initialize ~/.zzz/ +``` + +The global daemon runs on port 4460 with state at `~/.zzz/`. Built via +`gro_plugin_deno_compile` (see `gro.config.ts` and `deno.json`). + ## Docs - [docs/architecture.md](docs/architecture.md) — Action system, Cell system, content model, data flow - [docs/development.md](docs/development.md) — Development workflow, extension points, patterns - [docs/providers.md](docs/providers.md) — AI provider integration, adding new providers - [src/lib/server/CLAUDE.md](src/lib/server/CLAUDE.md) — Backend server architecture, providers, security +- [src/lib/zzz/CLAUDE.md](src/lib/zzz/CLAUDE.md) — CLI architecture, commands, runtime abstraction +- [crates/CLAUDE.md](crates/CLAUDE.md) — Rust backend (zzz_server) ## Repository Structure ``` +crates/ # Rust workspace +│ ├── CLAUDE.md # Rust backend docs +│ └── zzz_server/ # Axum JSON-RPC server (Phase 2b: auth + fs) +│ └── src/ +│ ├── main.rs # Entry point, config, DB/keyring init, shutdown +│ ├── handlers.rs # App state, Ctx (per-request + auth), dispatch +│ ├── rpc.rs # JSON-RPC classify, HTTP handler with auth pipeline +│ ├── ws.rs # WebSocket handler (no auth yet) +│ ├── auth.rs # Keyring, cookie parsing, session validation, auth checks +│ ├── bootstrap.rs # POST /bootstrap (first admin account creation) +│ ├── db.rs # Connection pool, migrations, auth queries +│ ├── scoped_fs.rs # Scoped filesystem (path validation, symlink rejection) +│ └── error.rs # Error types +test/ +│ └── integration/ # Cross-backend integration tests (Deno) +│ ├── run.ts # Test runner (--backend=deno|rust|both) +│ ├── config.ts # Backend configurations +│ └── tests.ts # Test cases src/ ├── lib/ # Published as @fuzdev/zzz -│ ├── server/ # Backend (Hono/Node.js reference impl) +│ ├── server/ # Backend (Hono/Deno reference impl) │ │ ├── 
backend.ts -│ │ ├── server.ts -│ │ ├── backend_action_handlers.ts +│ │ ├── server.ts # Deno server entry (dev + production) +│ │ ├── zzz_action_handlers.ts # Unified handlers — single source of truth +│ │ ├── zzz_rpc_actions.ts # Thin adapter for fuz_app RPC format +│ │ ├── register_websocket_actions.ts # WS dispatch with direct handler calls │ │ ├── backend_provider_*.ts # Ollama, Claude, ChatGPT, Gemini +│ │ ├── pty_ffi.ts # Deno FFI bindings for libfuz_pty.so +│ │ ├── backend_pty_manager.ts # PTY process management (FFI or fallback) │ │ ├── scoped_fs.ts -│ │ ├── security.ts -│ │ └── backend_action_types.gen.ts +│ │ └── security.ts │ │ -│ ├── *.svelte.ts # Cell state classes (26 classes) -│ ├── action_specs.ts # All 20 action spec definitions -│ ├── action_spec.ts # ActionSpec schema -│ ├── action_event.ts # Action lifecycle state machine -│ ├── action_peer.ts # Symmetric send/receive +│ ├── zzz/ # CLI (Deno compiled binary) +│ │ ├── main.ts # Entry point (deno compile target) +│ │ ├── cli.ts # Arg parsing wrapper +│ │ ├── cli_config.ts # ~/.zzz/config.json +│ │ ├── runtime/ # ZzzRuntime abstraction +│ │ ├── cli/ # CLI infrastructure +│ │ └── commands/ # init, daemon, open, status +│ │ +│ ├── *.svelte.ts # Cell state classes (28 classes) +│ ├── action_specs.ts # All 29 action spec definitions │ ├── cell.svelte.ts # Base Cell class │ ├── cell_classes.ts # Cell class registry │ ├── indexed_collection.svelte.ts @@ -64,7 +109,7 @@ src/ │ ├── frontend_action_types.gen.ts │ └── action_metatypes.gen.ts │ -├── routes/ # SvelteKit routes (16 dirs) +├── routes/ # SvelteKit routes (17 dirs) │ ├── about/ │ ├── actions/ │ ├── bots/ @@ -80,7 +125,9 @@ src/ │ ├── repos/ │ ├── settings/ │ ├── tabs/ -│ └── views/ +│ ├── terminals/ +│ ├── views/ +│ └── workspaces/ │ ├── test/ # Tests (not co-located) │ ├── cell.svelte.*.test.ts @@ -101,7 +148,7 @@ See [docs/architecture.md](docs/architecture.md) for detailed data flow, content ## Cell Classes -26 registered classes in 
`src/lib/cell_classes.ts`: +32 registered classes in `src/lib/cell_classes.ts`: | Class | Source file | Purpose | | ---------------- | ------------------------------ | ------------------------------------ | @@ -129,12 +176,16 @@ See [docs/architecture.md](docs/architecture.md) for detailed data flow, content | `Turn` | `turn.svelte.ts` | Single conversation message | | `Thread` | `thread.svelte.ts` | Linear conversation with one model | | `Threads` | `threads.svelte.ts` | Collection of threads | +| `Terminal` | `terminal.svelte.ts` | PTY terminal process state | +| `TerminalPreset` | `terminal_preset.svelte.ts` | Saved terminal command config | | `Time` | `time.svelte.ts` | Reactive time state | | `Ui` | `ui.svelte.ts` | UI state (menus, layout) | +| `Workspace` | `workspace.svelte.ts` | Open workspace directory | +| `Workspaces` | `workspaces.svelte.ts` | Collection of workspaces | ## Action Specs -20 specs in `src/lib/action_specs.ts`: +29 specs in `src/lib/action_specs.ts`: | Method | Kind | Initiator | Purpose | | ------------------------ | --------------------- | ---------- | -------------------------------- | @@ -158,27 +209,84 @@ See [docs/architecture.md](docs/architecture.md) for detailed data flow, content | `ollama_unload` | `request_response` | `frontend` | Unload Ollama model from memory | | `provider_load_status` | `request_response` | `frontend` | Check provider availability | | `provider_update_api_key`| `request_response` | `frontend` | Update provider API key | +| `terminal_create` | `request_response` | `frontend` | Spawn PTY terminal process | +| `terminal_data_send` | `request_response` | `frontend` | Send stdin to terminal | +| `terminal_data` | `remote_notification` | `backend` | Stream stdout/stderr to frontend | +| `terminal_resize` | `request_response` | `frontend` | Update PTY dimensions | +| `terminal_close` | `request_response` | `frontend` | Kill terminal process | +| `workspace_open` | `request_response` | `frontend` | Open workspace 
directory | +| `workspace_close` | `request_response` | `frontend` | Close workspace directory | +| `workspace_list` | `request_response` | `frontend` | List open workspaces | +| `workspace_changed` | `remote_notification` | `backend` | Workspace open/close notification| ## Development Workflow ### Setup ```bash -cp src/lib/server/.env.development.example .env.development +deno task dev:setup npm install + +# Optional: build fuz_pty for real PTY support (echo, prompts, colors, resize) +# Without this, terminals fall back to Deno.Command pipes (no interactivity) +cd ~/dev/private_fuz && cargo build -p fuz_pty --release ``` ### Daily Commands | Command | Purpose | | --------------- | ------------------------------------------ | +| `deno task dev` | Dev server: Rust backend + Vite frontend | | `gro check` | All checks (typecheck, test, gen, format, lint) | | `gro typecheck` | Type checking only (faster iteration) | -| `gro test` | Run Vitest tests | +| `gro test` | Run Vitest unit tests | +| `deno task test` | All tests (Vitest + integration) | +| `deno task test:integration` | Cross-backend parity tests (Rust + Deno) | | `gro gen` | Regenerate `*.gen.ts` files | | `gro format` | Format with Prettier | | `gro build` | Production build | +Two dev server modes: +- **`deno task dev`** — Rust `zzz_server` backend + Vite frontend (preferred) +- **`gro dev`** — Deno/Hono backend + Vite frontend (legacy, still works) + +### Rust Backend + +Shadow implementation of the Deno server using axum. Same `/api/*` route +paths as the Deno server — both backends are interchangeable from the +frontend's perspective. 16 RPC methods: `ping`, `session_load`, `workspace_*`, +`diskfile_update`, `diskfile_delete`, `directory_create`, `terminal_create`, +`terminal_data_send`, `terminal_resize`, `terminal_close`, +`provider_load_status`, `provider_update_api_key` (keeper-only), +`completion_create`. 
Cookie session auth and bearer token auth (API tokens) +on HTTP and WebSocket, `ScopedFs` path safety, PTY terminals via `fuz_pty` +native crate, and WebSocket connection tracking (`broadcast`/`send_to`). +PostgreSQL via `tokio-postgres`/`deadpool-postgres`, HMAC-SHA256 cookie +signing, blake3 session/token hashing, per-action auth checks with credential +type enforcement, bootstrap endpoint. AI provider system with enum-dispatched +providers — Anthropic fully implemented (non-streaming + SSE streaming with +connection-targeted `completion_progress` notifications), OpenAI/Gemini/Ollama +stubs. The Deno server is ground truth — 79 integration tests on both backends +(all cross-backend, 0 skips) verify identical JSON-RPC responses. + +```bash +cargo build -p zzz_server # Build +cargo clippy -p zzz_server # Lint +./target/debug/zzz_server --port 1174 # Run (requires DATABASE_URL, SECRET_COOKIE_KEYS) +deno task dev # Dev server: Rust backend + Vite frontend +deno task test:integration --backend=rust # Integration tests (Rust) +deno task test:integration --backend=deno # Integration tests (Deno) +deno task test:integration --backend=both # Both (default, shows comparison) +deno task test:integration --filter=ping # Substring match on test name +``` + +Requires `~/dev/private_fuz` as a sibling directory (path deps) and PostgreSQL +(`createdb zzz_test` for integration tests). Both backends share the same test +database (`TEST_DATABASE_URL`, defaults to `postgres://localhost/zzz_test`), +cleaned between runs. +See [crates/CLAUDE.md](crates/CLAUDE.md) for architecture, endpoints, +prerequisites, and what the integration tests check. + ### Naming Conventions | Thing | Convention | Example | @@ -203,12 +311,12 @@ export const ChatJson = CellJson.extend({ selected_thread_id: Uuid.nullable().default(null), }).meta({cell_class_name: 'Chat'}); -// 2. Class with $state for schema fields, $derived for computed +// 2. 
Class with $state.raw for most fields, $state for in-place-mutated arrays export class Chat extends Cell { - name: string = $state()!; - thread_ids: Array = $state()!; - view_mode: ChatViewMode = $state()!; - selected_thread_id: Uuid | null = $state()!; + name: string = $state.raw()!; + thread_ids: Array = $state()!; // $state because push/splice used + view_mode: ChatViewMode = $state.raw()!; + selected_thread_id: Uuid | null = $state.raw()!; readonly threads: Array = $derived.by(() => { const result: Array = []; @@ -233,9 +341,10 @@ Each action is a plain object with Zod schemas for input/output: ```typescript export const diskfile_update_action_spec = { method: 'diskfile_update', + description: 'Write content to a file on disk', kind: 'request_response', initiator: 'frontend', - auth: 'public', + auth: 'authenticated', side_effects: true, input: z.strictObject({ path: DiskfilePath, @@ -254,6 +363,70 @@ Action kinds: | `remote_notification` | WebSocket only | Backend pushes to frontend | | `local_call` | None (in-process) | Frontend-only | +### Adding an Action (End-to-End) + +Adding a new action touches up to 5 files. Here's the full workflow: + +**1. Define the spec** in `src/lib/action_specs.ts`: + +```typescript +export const my_action_spec = { + method: 'my_action', + kind: 'request_response', // or 'remote_notification', 'local_call' + initiator: 'frontend', // or 'backend', 'both' + auth: 'public', + side_effects: true, // or null for read-only + input: z.strictObject({ foo: z.string() }), + output: z.strictObject({ bar: z.number() }), + async: true, + description: 'What this action does.', +} satisfies ActionSpecUnion; +``` + +Add it to the `all_action_specs` array at the bottom of the file. + +**2. Run `gro gen`** — regenerates 3 files: +- `action_collections.ts` — `ActionInputs`/`ActionOutputs` type maps +- `action_metatypes.ts` — `ActionMethod` union, `ActionsApi` interface +- `frontend_action_types.ts` — `FrontendActionHandlers` type + +**3. 
Add handler** in `src/lib/server/zzz_action_handlers.ts`: + +```typescript +my_action: async (input, ctx) => { + // input is validated by Zod, ctx has { backend, request_id } + return {bar: 42}; // must match spec's output schema +}, +``` + +Both HTTP RPC and WebSocket paths automatically pick up the new handler. + +**4. Add frontend handler** in `src/lib/frontend_action_handlers.ts`: + +```typescript +my_action: { + // For request_response: + receive_response: ({app, data: {output}}) => { /* handle success */ }, + receive_error: ({data: {error}}) => { /* handle error */ }, + // For remote_notification: + receive: ({app, data: {input}}) => { /* handle notification */ }, +}, +``` + +**5. Call from frontend** via `app.api`: + +```typescript +// Returns Result<{value: OutputType}, {error: JsonrpcError}> +const result = await app.api.my_action({foo: 'hello'}); +if (result.ok) { + console.log(result.value.bar); // 42 +} +``` + +For `remote_notification` actions, also add to `BackendActionsApi` +in `src/lib/server/backend_actions_api.ts` — follow the `terminal_data` +or `completion_progress` pattern. + ### Zod Schema Conventions - Always use `z.strictObject()` (not `z.object()`) for action specs — unknown keys are rejected @@ -262,8 +435,9 @@ Action kinds: ### State Class Rules -- Schema fields use `$state()!` (non-null assertion, set by `init()`) -- Computed values use `$derived` or `$derived.by(() => ...)` +- Schema fields use `$state.raw()!` by default (non-null assertion, set by `init()`) +- Use `$state()!` only for arrays/objects mutated in place (push, splice, index assignment) +- Computed values use `readonly $derived` or `readonly $derived.by(() => ...)` — always `readonly` unless reassignment is explicitly needed - No `$effect` inside Cell classes — effects belong in components - Constructor must call `this.init()` as the last statement - Always register new Cell classes in `cell_classes.ts` @@ -283,32 +457,47 @@ The `.zzz/` directory stores app data. 
Configured via `PUBLIC_ZZZ_DIR`. | Subdirectory | Purpose | | ------------ | -------------------------------------- | -| `state/` | Persistent data (completions logs) | +| `state/` | Persistent data (completions, workspaces.json) | | `cache/` | Regenerable data, safe to delete | -| `run/` | Runtime ephemeral (server.json: PID, port) | +| `run/` | Runtime ephemeral (daemon.json: PID, port) | All filesystem access goes through `ScopedFs` — path validation, no symlinks, absolute paths only. ## Environment Variables -From `src/lib/server/.env.development.example`: +### Server (BaseServerEnv from fuz_app) + +| Variable | Purpose | +| ---------------------- | ---------------------------------------- | +| `NODE_ENV` | `development` or `production` | +| `PORT` | HTTP server port (default 4040) | +| `HOST` | Bind address (default `localhost`) | +| `DATABASE_URL` | `memory://`, `file://`, or `postgres://` | +| `SECRET_COOKIE_KEYS` | HMAC signing keys (min 32 chars) | +| `ALLOWED_ORIGINS` | Origin patterns for API verification | +| `BOOTSTRAP_TOKEN_PATH` | One-shot admin bootstrap token path | + +### zzz-specific server vars + +| Variable | Purpose | +| ------------------------------------------ | ---------------------------------- | +| `PUBLIC_ZZZ_DIR` | Zzz app directory (default `.zzz`) | +| `PUBLIC_ZZZ_SCOPED_DIRS` | Comma-separated filesystem paths | +| `PUBLIC_BACKEND_ARTIFICIAL_RESPONSE_DELAY` | Testing delay (ms) | +| `SECRET_ANTHROPIC_API_KEY` | Claude API key | +| `SECRET_OPENAI_API_KEY` | OpenAI API key | +| `SECRET_GOOGLE_API_KEY` | Google Gemini API key | + +### SvelteKit frontend vars (PUBLIC_*) | Variable | Purpose | | ------------------------------------- | ------------------------------------------ | -| `PUBLIC_ZZZ_DIR` | Zzz app directory (default `.zzz`) | -| `PUBLIC_ZZZ_SCOPED_DIRS` | Comma-separated user file paths | | `PUBLIC_SERVER_PROTOCOL` | `http` or `https` | -| `PUBLIC_SERVER_HOST` | Server hostname | +| `PUBLIC_SERVER_HOST` | Server hostname 
(frontend) | | `PUBLIC_SERVER_PORT` | SvelteKit dev server port | | `PUBLIC_SERVER_API_PATH` | API endpoint path | | `PUBLIC_WEBSOCKET_URL` | WebSocket URL | -| `PUBLIC_SERVER_PROXIED_PORT` | Hono backend port | -| `PUBLIC_BACKEND_ARTIFICIAL_RESPONSE_DELAY` | Testing delay in ms | -| `ALLOWED_ORIGINS` | Origin allowlist patterns | -| `SECRET_OPENAI_API_KEY` | OpenAI API key | -| `SECRET_ANTHROPIC_API_KEY` | Anthropic API key | -| `SECRET_GOOGLE_API_KEY` | Google Gemini API key | -| `SECRET_GITHUB_API_TOKEN` | GitHub API token | +| `PUBLIC_SERVER_PROXIED_PORT` | Hono backend port (frontend) | ## Avoid @@ -322,16 +511,28 @@ From `src/lib/server/.env.development.example`: ## Known Limitations -- **No authentication** — development use only, anyone with network access can use it -- **No database** — all state is in-memory, lost on restart (pglite planned) +- **WebSocket auth** — Auth is enforced at upgrade time via `require_auth` middleware (cookie sessions, bearer tokens — bearer silently discarded in browser context via Origin/Referer defense). Per-action auth checks enforce spec-level auth (e.g. `keeper` requires `daemon_token` + keeper role). Batch JSON-RPC and role-based auth are rejected (not yet supported). Sockets are closed on session/token revocation, logout, and password change via audit events. No per-message session revalidation — event-driven revocation is sufficient. ActionPeer itself has no auth awareness. +- **Bearer auth soft-fails** — fuz_app's bearer middleware soft-fails for invalid/expired/empty tokens (calls `next()`, no error response). Auth enforcement happens downstream via `check_action_auth` (JSON-RPC) or `require_auth` (routes). Both Deno and Rust backends produce identical `{code: -32001, message: "unauthenticated"}` JSON-RPC errors. Public actions are not blocked by bad bearer credentials. +- **Domain state is in-memory** — auth/accounts are in PGlite DB, but zzz domain state (files, terminals, workspaces) is in-memory, lost on restart. 
Workspaces persist to JSON file as a stopgap. - **No undo/history** — file edits are permanent -- **No terminal integration** — no shell access from the UI +- **PTY via FFI** — real PTY support via `fuz_pty` Rust crate loaded through Deno FFI (`forkpty()`). Requires `cargo build -p fuz_pty --release` in `~/dev/private_fuz/`. For bundled binaries, place `libfuz_pty.so` next to the `zzz` executable. Falls back to `Deno.Command` pipes (no echo, no prompt) if `.so` not found - **No git integration** — no commit/push/pull from the UI - **No MCP/A2A** — protocol support planned but not implemented -- **Backend is reference impl** — may be replaced by Rust daemon (`fuzd`) +- **Rust backend is Phase 4** — 16 RPC methods with full auth stack, same `/api/*` route paths as Deno. `deno task dev` runs the Rust backend with Vite frontend. Anthropic provider fully implemented (non-streaming + SSE streaming), OpenAI/Gemini stubs (status only), Ollama stub (always unavailable). No batch JSON-RPC, no Ollama actions (`ollama_list`, `ollama_ps`, etc.). See [Rust Backends quest](../grimoire/quests/rust-backends.md) for roadmap ## fuz_app -zzz is the primary source for the Cell and Action patterns that will become the `fuz_app` package — a shared foundation for Fuz ecosystem apps. +zzz is the reference implementation for Cell and Action patterns. The full SAES +runtime has been extracted to `@fuzdev/fuz_app` — zzz imports ActionSpec, +ActionEvent, ActionPeer, transports, and `create_rpc_client` from +`@fuzdev/fuz_app/actions/*.js`. Cell patterns (Cell base class, cell classes, +IndexedCollection) remain in zzz. The generated `TypedActionEvent` alias +intersects fuz_app's generic `ActionEvent` with zzz's `ActionEventDatas` map +for typed input/output in handlers. `Uuid` and `create_uuid` are re-exported +from `@fuzdev/fuz_app/uuid.js` via `zod_helpers.ts`. 
+ +The CLI and daemon lifecycle use `@fuzdev/fuz_app/cli/*` helpers: `DaemonInfo` +schema, `write_daemon_info`, `read_daemon_info`, `is_daemon_running`, +`stop_daemon`. The server writes `~/.zzz/run/daemon.json` (not `server.json`). -Last updated: 2026-02-10 +Last updated: 2026-04-14 diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 000000000..e3eb4e3e1 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,3088 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "argon2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" +dependencies = [ + "base64ct", + "blake2", + "cpufeatures 0.2.17", + "password-hash", +] + +[[package]] +name = "arrayref" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "aws-lc-rs" +version = "1.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a054912289d18629dc78375ba2c3726a3afe3ff71b4edba9dedfca0e3446d1fc" +dependencies = [ + "aws-lc-sys", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.39.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a25cf98105baa966497416dbd42565ce3a8cf8dbfd59803ec9ad46f3126399" +dependencies = [ + "cc", + "cmake", + "dunce", + "fs_extra", +] + +[[package]] +name = "axum" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8" +dependencies = [ + "axum-core", + "base64", + "bytes", + "form_urlencoded", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "serde_core", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper", + "tokio", + "tokio-tungstenite", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-extra" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fef252edff26ddba56bbcdf2ee3307b8129acb86f5749b68990c168a6fcc9c76" +dependencies = [ + "axum", + "axum-core", + "bytes", + "cookie", + "futures-core", + 
"futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "blake3" +version = "1.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d2d5991425dfd0785aed03aedcf0b321d61975c9b5b3689c774a2610ae0b51e" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", + "cpufeatures 0.3.0", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-buffer" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdd35008169921d80bc60d3d0ab416eecb028c4cd653352907921d95084790be" +dependencies = [ + "hybrid-array", +] + +[[package]] +name = 
"bumpalo" +version = "3.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cc" +version = "1.2.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7a4d3ec6524d28a329fc53654bbadc9bdd7b0431f5d65f1a56ffb28a1ee5283" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chacha20" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601" +dependencies = [ + "cfg-if", + "cpufeatures 0.3.0", + "rand_core 0.10.0", +] + +[[package]] +name = "cmake" +version = "0.1.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0f78a02292a74a88ac736019ab962ece0bc380e3f977bf72e376c5d78ff0678" +dependencies = [ + "cc", +] + +[[package]] +name = "cmov" +version = "0.5.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f88a43d011fc4a6876cb7344703e297c71dda42494fee094d5f7c76bf13f746" + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "const-oid" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6ef517f0926dd24a1582492c791b6a4818a4d94e789a334894aa15b0d12f55c" + +[[package]] +name = "constant_time_eq" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b" + +[[package]] +name = "cookie" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" +dependencies = [ + "percent-encoding", + "time", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "cpufeatures" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201" +dependencies = [ + "libc", +] + 
+[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "crypto-common" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77727bb15fa921304124b128af125e7e3b968275d1b108b379190264f4423710" +dependencies = [ + "hybrid-array", +] + +[[package]] +name = "ctutils" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d5515a3834141de9eafb9717ad39eea8247b5674e6066c404e8c4b365d2a29e" +dependencies = [ + "cmov", +] + +[[package]] +name = "data-encoding" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" + +[[package]] +name = "deadpool" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0be2b1d1d6ec8d846f05e137292d0b89133caf95ef33695424c09568bdd39b1b" +dependencies = [ + "deadpool-runtime", + "lazy_static", + "num_cpus", + "tokio", +] + +[[package]] +name = "deadpool-postgres" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d697d376cbfa018c23eb4caab1fd1883dd9c906a8c034e8d9a3cb06a7e0bef9" +dependencies = [ + "async-trait", + "deadpool", + "getrandom 0.2.17", + "tokio", + "tokio-postgres", + "tracing", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" +dependencies = [ + "tokio", +] + +[[package]] +name = "deranged" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c" +dependencies = [ + 
"powerfmt", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer 0.10.4", + "crypto-common 0.1.7", + "subtle", +] + +[[package]] +name = "digest" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4850db49bf08e663084f7fb5c87d202ef91a3907271aff24a94eb97ff039153c" +dependencies = [ + "block-buffer 0.12.0", + "const-oid", + "crypto-common 0.2.1", + "ctutils", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = 
"0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "fuz_common" +version = "0.1.0" +dependencies = [ + "blake3", + "libc", + "serde", + "serde_json", + "thiserror 2.0.18", + "time", +] + +[[package]] +name = "fuz_pty" +version = "0.1.0" +dependencies = [ + "libc", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi 5.3.0", + "wasip2", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi 6.0.0", + "rand_core 0.10.0", + "wasip2", + "wasip3", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f467dd6dccf739c208452f8014c75c18bb8301b050ad1cfb27153803edb0f51" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "hmac" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6303bc9732ae41b04cb554b844a762b4115a61bfaa81e3e83050991eeb56863f" +dependencies = [ + "digest 0.11.2", +] + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + 
"pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hybrid-array" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3944cf8cf766b40e2a1a333ee5e9b563f854d5fa49d6a8ca2764e97c6eddb214" +dependencies = [ + "typenum", +] + +[[package]] +name = "hyper" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6299f016b246a94207e63da54dbe807655bf9e00044f73ded42c3ac5305fbcca" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2b52f86d1d4bc0d6b4e6826d960b1b333217e07d36b882dca570a5e1c48895b" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" +dependencies = [ + "base64", + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2", + "tokio", + 
"tower-service", + "tracing", +] + +[[package]] +name = "icu_collections" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2984d1cd16c883d7935b9e07e44071dca8d917fd52ecc02c04d5fa0b5a3f191c" +dependencies = [ + "displaydoc", + "potential_utf", + "utf8_iter", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92219b62b3e2b4d88ac5119f8904c10f8f61bf7e95b640d25ba3075e6cac2c29" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c56e5ee99d6e3d33bd91c5d85458b6005a22140021cc324cea84dd0e72cff3b4" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da3be0ae77ea334f4da67c12f149704f19f81d1adf7c51cf482943e84a2bad38" + +[[package]] +name = "icu_properties" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bee3b67d0ea5c2cca5003417989af8996f8604e34fb9ddf96208a033901e70de" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e2bbb201e0c04f7b4b3e14382af113e17ba4f63e2c9d2ee626b720cbce54a14" + +[[package]] +name = "icu_provider" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139c4cf31c8b5f33d7e199446eff9c1e02decfc2f0eec2c8d71f65befa45b421" +dependencies = [ + "displaydoc", + "icu_locale_core", + 
"writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d466e9454f08e4a911e14806c24e16fba1b4c121d1ea474396f396069cf949d9" +dependencies = [ + "equivalent", + "hashbrown 0.17.0", + "serde", + "serde_core", +] + +[[package]] +name = "inotify" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd5b3eaf1a28b758ac0faa5a4254e8ab2705605496f1b1f3fbbc3988ad73d199" +dependencies = [ + "bitflags 2.11.0", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "ipnet" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" + +[[package]] +name = "iri-string" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25e659a4bb38e810ebc252e53b5814ff908a8c58c2a9ce2fae1bbec24cbf4e20" +dependencies = [ + 
"memchr", + "serde", +] + +[[package]] +name = "itoa" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" + +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys 0.3.1", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41a652e1f9b6e0275df1f15b32661cf0d4b78d4d87ddec5e0c3c20f097433258" +dependencies = [ + "jni-sys 0.4.1", +] + +[[package]] +name = "jni-sys" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6377a88cb3910bee9b0fa88d4f42e1d2da8e79915598f65fb0c7ee14c878af2" +dependencies = [ + "jni-sys-macros", +] + +[[package]] +name = "jni-sys-macros" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38c0b942f458fe50cdac086d2f946512305e5631e720728f2a61aabcd47a6264" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2964e92d1d9dc3364cae4d718d93f227e3abb088e747d92e0395bfdedf1c12ca" +dependencies = [ + "cfg-if", + "futures-util", + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "kqueue" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "libc" +version = "0.2.184" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af" + +[[package]] +name = "libredox" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e02f3bb43d335493c96bf3fd3a321600bf6bd07ed34bc64118e9293bdffea46c" +dependencies = [ + "libc", +] + +[[package]] +name = "litemap" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92daf443525c4cce67b150400bc2316076100ce0b3686209eb8cf3c31612e6f0" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + +[[package]] +name = "md-5" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69b6441f590336821bb897fb28fc622898ccceb1d6cea3fde5ea86b090c4de98" +dependencies = [ + "cfg-if", + "digest 0.11.2", +] + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "mio" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50b7e5b27aa02a74bac8c3f23f448f8d87ff11f92d3aac1a6ed369ee08cc56c1" +dependencies = [ + "libc", + "log", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.61.2", +] + +[[package]] +name = "notify" +version = "8.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" +dependencies = [ + "bitflags 2.11.0", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio", + 
"notify-types", + "walkdir", + "windows-sys 0.60.2", +] + +[[package]] +name = "notify-types" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42b8cfee0e339a0337359f3c88165702ac6e600dc01c0cc9579a92d62b08477a" +dependencies = [ + "bitflags 2.11.0", +] + +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "num-conv" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6673768db2d862beb9b39a78fdcb1a69439615d5794a1be50caa9bc92c81967" + +[[package]] +name = "num_cpus" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "objc2-core-foundation" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" +dependencies = [ + "bitflags 2.11.0", +] + +[[package]] +name = "objc2-system-configuration" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7216bd11cbda54ccabcab84d523dc93b858ec75ecfb3a7d89513fa22464da396" +dependencies = [ + "objc2-core-foundation", +] + +[[package]] +name = "once_cell" +version = "1.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" + +[[package]] +name = "openssl-probe" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + +[[package]] +name = "password-hash" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" +dependencies = [ + "base64ct", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "phf" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" +dependencies = [ + "phf_shared", + "serde", +] + +[[package]] +name = "phf_shared" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "postgres-protocol" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56201207dac53e2f38e848e31b4b91616a6bb6e0c7205b77718994a7f49e70fc" +dependencies = [ + "base64", + "byteorder", + "bytes", + "fallible-iterator", + "hmac 0.13.0", + "md-5", + "memchr", + "rand 0.10.1", 
+ "sha2 0.11.0", + "stringprep", +] + +[[package]] +name = "postgres-types" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8dc729a129e682e8d24170cd30ae1aa01b336b096cbb56df6d534ffec133d186" +dependencies = [ + "bytes", + "fallible-iterator", + "postgres-protocol", + "uuid", +] + +[[package]] +name = "potential_utf" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0103b1cef7ec0cf76490e969665504990193874ea05c85ff9bab8b911d0a0564" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2", + "thiserror 2.0.18", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.14" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098" +dependencies = [ + "aws-lc-rs", + "bytes", + "getrandom 0.3.4", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.18", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.60.2", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core 0.9.5", +] + +[[package]] +name = "rand" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2e8e8bcc7961af1fdac401278c6a831614941f6164ee3bf4ce61b7edb162207" +dependencies = [ + "chacha20", + "getrandom 0.4.2", + "rand_core 0.10.0", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "rand_core" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c8d0fd677905edcbeedbf2edb6494d676f0e98d54d5cf9bda0b061cb8fb8aba" + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags 2.11.0", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" + +[[package]] +name = "reqwest" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801" +dependencies = [ + "base64", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pki-types", + "rustls-platform-verifier", + 
"serde", + "serde_json", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tokio-util", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-hash" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94300abf3f1ae2e2b8ffb7b58043de3d399c73fa6f4b73826402a5c457614dbe" + +[[package]] +name = "rustls" +version = "0.23.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f9466fb2c14ea04357e91413efb882e2a6d4a406e625449bc0a5d360d53a21" +dependencies = [ + "aws-lc-rs", + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-platform-verifier" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d99feebc72bae7ab76ba994bb5e121b8d83d910ca40b36e0921f53becc41784" +dependencies = [ + "core-foundation", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls", + "rustls-native-certs", + "rustls-platform-verifier-android", + 
"rustls-webpki", + "security-framework", + "security-framework-sys", + "webpki-root-certs", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + +[[package]] +name = "rustls-webpki" +version = "0.103.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20a6af516fea4b20eccceaf166e8aa666ac996208e8a644ce3ef5aa783bc7cd4" +dependencies = [ + "aws-lc-rs", + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91c1b7e4904c873ef0710c1f407dde2e6287de2bebc1bbbf7d430bb7cbffd939" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" +dependencies = [ + "bitflags 2.11.0", + "core-foundation", + "core-foundation-sys", 
+ "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7852d02fc848982e0c167ef163aaff9cd91dc640ba85e263cb1ce46fae51cd" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures 0.2.17", + "digest 0.10.7", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures 0.2.17", + "digest 0.10.7", +] + +[[package]] +name = "sha2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "446ba717509524cb3f22f17ecc096f10f4822d76ab5c0b9822c5f9c284e825f4" +dependencies = [ + "cfg-if", + "cpufeatures 0.3.0", + "digest 0.11.2", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "siphasher" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = 
"thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl 2.0.18", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "time" +version = "0.3.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde_core", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" + +[[package]] +name = "time-macros" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" +dependencies = [ + "num-conv", + "time-core", 
+] + +[[package]] +name = "tinystr" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8323304221c2a851516f22236c5722a72eaa19749016521d6dff0824447d96d" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e61e67053d25a4e82c844e8424039d9745781b3fc4f32b8d55ed50f5f667ef3" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.51.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f66bf9585cda4b724d3e78ab34b73fb2bbaba9011b9bfdf69dc836382ea13b8c" +dependencies = [ + "bytes", + "libc", + "mio", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "385a6cb71ab9ab790c5fe8d67f1645e6c450a7ce006a33de03daa956cf70a496" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-postgres" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd8df5ef180f6364759a6f00f7aadda4fbbac86cdee37480826a6ff9f3574ce" +dependencies = [ + "async-trait", + "byteorder", + "bytes", + "fallible-iterator", + "futures-channel", + "futures-util", + "log", + "parking_lot", + "percent-encoding", + "phf", + "pin-project-lite", + "postgres-protocol", + "postgres-types", + "rand 0.10.1", + "socket2", + "tokio", + "tokio-util", + "whoami", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "futures-util", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags 2.11.0", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "http-range-header", + "httpdate", + "iri-string", + "mime", + "mime_guess", + "percent-encoding", + "pin-project-lite", + "tokio", + "tokio-util", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" +dependencies = [ + "bytes", + "data-encoding", + "http", + 
"httparse", + "log", + "rand 0.9.2", + "sha1", + "thiserror 2.0.18", + "utf-8", +] + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "unicase" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-normalization" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + 
"percent-encoding", + "serde", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ac8b6f42ead25368cf5b098aeb3dc8a1a2c05a3eee8a9a1a68c640edbfc79d9" +dependencies = [ + "getrandom 0.4.2", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasi" +version = "0.14.7+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" 
+dependencies = [ + "wasip2", +] + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasite" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fe902b4a6b8028a753d5424909b764ccf79b7a209eac9bf97e59cda9f71a42" +dependencies = [ + "wasi 0.14.7+wasi-0.2.4", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf938a0bacb0469e83c1e148908bd7d5a6010354cf4fb73279b7447422e3a89" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f371d383f2fb139252e0bfac3b81b265689bf45b6874af544ffa4c975ac1ebf8" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eeff24f84126c0ec2db7a449f0c2ec963c6a49efe0698c4242929da037ca28ed" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d08065faf983b2b80a79fd87d8254c409281cf7de75fc4b773019824196c904" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" 
+version = "0.2.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd04d9e306f1907bd13c6361b5c6bfc7b3b3c095ed3f8a9246390f8dbdee129" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasm-streams" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1ec4f6517c9e11ae630e200b2b65d193279042e28edd4a2cda233e46670bbb" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.11.0", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + +[[package]] +name = "web-sys" +version = "0.3.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f2dfbb17949fa2088e5d39408c48368947b86f7834484e87b73de55bc14d97d" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-root-certs" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"804f18a4ac2676ffb4e8b5b5fa9ae38af06df08162314f96a68d2a363e21a8ca" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "whoami" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6a5b12f9df4f978d2cfdb1bd3bac52433f44393342d7ee9c25f5a1c14c0f45d" +dependencies = [ + "libc", + "libredox", + "objc2-system-configuration", + "wasite", + "web-sys", +] + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" 
+version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = 
"windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.11.0", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" 
+dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ffae5123b2d3fc086436f8834ae3ab053a283cfac8fe0a0b8eaae044768a4c4" + +[[package]] +name = "yoke" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abe8c5fda708d9ca3df187cae8bfb9ceda00dd96231bed36e445a1a48e66f9ca" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de844c262c8848816172cef550288e7dc6c7b7814b4ee56b3e1553f275f1858e" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eed437bf9d6692032087e337407a86f04cd8d6a16a37199ed57949d415bd68e9" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e3cd084b1788766f53af483dd21f93881ff30d7320490ec3ef7526d203bad4" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69faa1f2a1ea75661980b013019ed6687ed0e83d069bc1114e2cc74c6c04c4df" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11532158c46691caf0f2593ea8358fed6bbf68a0315e80aae9bd41fbade684a1" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f9152d31db0792fa83f70fb2f83148effb5c1f5b8c7686c3459e361d9bc20bf" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90f911cbc359ab6af17377d242225f4d75119aec87ea711a880987b18cd7b239" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "625dc425cab0dca6dc3c3319506e6593dcb08a9f387ea3b284dbd52a92c40555" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" + +[[package]] +name = "zzz_server" +version = "0.1.0" +dependencies = [ + "argon2", + "axum", + "axum-extra", + "base64", + "blake3", + "deadpool-postgres", + "futures-util", + "fuz_common", + "fuz_pty", + "hmac 0.12.1", + "libc", + "notify", + "rand 0.10.1", + "reqwest", + "serde", + "serde_json", + "sha2 0.10.9", + "thiserror 2.0.18", + "tokio", + "tokio-postgres", + "tokio-util", + "tower", + "tower-http", + "tracing", + "tracing-subscriber", + "uuid", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 000000000..7c4d8dc0d --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,69 @@ +[workspace] +resolver = "2" +members = ["crates/zzz_server"] + +[workspace.package] +version = "0.1.0" +edition = "2024" +license = "AGPL-3.0-only" +publish = false + +[workspace.dependencies] +fuz_common = { path = "../private_fuz/crates/fuz_common" } +fuz_pty = { path = 
"../private_fuz/crates/fuz_pty" } +tokio = { version = "1", features = ["rt-multi-thread", "macros", "net", "signal"] } +axum = { version = "0.8", features = ["ws"] } +axum-extra = { version = "0.12", features = ["cookie"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +thiserror = "2" +tower = "0.5" +tower-http = { version = "0.6", features = ["fs"] } +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } +futures-util = { version = "0.3", features = ["sink"] } +tokio-util = { version = "0.7", features = ["rt"] } +tokio-postgres = { version = "0.7", features = ["with-uuid-1"] } +deadpool-postgres = "0.14" +hmac = "0.12" +sha2 = "0.10" +blake3 = "1" +base64 = "0.22" +uuid = { version = "1", features = ["v4"] } +argon2 = { version = "0.5", features = ["rand"] } +rand = "0.10" +notify = { version = "8", default-features = false, features = ["macos_fsevent"] } +reqwest = { version = "0.13", default-features = false, features = ["rustls", "stream", "json"] } + +[workspace.lints.rust] +unsafe_code = "forbid" +missing_debug_implementations = "warn" +trivial_casts = "warn" +trivial_numeric_casts = "warn" +unused_lifetimes = "warn" +unused_qualifications = "warn" + +[workspace.lints.clippy] +all = { level = "warn", priority = -1 } +pedantic = { level = "warn", priority = -1 } +nursery = { level = "warn", priority = -1 } +cargo = { level = "warn", priority = -1 } +module_name_repetitions = "allow" +must_use_candidate = "allow" +similar_names = "allow" +too_many_lines = "allow" +significant_drop_tightening = "allow" +cargo_common_metadata = "allow" +multiple_crate_versions = "allow" +clone_on_ref_ptr = "warn" +dbg_macro = "warn" +expect_used = "warn" +panic = "warn" +todo = "warn" +unwrap_used = "warn" + +[profile.release] +lto = true +codegen-units = 1 +panic = "abort" +strip = true diff --git a/README.md b/README.md index af323aee3..576cf18db 100644 --- a/README.md +++ b/README.md @@ -40,11 +40,10 @@ and it will have a 
production build with the Node adapter and Hono server soon. To run Zzz, we need an `.env.development` file in your project root. -In your terminal, copy over -[src/lib/server/.env.development.example](/src/lib/server/.env.development.example): +In your terminal, run the setup script (idempotent — safe to re-run): ```bash -cp src/lib/server/.env.development.example .env.development --update=none +deno task dev:setup ``` You can edit `.env.development` with your API keys, @@ -73,7 +72,7 @@ Zzz builds on a great deal of software. - I started using [Claude](https://claude.ai/) in late 2024 after making the initial prototype, and in late 2025 I started doing much of the coding with Claude Code, Opus 4.5 being the first over some threshold for me for this project - - see `⚠️ AI generated` and similar disclaimers + - see `NOTE: AI-generated` and similar disclaimers ## License 🐦 diff --git a/crates/CLAUDE.md b/crates/CLAUDE.md new file mode 100644 index 000000000..e9d819c2b --- /dev/null +++ b/crates/CLAUDE.md @@ -0,0 +1,423 @@ +# zzz Rust Backend + +Shadow implementation of the Deno/Hono server using axum. Same JSON-RPC 2.0 +protocol, same wire format — the Deno server is ground truth and the +integration tests enforce identical behaviour between both backends. + +Phase 4 in progress: AI provider system with enum-dispatched providers +(Anthropic fully implemented, OpenAI/Gemini/Ollama stubs). 16 RPC methods: +`ping`, `session_load`, `workspace_*`, `diskfile_*`, `directory_create`, +`terminal_*`, `provider_load_status`, `provider_update_api_key`, +`completion_create`. 
Full auth stack (cookie sessions, bearer tokens, daemon +tokens), account management routes, filesystem actions with `ScopedFs`, +terminal actions via `fuz_pty`, `session_load` returns real provider status +from all registered providers, `workspace_changed`/`filer_change`/ +`terminal_data`/`terminal_exited` notifications, file watching via `notify` +crate with debounced broadcasts and immediate index updates, WebSocket +connection tracking with targeted `completion_progress` streaming +notifications, event-driven socket revocation. Database (PostgreSQL via +`tokio-postgres`/`deadpool-postgres`), HMAC-SHA256 cookie signing, blake3 +session hashing. Anthropic provider uses `reqwest` HTTP client with manual +SSE parsing for streaming completions. + +## Prerequisites + +`private_fuz` must be checked out as a sibling directory: + +``` +~/dev/zzz/ (this repo) +~/dev/private_fuz/ (path deps: fuz_common, fuz_pty) +``` + +If a path dep is missing, `cargo build` will fail with +`failed to read .../private_fuz/crates/{crate}/Cargo.toml`. + +**PostgreSQL** is required. Create the development and test databases: + +```bash +createdb zzz # development +createdb zzz_test # integration tests +``` + +## Build and Run + +```bash +cargo build -p zzz_server +cargo clippy -p zzz_server # workspace lints: pedantic + nursery + +# Run (requires DATABASE_URL and SECRET_COOKIE_KEYS) +DATABASE_URL=postgres://localhost/zzz \ +SECRET_COOKIE_KEYS=dev-only-not-for-production-use-000 \ +./target/debug/zzz_server --port 1174 + +# Quick smoke test +curl http://localhost:1174/health +curl -X POST http://localhost:1174/api/rpc \ + -H 'Content-Type: application/json' \ + -d '{"jsonrpc":"2.0","id":"1","method":"ping"}' +# → {"jsonrpc":"2.0","id":"1","result":{"ping_id":"1"}} +``` + +CLI args (`--port`, `--static-dir`) take precedence over env vars +(`ZZZ_PORT`, `ZZZ_STATIC_DIR`). 
+ +### Required Environment Variables + +| Variable | Purpose | +|----------------------|----------------------------------------------------| +| `DATABASE_URL` | PostgreSQL connection (e.g. `postgres://localhost/zzz`) | +| `SECRET_COOKIE_KEYS` | HMAC signing keys (min 32 chars, `__` separator for rotation) | + +### Optional Environment Variables + +| Variable | Purpose | +|--------------------------|--------------------------------------------| +| `BOOTSTRAP_TOKEN_PATH` | Path to bootstrap token file | +| `ALLOWED_ORIGINS` | Comma-separated origin patterns | +| `PUBLIC_ZZZ_SCOPED_DIRS` | Comma-separated filesystem paths | +| `ZZZ_PORT` | Server port (default 1174, CLI overrides) | +| `ZZZ_STATIC_DIR` | Static file directory | + +## Endpoints + +| Method | Path | Description | +|--------|-----------------------------------|------------------------------------------| +| GET | `/api/rpc` | JSON-RPC 2.0 (cacheable reads, query params) | +| POST | `/api/rpc` | JSON-RPC 2.0 (HTTP transport, auth-gated) | +| POST | `/api/account/bootstrap` | One-shot admin account creation | +| GET | `/api/account/status` | Current account info or 401 + bootstrap status | +| POST | `/api/account/login` | Username/password login → session cookie | +| POST | `/api/account/logout` | Invalidate session, close WS connections | +| POST | `/api/account/password` | Change password, revoke all sessions/tokens | +| GET | `/api/account/sessions` | List sessions for authenticated account | +| POST | `/api/account/sessions/:id/revoke`| Revoke a specific session | +| GET | `/api/ws` | JSON-RPC 2.0 (WebSocket, cookie/bearer/daemon) | +| GET | `/health` | Health check (`{"status":"ok"}`) | +| GET | `/*` | Static files (if `--static-dir`) | + +Route paths match the Deno server — both backends use the same `/api/*` prefix. +Integration tests use identical config for both backends. + +## Auth + +Cookie-based session auth and bearer token auth mirroring fuz_app's auth stack: + +1. 
**Keyring** — HMAC-SHA256 cookie signing with key rotation support. + Keys from `SECRET_COOKIE_KEYS` env, separated by `__`. First key signs, + all keys verify. + +2. **Cookie format** — `fuz_session` cookie containing signed + `{session_token}:{expires_at}.{base64_signature}`. 30-day expiry, + `Secure; HttpOnly; SameSite=Strict`. + +3. **Session validation** — Cookie → HMAC verify → blake3 hash token → + `auth_session` table lookup → build `RequestContext` (account, actor, + permits). Sessions touched (last_seen_at updated) fire-and-forget. + +4. **Bearer token auth** — `Authorization: Bearer ` header. Token + hashed with blake3, looked up in `api_token` table. Browser context + silently discarded (Origin/Referer headers present → bearer ignored). Token + `last_used_at` touched fire-and-forget. Sets `CredentialType::ApiToken`. + +5. **Daemon token auth** — `X-Daemon-Token` header. Token is a 43-char + base64url string (32 random bytes), generated at startup and written to + `{zzz_dir}/run/daemon_token`. Rotated every 30 seconds (previous token + accepted during rotation race window). Validated with constant-time + comparison. Resolves the keeper account for the `RequestContext`. Sets + `CredentialType::DaemonToken`. State protected by `tokio::sync::RwLock`. + +6. **Auth pipeline** — Both transports try: daemon token → cookie → bearer. + Daemon token has highest priority (matches fuz_app middleware order). + `ResolvedAuth` carries `credential_type` (`Session`, `ApiToken`, + `DaemonToken`) and optional `token_hash` (session connections only — + bearer and daemon token connections have `None`). + +7. **Per-action auth** — Each RPC method has an auth level: + - `public` — no auth required (`ping`) + - `authenticated` — valid session or bearer token required (workspace_*, session_load, etc.) + - `keeper` — requires `DaemonToken` credential type AND keeper role permit (`provider_update_api_key`). 
API tokens and session cookies cannot access keeper actions even if the account has the keeper permit. + +8. **Bootstrap** — `POST /bootstrap` creates first admin account with keeper + + admin permits. Reads token from `BOOTSTRAP_TOKEN_PATH`, timing-safe + compare, Argon2 password hashing, all in a transaction with bootstrap_lock. + +9. **Origin verification** — `ALLOWED_ORIGINS` patterns checked on requests + with an `Origin` header. Supports exact match, wildcard port + (`http://localhost:*`), subdomain wildcard (`https://*.example.com`). + +10. **Socket revocation** — `close_sockets_for_session(token_hash)` and + `close_sockets_for_account(account_id)` methods on `App` close matching + WebSocket connections by dropping the channel sender. Session connections + are revocable per-session or per-account; bearer connections are revocable + only per-account. Called by logout (per-session) and password change + (per-account). + +11. **Account status** — `GET /api/account/status` returns account info + + permits (200) when authenticated, or 401 with optional + `bootstrap_available` flag when not. Consumed by fuz_app's `AuthState` + for the frontend auth gate (bootstrap → login → verified flow). + +12. **Account management** — `POST /api/account/login` (username/password → + session cookie with enumeration prevention via dummy hash), + `POST /api/account/logout` (invalidate session + close WS connections), + `POST /api/account/password` (change password, revoke all sessions + API + tokens, close all WS connections), `GET /api/account/sessions` (list + sessions for account), `POST /api/account/sessions/:id/revoke` (revoke + specific session, scoped to own account). + +## Integration Tests + +79 tests on both backends, all cross-backend (0 skips, 0 backend-specific +branches). 
Both backends bootstrap +auth (admin account + session cookie), create a non-keeper user (account + +actor + session, no +keeper permit, cookie signed via HMAC-SHA256), and insert API tokens into +the `api_token` table before tests. The test database (`zzz_test` by default, +configurable via `TEST_DATABASE_URL`) is cleaned (TRUNCATE CASCADE) before +each backend run. A scoped directory (`/tmp/zzz_integration_scoped`) is +created for filesystem tests. Tests are split across modules: `tests.ts` +(core RPC, auth, filesystem, terminal tests), `bearer_tests.ts` (bearer +token auth, keeper credential enforcement, session revocation), +`account_tests.ts` (login, logout, password change, session management), +`test_helpers.ts` (shared assertion and HTTP/WS helpers). + +**WS tests (both backends):** `ping_ws`, `parse_error_ws`, +`method_not_found_ws`, `invalid_request_ws`, `notification_ws`, +`multi_message_ws`, `ws_workspace_list` — 7 tests verify identical WS +behaviour including authenticated actions over WebSocket. + +**HTTP tests (both backends):** `null_id_is_invalid`, `parse_error_http`, +`parse_error_empty_body`, `method_not_found_http`, `invalid_request_*` +(4 variants), `notification_http` — 9 tests verify identical HTTP behaviour. + +**HTTP tests (both backends):** `ping_http`, `ping_numeric_id` — ping handler +echoes the JSON-RPC request id back as `ping_id`. + +**Cross-backend:** `health_check` — 1 test on both backends. + +**Workspace tests (both backends):** `workspace_open_and_list`, +`workspace_open_idempotent`, `workspace_open_nonexistent`, +`workspace_close` — 4 tests. + +**Workspace notification tests (both backends):** +`workspace_changed_on_open`, `workspace_changed_on_close`, +`workspace_changed_idempotent_no_notification` — 3 tests verify +`workspace_changed` notifications are broadcast to WebSocket clients on +workspace open/close, and that idempotent opens do not broadcast. 
+ +**Auth tests (both backends):** `auth_required_without_cookie`, +`auth_required_invalid_cookie`, `auth_public_no_cookie`, +`auth_keeper_forbidden` — 4 tests verify auth enforcement (unauthenticated +→ -32001/401, public → success, non-keeper calling keeper action → -32002/403). + +**WebSocket auth test (both backends):** `ws_auth_required` — 1 test verifies +unauthenticated WS upgrade is rejected. + +**Session/provider tests (both backends):** `session_load_basic`, +`session_load_returns_zzz_dir_files`, `session_load_returns_nested_files`, +`provider_load_status_empty` — 4 tests verify session data loading +(including zzz_dir file listing with contents and recursive subdirectory +walk) and provider status stub. + +**Filesystem tests (both backends):** `diskfile_update_and_read`, +`diskfile_update_in_zzz_dir`, `diskfile_update_in_zzz_dir_subdirectory`, +`diskfile_delete`, `directory_create`, `directory_create_already_exists`, +`diskfile_update_outside_scope`, `diskfile_update_path_traversal`, +`diskfile_update_relative_path`, `diskfile_delete_nonexistent` — 10 tests +verify scoped filesystem operations (including writes to zzz_dir and nested +subdirectories), idempotent directory creation, path traversal rejection, +relative path rejection, and nonexistent file deletion. + +**Workspace edge cases (both backends):** `workspace_open_not_directory` — +1 test verifies opening a file (not a directory) returns an error. + +**File watcher tests (both backends):** `filer_change_on_file_create` — +1 test verifies `filer_change` notifications are broadcast when files are +created in an open workspace. 
+ +**Terminal tests (both backends):** `terminal_create_echo`, +`terminal_close`, `terminal_write_and_read`, `terminal_resize_live`, +`terminal_create_with_cwd`, `terminal_create_nonexistent_command`, +`terminal_data_send_missing`, `terminal_close_missing`, +`terminal_resize_missing` — 9 tests verify PTY spawn/read/write/close +lifecycle, `terminal_data`/`terminal_exited` notifications over WebSocket, +stdin write with echo verification, live resize, explicit cwd, nonexistent +command handling, explicit process kill, and silent return behavior for +missing terminal IDs. + +**Non-keeper tests (both backends):** `non_keeper_authenticated_action`, +`auth_keeper_forbidden` — 2 tests verify non-keeper users can access +authenticated actions but are rejected from keeper actions. + +**Bearer token tests (both backends unless noted):** +`bearer_token_auth`, `bearer_token_invalid`, `bearer_token_expired`, +`bearer_token_public_action`, `bearer_token_ws`, +`bearer_token_ws_rejected_invalid`, `keeper_requires_daemon_token`, +`ws_revocation_on_session_delete`, +`bearer_rejects_browser_context_origin`, +`bearer_rejects_browser_context_referer`, `bearer_empty_value`, +`bearer_cookie_priority` — 12 tests verify API token auth via +`Authorization: Bearer` header on HTTP and WebSocket, expired/invalid token +rejection, keeper credential enforcement (API tokens can't access keeper +actions), session revocation via DB delete, browser context discard +(Origin/Referer headers → bearer silently ignored), empty bearer value +handling, and cookie-over-bearer priority. 
+ +**Account management tests (both backends):** +`login_success`, `login_invalid_password`, `login_nonexistent_user`, +`logout_clears_session`, `logout_unauthenticated`, +`password_change_revokes_all`, `password_wrong_current`, +`session_list`, `session_revoke` — 9 tests verify login with +valid/invalid/nonexistent credentials, logout with session invalidation and +cookie clearing, password change with full session + token revocation and +re-login verification, session listing (with `account_id` field), and single +session revocation (idempotent with `revoked` field). + +```bash +deno task test:integration --backend=rust # Rust only +deno task test:integration --backend=deno # Deno only +deno task test:integration --backend=both # Both (default) +deno task test:integration --filter=ping # Substring match on test name +``` + +The test runner cleans the `zzz_test` database, writes a bootstrap token, +starts the backend, bootstraps an admin account, runs tests with the session +cookie, then stops the backend and cleans up. + +## Architecture + +``` +crates/zzz_server/src/ +├── main.rs # Entry, config, DB/keyring/daemon-token init, route setup, graceful shutdown +├── handlers.rs # App (server state + connection tracking + watchers), Ctx, dispatch +├── rpc.rs # JSON-RPC classify + notification builder, HTTP handler with auth pipeline +├── ws.rs # WebSocket upgrade with auth, connection tracking, select! 
message loop +├── auth.rs # Keyring, cookie/bearer/daemon-token resolution, per-action auth +├── daemon_token.rs # Daemon token state, generation, timing-safe validation, rotation task +├── account.rs # Account routes: login, logout, password change, session management +├── bootstrap.rs # POST /bootstrap handler (account + session creation) +├── db.rs # Connection pool, migrations, auth + account management queries +├── filer.rs # Filer + FilerManager (notify crate) — immediate file index updates, debounced filer_change broadcasts +├── provider/ # AI provider system +│ ├── mod.rs # ProviderName, ProviderStatus, Provider enum, ProviderManager, CompletionOptions +│ ├── anthropic.rs # AnthropicProvider — Messages API with SSE streaming +│ ├── openai.rs # OpenAiProvider stub (status only) +│ ├── gemini.rs # GeminiProvider stub (status only) +│ └── ollama.rs # OllamaProvider stub (status only) +├── pty_manager.rs # PTY terminal manager (fuz_pty crate) → terminal_data/exited notifications +├── scoped_fs.rs # Scoped filesystem — path validation, symlink rejection +└── error.rs # ServerError (Bind, Serve, Database, Config) +``` + +**App/Ctx/dispatch pattern**: `App` holds long-lived server state (workspaces +in `RwLock`, `deadpool_postgres::Pool`, `Keyring`, origin config, +`ScopedFs`, `zzz_dir`, `scoped_dirs`, `PtyManager`, `DaemonTokenState`, +connection tracking via `AtomicU64` + `RwLock>`, `FilerManager` with per-watcher ignore config, event +debouncing, in-memory file index, and lifetime tracking (permanent for +`zzz_dir`/`scoped_dirs`, workspace-scoped for `workspace_open`; deduplicates +by path)), constructed once in `main`, wrapped in `Arc`. `Ctx` is +per-request context (borrows `App` + holds `Arc` for spawning tasks, +`request_id`, `auth: Option<&RequestContext>`), constructed by each transport +before calling `handlers::dispatch`. + +**Auth pipeline** (HTTP RPC path): +1. Origin verification (if `Origin` header present) +2. 
Try daemon token auth: `X-Daemon-Token` → timing-safe validate → resolve keeper account +3. If no daemon token: try cookie auth: `fuz_session` cookie → HMAC verify → blake3 hash → `auth_session` lookup +4. If no cookie: try bearer auth: `Authorization: Bearer` → reject browser context → blake3 hash → `api_token` lookup +5. Build `RequestContext` (account → actor → permits) with `CredentialType` +6. Check per-action auth level (keeper actions require `DaemonToken` credential type) + +**Message classification** (`rpc::classify`) is transport-agnostic: +- HTTP: origin check → auth → classify → auth check → dispatch +- WS: upgrade auth (reject 401) → classify → per-action auth check → dispatch + +## Known Issues + +- **No per-message WS session revalidation** — upgrade-time auth only. Event- + driven revocation covers logout and password change (closes matching WS + connections via `close_sockets_for_session`/`close_sockets_for_account`). + Per-message session recheck is not done — the event-driven approach is + sufficient for current needs. +- **error.data intentional divergence** — Deno includes Zod validation details + in `error.data` for -32602 errors; Rust omits for security (no schema leak to + unauthenticated callers). The integration test `normalize_error_data` function + handles this. Future: environment-conditional in both (include in dev, strip + in prod). + +### Cross-Backend Response Divergences + +Tracked asymmetries between Deno (ground truth) and Rust backends. Bearer +auth response format (issue #1) was resolved — both backends now produce +identical JSON-RPC envelopes for all auth failures. + +| Issue | Status | Detail | +|-------|--------|--------| +| Bearer invalid/expired token | **Resolved** | Both backends soft-fail → JSON-RPC `-32001` unauthenticated | +| `provider_load_status` shape | **Resolved** | Both backends return `{status: ProviderStatus}` per the action spec. Test is cross-backend (no backend branching). 
| +| `session_list` response | **Resolved** | Both backends now return `{sessions: [{id, account_id, created_at, last_seen_at, expires_at}]}` matching fuz_app `AuthSessionJson`. Tests are cross-backend. | +| `session_revoke` format | **Resolved** | Both backends now return `{ok: true, revoked: boolean}` with idempotent 200 responses. Route paths unified (`/api/account/*`). Tests are cross-backend. | +| `error.data` (validation) | Intentional | Deno includes Zod issues in `error.data` for -32602; Rust omits. Intentional divergence — Rust's omission is the safer production default, Deno's inclusion aids DX. Handled by `normalize_error_data` in tests. Future: environment-conditional in both backends (include in dev, strip in prod). | + +## Known Limitations + +- 16 RPC methods (`ping`, `session_load`, `workspace_*`, `diskfile_update`, `diskfile_delete`, `directory_create`, `terminal_*`, `provider_load_status`, `provider_update_api_key` keeper-only, `completion_create`) +- 5 `remote_notification` actions: `workspace_changed` (broadcast on open/close), `filer_change` (`FilerManager` with `notify` crate — recursive watching, 80ms debounced broadcasts with immediate index updates, per-watcher ignore config, in-memory file index; ignores `.git`/`node_modules`/`.svelte-kit`/`target`/`dist` globally plus zzz dir name for workspace/scoped_dir watchers; startup filers on `zzz_dir` and `scoped_dirs`, per-workspace filers with dedup and lifetime tracking), `terminal_data` (PTY stdout broadcast), `terminal_exited` (process exit broadcast), `completion_progress` (streaming completion chunks to requesting WS connection) +- AI providers: Anthropic fully implemented (non-streaming + SSE streaming), OpenAI/Gemini stubs (status only), Ollama stub (always unavailable) +- No batch request support (JSON arrays) +- No Ollama actions (`ollama_list`, `ollama_ps`, etc.) +- No signup route (requires invite system) +- No token management routes (GET /tokens, POST /tokens/create, etc.) 
+- No SSE/realtime audit event broadcasting +- No rate limiting on login/password endpoints + +## Design Decisions + +- **DB**: `tokio-postgres` + `deadpool-postgres` pool in `App`. Required at + startup — server fails fast if `DATABASE_URL` is missing or unreachable. + Migrations run on every startup (CREATE TABLE IF NOT EXISTS). +- **Cookie signing**: Pure Rust HMAC-SHA256 via `hmac`/`sha2` crates. + Compatible with fuz_app's keyring format (same `value.base64(signature)`). +- **Session hashing**: `blake3` crate for token → storage key hashing. + Compatible with fuz_app's `hash_blake3` (same hex output). +- **Password hashing**: Argon2id via `argon2` crate (bootstrap, login, password change), + offloaded to `tokio::task::spawn_blocking` to avoid blocking the async runtime. +- **Dispatch is async**: filesystem handlers (`diskfile_update`, etc.) use + `tokio::fs` async I/O. Workspace handlers remain sync (no await points). +- **`std::sync::RwLock`** (not tokio): current handlers are sync. When async + handlers arrive, scope lock guards before await points. +- **Session touch**: fire-and-forget via `tokio::spawn` — doesn't block + the request pipeline. +- **PTY terminals**: `fuz_pty` as a native crate dependency (no FFI + indirection). `PtyManager` in `App` manages spawned processes with async + read loops via `tokio::spawn`. Each terminal gets a `CancellationToken` so + `terminal_close` can stop the read loop before killing the process. Matching + Deno behavior: 10ms poll interval, 50ms wait after kill before waitpid, + silent returns for missing terminal IDs. +- **Provider system**: Enum-dispatched (`Provider` enum, not trait objects) — + 4 providers known at compile time, exhaustive matching. Provider state behind + `tokio::sync::RwLock` for async `set_api_key`. `complete()` clones the + `reqwest::Client` (internally `Arc`'d) and releases the lock before HTTP + calls, so `set_api_key` is never blocked by long-running streaming responses. 
+ SSE parsing is manual with `\r\n` normalization per RFC 8895. + +## What's Next + +**Phase 4** (in progress — AI providers): +- [x] Provider system: enum-dispatched `Provider` with `ProviderManager`, `ProviderStatus`, `CompletionOptions` +- [x] Anthropic provider: full implementation with `reqwest` HTTP client, SSE streaming, message format conversion +- [x] `provider_load_status` handler (cross-backend, all 4 providers report status) +- [x] `provider_update_api_key` handler (keeper-only, runtime API key updates) +- [x] `completion_create` handler with `completion_progress` streaming notifications (targeted to requesting WS connection) +- [x] `session_load` returns real provider status from all providers +- [ ] OpenAI provider: full completion implementation +- [ ] Gemini provider: full completion implementation +- [ ] Ollama provider: HTTP client to local Ollama API, `ollama_list`, `ollama_ps`, etc. + +**Phase 5** (remaining): +1. Codegen from Zod specs (action input/output types) +2. Token management routes (create, list, revoke API tokens) +3. Rate limiting on login/password endpoints + +See the [Rust Backends quest](../../grimoire/quests/rust-backends.md). 
diff --git a/crates/zzz_server/Cargo.toml b/crates/zzz_server/Cargo.toml
new file mode 100644
index 000000000..0f2528658
--- /dev/null
+++ b/crates/zzz_server/Cargo.toml
@@ -0,0 +1,41 @@
+[package]
+name = "zzz_server"
+version.workspace = true
+edition.workspace = true
+license.workspace = true
+publish.workspace = true
+
+[[bin]]
+name = "zzz_server"
+path = "src/main.rs"
+
+[dependencies]
+fuz_common.workspace = true
+tokio.workspace = true
+axum.workspace = true
+axum-extra.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+thiserror.workspace = true
+tower.workspace = true
+tower-http.workspace = true
+tracing.workspace = true
+tracing-subscriber.workspace = true
+futures-util.workspace = true
+tokio-util.workspace = true
+tokio-postgres.workspace = true
+deadpool-postgres.workspace = true
+hmac.workspace = true
+sha2.workspace = true
+blake3.workspace = true
+base64.workspace = true
+uuid.workspace = true
+argon2.workspace = true
+rand.workspace = true
+notify.workspace = true
+fuz_pty.workspace = true
+reqwest.workspace = true
+libc = "0.2"
+
+[lints]
+workspace = true
diff --git a/crates/zzz_server/src/account.rs b/crates/zzz_server/src/account.rs
new file mode 100644
index 000000000..92ce2d3af
--- /dev/null
+++ b/crates/zzz_server/src/account.rs
@@ -0,0 +1,590 @@
+use std::sync::Arc;
+
+use argon2::password_hash::{PasswordHasher, PasswordVerifier, SaltString};
+use base64::Engine;
+use argon2::Argon2;
+use axum::extract::{Path, State};
+use axum::http::{HeaderMap, StatusCode};
+use axum::response::{IntoResponse, Response};
+use axum::Json;
+use rand::Rng;
+use serde::{Deserialize, Serialize};
+
+use crate::auth::{self, SESSION_AGE_MAX, SESSION_COOKIE_NAME};
+use crate::db;
+use crate::handlers::App;
+
+// -- Shared helpers -----------------------------------------------------------
+
+/// Current time in seconds since epoch.
+pub fn now_secs() -> u64 { + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_secs() +} + +/// Generate a cryptographically random session token (base64url, 32 bytes). +pub fn generate_session_token() -> String { + let mut bytes = [0u8; 32]; + rand::rng().fill(&mut bytes); + base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(bytes) +} + +/// Build a signed `Set-Cookie` header value for a session. +pub fn sign_session_cookie(keyring: &auth::Keyring, session_token: &str) -> String { + let cookie_value = keyring.sign(&format!( + "{session_token}:{}", + now_secs() + SESSION_AGE_MAX + )); + format!( + "{SESSION_COOKIE_NAME}={cookie_value}; Path=/; HttpOnly; Secure; SameSite=Strict; Max-Age={SESSION_AGE_MAX}" + ) +} + +/// Build a `Set-Cookie` header that clears the session cookie. +fn clear_session_cookie() -> String { + format!( + "{SESSION_COOKIE_NAME}=; Path=/; HttpOnly; Secure; SameSite=Strict; Max-Age=0" + ) +} + +/// Short error response constructor. +fn error_json(status: StatusCode, error: &str) -> Response { + ( + status, + Json(ErrorBody { + error: error.to_owned(), + }), + ) + .into_response() +} + +/// Dummy Argon2 hash for enumeration prevention — run argon2 verify against +/// a known hash when the account doesn't exist, so timing is consistent. 
+const DUMMY_HASH: &str = "$argon2id$v=19$m=19456,t=2,p=1$AAAAAAAAAAAAAAAAAAAAAA$AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"; + +// -- Types -------------------------------------------------------------------- + +#[derive(Deserialize)] +pub struct LoginInput { + username: String, + password: String, +} + +#[derive(Deserialize)] +pub struct PasswordInput { + current_password: String, + new_password: String, +} + +#[derive(Serialize)] +struct LoginSuccess { + ok: bool, + username: String, + account_id: String, +} + +#[derive(Serialize)] +struct ErrorBody { + error: String, +} + +#[derive(Serialize)] +struct SessionInfo { + id: String, + account_id: String, + created_at: String, + last_seen_at: String, + expires_at: String, +} + +#[derive(Serialize)] +struct SessionsListResponse { + sessions: Vec, +} + +#[derive(Serialize)] +struct OkResponse { + ok: bool, +} + +#[derive(Serialize)] +struct RevokeResponse { + ok: bool, + revoked: bool, +} + +// -- GET /status -------------------------------------------------------------- + +/// Response for authenticated status check. +#[derive(Serialize)] +struct StatusSuccess { + account: StatusAccount, + permits: Vec, +} + +#[derive(Serialize)] +struct StatusAccount { + id: String, + username: String, +} + +#[derive(Serialize)] +struct StatusPermit { + role: String, +} + +/// Response for unauthenticated status check (401). +#[derive(Serialize)] +struct StatusUnauthenticated { + error: &'static str, + #[serde(skip_serializing_if = "Option::is_none")] + bootstrap_available: Option, +} + +/// `GET /status` — current account info or 401 with bootstrap status. 
+/// +/// Mirrors `fuz_app`'s `create_account_status_route_spec`: +/// - 200 with account + permits if authenticated +/// - 401 with optional `bootstrap_available` if not +pub async fn status_handler( + State(app): State>, + headers: HeaderMap, +) -> Response { + // Try to resolve auth + let resolved = auth::resolve_auth_from_headers( + &headers, + &app.keyring, + &app.db_pool, + app.daemon_token_state.as_ref(), + ) + .await; + + match resolved { + Some(r) => { + let account = StatusAccount { + id: r.context.account.id.to_string(), + username: r.context.account.username.clone(), + }; + let permits: Vec = r + .context + .permits + .iter() + .map(|p| StatusPermit { + role: p.role.clone(), + }) + .collect(); + Json(StatusSuccess { account, permits }).into_response() + } + None => { + let bootstrap = if app + .bootstrap_available + .load(std::sync::atomic::Ordering::Relaxed) + { + Some(true) + } else { + None + }; + ( + StatusCode::UNAUTHORIZED, + Json(StatusUnauthenticated { + error: "authentication_required", + bootstrap_available: bootstrap, + }), + ) + .into_response() + } + } +} + +// -- POST /login -------------------------------------------------------------- + +/// `POST /login` — authenticate with username + password, create session. 
+/// +/// Mirrors `fuz_app`'s `login_account` from `account_routes.ts`: +/// - Case-insensitive username lookup +/// - Argon2 password verification +/// - Enumeration prevention (dummy hash on missing account) +/// - Session creation + signed cookie +pub async fn login_handler( + State(app): State>, + Json(input): Json, +) -> Response { + match login_inner(&app, input).await { + Ok(response) | Err(response) => response, + } +} + +async fn login_inner(app: &App, input: LoginInput) -> Result { + if input.username.is_empty() { + return Err(error_json(StatusCode::BAD_REQUEST, "username required")); + } + + let client = app.db_pool.get().await.map_err(|e| { + tracing::error!(error = %e, "login: db pool error"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Case-insensitive username lookup + let account_with_hash = db::query_account_with_password_hash(&client, &input.username) + .await + .map_err(|e| { + tracing::error!(error = %e, "login: account query failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Verify password (or run against dummy hash for enumeration prevention) + let (password_hash, account) = match account_with_hash { + Some(row) => (row.password_hash.clone(), Some(row)), + None => (DUMMY_HASH.to_owned(), None), + }; + + let password_valid = verify_password(input.password.clone(), password_hash).await; + + let Some(account) = account.filter(|_| password_valid) else { + return Err(error_json(StatusCode::UNAUTHORIZED, "invalid_credentials")); + }; + + // Create session + let session_token = generate_session_token(); + let token_hash = auth::hash_session_token(&session_token); + db::query_create_session(&client, &token_hash, &account.id) + .await + .map_err(|e| { + tracing::error!(error = %e, "login: session creation failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Build response with session cookie + let cookie = sign_session_cookie(&app.keyring, 
&session_token); + let mut headers = HeaderMap::new(); + if let Ok(val) = cookie.parse() { + headers.insert(axum::http::header::SET_COOKIE, val); + } + + tracing::info!(username = %input.username, "login successful"); + + Ok(( + StatusCode::OK, + headers, + Json(LoginSuccess { + ok: true, + username: account.username, + account_id: account.id.to_string(), + }), + ) + .into_response()) +} + +/// Verify a password against an Argon2 hash on a blocking thread. +/// +/// Returns `false` on any error (hash parse failure, wrong password, task panic). +async fn verify_password(password: String, hash: String) -> bool { + tokio::task::spawn_blocking(move || { + let Ok(parsed) = argon2::PasswordHash::new(&hash) else { + return false; + }; + Argon2::default() + .verify_password(password.as_bytes(), &parsed) + .is_ok() + }) + .await + .unwrap_or(false) +} + +// -- POST /logout ------------------------------------------------------------- + +/// `POST /logout` — invalidate current session, close WebSocket connections. +/// +/// Requires authenticated session (cookie). First real caller for +/// `close_sockets_for_session`. 
+pub async fn logout_handler( + State(app): State>, + headers: HeaderMap, +) -> Response { + match logout_inner(&app, &headers).await { + Ok(response) | Err(response) => response, + } +} + +async fn logout_inner(app: &App, headers: &HeaderMap) -> Result { + // Resolve session from cookie + let resolved = auth::resolve_auth_from_headers( + headers, + &app.keyring, + &app.db_pool, + app.daemon_token_state.as_ref(), + ) + .await + .ok_or_else(|| error_json(StatusCode::UNAUTHORIZED, "unauthenticated"))?; + + // Only cookie sessions can be logged out + if resolved.credential_type != auth::CredentialType::Session { + return Err(error_json(StatusCode::BAD_REQUEST, "session_required")); + } + + let token_hash = resolved.token_hash.as_deref().ok_or_else(|| { + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + let client = app.db_pool.get().await.map_err(|e| { + tracing::error!(error = %e, "logout: db pool error"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Delete session from DB + db::query_delete_session(&client, token_hash) + .await + .map_err(|e| { + tracing::error!(error = %e, "logout: session deletion failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Close WebSocket connections for this session + let closed = app.close_sockets_for_session(token_hash); + if closed > 0 { + tracing::info!(count = closed, "logout: closed WebSocket connections"); + } + + // Clear cookie + let mut response_headers = HeaderMap::new(); + if let Ok(val) = clear_session_cookie().parse() { + response_headers.insert(axum::http::header::SET_COOKIE, val); + } + + tracing::info!(username = %resolved.context.account.username, "logout successful"); + + Ok((StatusCode::OK, response_headers, Json(OkResponse { ok: true })).into_response()) +} + +// -- POST /password ----------------------------------------------------------- + +/// `POST /password` — change password, revoke all sessions + tokens, close 
sockets. +/// +/// Requires authenticated session. +pub async fn password_handler( + State(app): State>, + headers: HeaderMap, + Json(input): Json, +) -> Response { + match password_inner(&app, &headers, input).await { + Ok(response) | Err(response) => response, + } +} + +async fn password_inner( + app: &App, + headers: &HeaderMap, + input: PasswordInput, +) -> Result { + // Resolve auth + let resolved = auth::resolve_auth_from_headers( + headers, + &app.keyring, + &app.db_pool, + app.daemon_token_state.as_ref(), + ) + .await + .ok_or_else(|| error_json(StatusCode::UNAUTHORIZED, "unauthenticated"))?; + + if resolved.credential_type != auth::CredentialType::Session { + return Err(error_json(StatusCode::BAD_REQUEST, "session_required")); + } + + // Validate new password + if input.new_password.len() < 12 { + return Err(error_json( + StatusCode::BAD_REQUEST, + "new password must be at least 12 characters", + )); + } + + let account_id = resolved.context.account.id; + + let client = app.db_pool.get().await.map_err(|e| { + tracing::error!(error = %e, "password: db pool error"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Verify current password + let account_with_hash = db::query_account_with_password_hash_by_id(&client, &account_id) + .await + .map_err(|e| { + tracing::error!(error = %e, "password: account query failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })? 
+ .ok_or_else(|| error_json(StatusCode::UNAUTHORIZED, "invalid_credentials"))?; + + if !verify_password(input.current_password.clone(), account_with_hash.password_hash).await { + return Err(error_json(StatusCode::UNAUTHORIZED, "invalid_credentials")); + } + + // Hash new password + let new_hash = hash_password(input.new_password.clone()).await.map_err(|e| { + tracing::error!(error = %e, "password: hashing failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Update password, revoke all sessions + API tokens for this account + db::query_update_password(&client, &account_id, &new_hash) + .await + .map_err(|e| { + tracing::error!(error = %e, "password: update failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + db::query_delete_all_sessions_for_account(&client, &account_id) + .await + .map_err(|e| { + tracing::error!(error = %e, "password: session revocation failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + db::query_delete_all_tokens_for_account(&client, &account_id) + .await + .map_err(|e| { + tracing::error!(error = %e, "password: token revocation failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Close all WebSocket connections for this account + let closed = app.close_sockets_for_account(account_id); + if closed > 0 { + tracing::info!(count = closed, "password change: closed WebSocket connections"); + } + + // Clear cookie + let mut response_headers = HeaderMap::new(); + if let Ok(val) = clear_session_cookie().parse() { + response_headers.insert(axum::http::header::SET_COOKIE, val); + } + + tracing::info!(username = %resolved.context.account.username, "password changed"); + + Ok((StatusCode::OK, response_headers, Json(OkResponse { ok: true })).into_response()) +} + +/// Hash a password with Argon2id on a blocking thread. 
+pub async fn hash_password(password: String) -> Result { + tokio::task::spawn_blocking(move || { + // Generate 16 random bytes for the salt (standard Argon2 salt size), + // then encode as base64 for SaltString. + let mut salt_bytes = [0u8; 16]; + rand::rng().fill(&mut salt_bytes); + let salt = SaltString::encode_b64(&salt_bytes) + .map_err(|_| argon2::password_hash::Error::SaltInvalid(argon2::password_hash::errors::InvalidValue::Malformed))?; + let argon2 = Argon2::default(); + let hash = argon2.hash_password(password.as_bytes(), &salt)?; + Ok(hash.to_string()) + }) + .await + .unwrap_or(Err(argon2::password_hash::Error::Algorithm)) +} + +// -- GET /sessions ------------------------------------------------------------ + +/// `GET /sessions` — list all sessions for the authenticated account. +pub async fn sessions_list_handler( + State(app): State>, + headers: HeaderMap, +) -> Response { + match sessions_list_inner(&app, &headers).await { + Ok(response) | Err(response) => response, + } +} + +async fn sessions_list_inner(app: &App, headers: &HeaderMap) -> Result { + let resolved = auth::resolve_auth_from_headers( + headers, + &app.keyring, + &app.db_pool, + app.daemon_token_state.as_ref(), + ) + .await + .ok_or_else(|| error_json(StatusCode::UNAUTHORIZED, "unauthenticated"))?; + + let client = app.db_pool.get().await.map_err(|e| { + tracing::error!(error = %e, "sessions list: db pool error"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + let rows = db::query_sessions_for_account(&client, &resolved.context.account.id) + .await + .map_err(|e| { + tracing::error!(error = %e, "sessions list: query failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + let account_id_str = resolved.context.account.id.to_string(); + let sessions: Vec = rows + .into_iter() + .map(|r| SessionInfo { + id: r.id, + account_id: account_id_str.clone(), + created_at: r.created_at, + last_seen_at: r.last_seen_at, + expires_at: 
r.expires_at, + }) + .collect(); + + Ok(Json(SessionsListResponse { sessions }).into_response()) +} + +// -- POST /sessions/:id/revoke ------------------------------------------------ + +/// `POST /sessions/:id/revoke` — revoke a specific session (scoped to own account). +pub async fn session_revoke_handler( + State(app): State>, + headers: HeaderMap, + Path(session_id): Path, +) -> Response { + match session_revoke_inner(&app, &headers, &session_id).await { + Ok(response) | Err(response) => response, + } +} + +async fn session_revoke_inner( + app: &App, + headers: &HeaderMap, + session_id: &str, +) -> Result { + let resolved = auth::resolve_auth_from_headers( + headers, + &app.keyring, + &app.db_pool, + app.daemon_token_state.as_ref(), + ) + .await + .ok_or_else(|| error_json(StatusCode::UNAUTHORIZED, "unauthenticated"))?; + + let client = app.db_pool.get().await.map_err(|e| { + tracing::error!(error = %e, "session revoke: db pool error"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Delete session — scoped to the authenticated account + let deleted = db::query_delete_session_for_account( + &client, + session_id, + &resolved.context.account.id, + ) + .await + .map_err(|e| { + tracing::error!(error = %e, "session revoke: delete failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + if !deleted { + // Idempotent — session already gone or belongs to another account + return Ok(Json(RevokeResponse { ok: true, revoked: false }).into_response()); + } + + // Close WebSocket connections for this session + let closed = app.close_sockets_for_session(session_id); + if closed > 0 { + tracing::info!(count = closed, "session revoke: closed WebSocket connections"); + } + + Ok(Json(RevokeResponse { ok: true, revoked: true }).into_response()) +} diff --git a/crates/zzz_server/src/auth.rs b/crates/zzz_server/src/auth.rs new file mode 100644 index 000000000..0cdc84b9b --- /dev/null +++ b/crates/zzz_server/src/auth.rs 
@@ -0,0 +1,660 @@ +use base64::Engine; +use base64::engine::general_purpose::STANDARD as BASE64; +use hmac::{Hmac, Mac}; +use sha2::Sha256; + +use crate::daemon_token::SharedDaemonTokenState; +use crate::db::{ + AccountRow, ActorRow, PermitRow, + query_account_by_id, query_actor_by_account, query_permits_for_actor, + query_session_get_valid, query_session_touch, + query_validate_api_token, query_api_token_touch, +}; +use fuz_common::JsonRpcError; + +type HmacSha256 = Hmac; + +// -- Keyring ------------------------------------------------------------------ + +/// Cookie signing keyring. +/// +/// First key signs, all keys verify (supports key rotation). +/// Mirrors `fuz_app`'s `src/lib/auth/keyring.ts`. +pub struct Keyring { + keys: Vec>, +} + +const KEY_SEPARATOR: &str = "__"; +const MIN_KEY_LENGTH: usize = 32; + +impl Keyring { + /// Create a keyring from `SECRET_COOKIE_KEYS` env value. + /// + /// Keys are separated by `__`. First key signs, all verify. + /// Returns `None` if no valid keys. + pub fn new(env_value: &str) -> Option { + let keys: Vec> = env_value + .split(KEY_SEPARATOR) + .filter(|k| !k.is_empty()) + .map(|k| k.as_bytes().to_vec()) + .collect(); + + if keys.is_empty() { + return None; + } + Some(Self { keys }) + } + + /// Validate key configuration. Returns errors if any. + pub fn validate(env_value: &str) -> Vec { + let keys: Vec<&str> = env_value + .split(KEY_SEPARATOR) + .filter(|k| !k.is_empty()) + .collect(); + + if keys.is_empty() { + return vec!["SECRET_COOKIE_KEYS is required".to_owned()]; + } + + let mut errors = Vec::new(); + for (i, key) in keys.iter().enumerate() { + if key.len() < MIN_KEY_LENGTH { + errors.push(format!( + "Key {} is too short ({} chars, min {MIN_KEY_LENGTH})", + i + 1, + key.len() + )); + } + } + errors + } + + /// Sign a value with HMAC-SHA256 using the primary (first) key. + /// + /// Returns `value.base64(signature)`. 
+ #[allow(clippy::expect_used)] // HMAC-SHA256 accepts any key length + pub fn sign(&self, value: &str) -> String { + let mut mac = + HmacSha256::new_from_slice(&self.keys[0]).expect("HMAC key length is always valid"); + mac.update(value.as_bytes()); + let signature = mac.finalize().into_bytes(); + let sig_b64 = BASE64.encode(signature); + format!("{value}.{sig_b64}") + } + + /// Verify a signed value. Tries all keys for rotation support. + /// + /// Returns `(original_value, key_index)` or `None` if invalid. + #[allow(clippy::expect_used)] // HMAC-SHA256 accepts any key length + pub fn verify(&self, signed_value: &str) -> Option<(String, usize)> { + let dot_index = signed_value.rfind('.')?; + let value = &signed_value[..dot_index]; + let sig_b64 = &signed_value[dot_index + 1..]; + + let signature = BASE64.decode(sig_b64).ok()?; + + for (i, key) in self.keys.iter().enumerate() { + let mut mac = + HmacSha256::new_from_slice(key).expect("HMAC key length is always valid"); + mac.update(value.as_bytes()); + if mac.verify_slice(&signature).is_ok() { + return Some((value.to_owned(), i)); + } + } + None + } +} + +// -- Cookie parsing ----------------------------------------------------------- + +/// Cookie name for session cookies (matches `fuz_app`'s `fuz_session`). +pub const SESSION_COOKIE_NAME: &str = "fuz_session"; + +/// Cookie max age in seconds (30 days — aligned with `AUTH_SESSION_LIFETIME_MS`). +pub const SESSION_AGE_MAX: u64 = 60 * 60 * 24 * 30; + +/// Separator between identity payload and `expires_at` in the cookie value. +const VALUE_SEPARATOR: char = ':'; + +/// Parse the session token from a Cookie header value. +/// +/// Extracts the `fuz_session` cookie, verifies its HMAC signature, +/// checks expiration, and returns the raw session token. 
+pub fn parse_session_from_cookies(cookie_header: &str, keyring: &Keyring) -> Option { + // Find the fuz_session cookie value + let signed_value = extract_cookie_value(cookie_header, SESSION_COOKIE_NAME)?; + + // Verify signature + let (value, _key_index) = keyring.verify(signed_value)?; + + // Split on last ':' to get identity and expires_at + let last_sep = value.rfind(VALUE_SEPARATOR)?; + let identity = &value[..last_sep]; + let expires_at_str = &value[last_sep + 1..]; + + // Check expiration (cookie timestamps are always positive and fit in u64) + let expires_at: u64 = expires_at_str.parse().ok()?; + let now = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_secs(); + + if expires_at <= now { + return None; + } + + // Identity is the raw session token + if identity.is_empty() { + return None; + } + + Some(identity.to_owned()) +} + +/// Extract a named cookie value from a Cookie header string. +/// +/// Handles the `name=value; name2=value2` format. +fn extract_cookie_value<'a>(cookie_header: &'a str, name: &str) -> Option<&'a str> { + for part in cookie_header.split(';') { + let trimmed = part.trim(); + if let Some(rest) = trimmed.strip_prefix(name) + && let Some(value) = rest.strip_prefix('=') { + return Some(value); + } + } + None +} + +/// Hash a session token to its storage key using blake3. +/// +/// Mirrors `fuz_app`'s `hash_session_token` from `session_queries.ts`. +pub fn hash_session_token(token: &str) -> String { + blake3::hash(token.as_bytes()).to_hex().to_string() +} + +// -- Auth errors -------------------------------------------------------------- + +/// Errors from building a request context (pool or query failures). 
+#[derive(Debug, thiserror::Error)] +pub enum AuthError { + #[error("pool error: {0}")] + Pool(#[from] deadpool_postgres::PoolError), + #[error("query error: {0}")] + Query(#[from] tokio_postgres::Error), +} + +// -- Credential type ---------------------------------------------------------- + +/// How the request was authenticated. +/// +/// Mirrors `fuz_app`'s `credential_type` context key: +/// - `Session` — cookie-based session (`fuz_session`) +/// - `ApiToken` — `Authorization: Bearer ` looked up in `api_token` table +/// - `DaemonToken` — `X-Daemon-Token` header with timing-safe validation +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum CredentialType { + Session, + ApiToken, + DaemonToken, +} + +// -- Request context ---------------------------------------------------------- + +/// Authenticated request context — account + actor + active permits. +/// +/// Built from a valid session cookie. Passed to handlers via `Ctx`. +#[derive(Debug, Clone)] +pub struct RequestContext { + pub account: AccountRow, + pub actor: ActorRow, + pub permits: Vec, +} + +impl RequestContext { + /// Check if this context has an active permit for the given role. + pub fn has_role(&self, role: &str) -> bool { + self.permits.iter().any(|p| p.role == role) + } +} + +/// Build a `RequestContext` from a session token. +/// +/// Pipeline: cookie → verify signature → hash token → session lookup → +/// account → actor → permits. 
+pub async fn build_request_context(
+    pool: &deadpool_postgres::Pool,
+    session_token: &str,
+) -> Result<Option<RequestContext>, AuthError> {
+    let client = pool.get().await?;
+
+    // Hash token → look up session
+    let token_hash = hash_session_token(session_token);
+    let session = query_session_get_valid(&client, &token_hash).await?;
+
+    let Some(session) = session else {
+        return Ok(None);
+    };
+
+    // Build context: account → actor → permits
+    let account = query_account_by_id(&client, &session.account_id).await?;
+
+    let Some(account) = account else {
+        return Ok(None);
+    };
+
+    let actor = query_actor_by_account(&client, &account.id).await?;
+
+    let Some(actor) = actor else {
+        return Ok(None);
+    };
+
+    let permits = query_permits_for_actor(&client, &actor.id).await?;
+
+    // Touch session (fire-and-forget — don't block the request)
+    let touch_pool = pool.clone();
+    let touch_hash = token_hash.clone();
+    tokio::spawn(async move {
+        if let Ok(client) = touch_pool.get().await
+            && let Err(e) = query_session_touch(&client, &touch_hash).await {
+            tracing::warn!(error = %e, "session touch failed");
+        }
+    });
+
+    Ok(Some(RequestContext {
+        account,
+        actor,
+        permits,
+    }))
+}
+
+// -- Per-action auth check ----------------------------------------------------
+
+/// Auth level for an action spec.
+///
+/// Mirrors the `auth` field from zzz's `action_specs.ts`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum ActionAuth {
+    /// No auth required.
+    Public,
+    /// Must have a valid session.
+    Authenticated,
+    /// Must have keeper role: requires the `daemon_token` credential type
+    /// AND the keeper role permit. API tokens and cookie sessions are
+    /// rejected even if the account holds the keeper permit (enforced by
+    /// `check_action_auth`).
+    Keeper,
+}
+
+/// JSON-RPC error codes for auth failures.
+///
+/// Matches `fuz_app/src/lib/http/jsonrpc_errors.ts`:
+/// - unauthenticated: -32001 → HTTP 401
+/// - forbidden: -32002 → HTTP 403
+const JSONRPC_UNAUTHENTICATED: i32 = -32001;
+const JSONRPC_FORBIDDEN: i32 = -32002;
+
+/// Check per-action auth.
+/// +/// Returns `None` if authorized, `Some(error)` if not. +/// Mirrors `fuz_app`'s `check_action_auth` from `action_rpc.ts` and +/// the keeper check from `register_websocket_actions.ts`. +/// +/// Keeper actions require both `daemon_token` credential type AND the +/// keeper role permit — API tokens with keeper permit are rejected. +pub fn check_action_auth( + auth: ActionAuth, + context: Option<&RequestContext>, + credential_type: Option, +) -> Option { + match auth { + ActionAuth::Public => None, + ActionAuth::Authenticated => { + if context.is_some() { + None + } else { + Some(JsonRpcError { + code: JSONRPC_UNAUTHENTICATED, + message: "unauthenticated".to_owned(), + data: None, + }) + } + } + ActionAuth::Keeper => { + let Some(ctx) = context else { + return Some(JsonRpcError { + code: JSONRPC_UNAUTHENTICATED, + message: "unauthenticated".to_owned(), + data: None, + }); + }; + // Keeper actions require daemon_token credential type AND keeper role. + // API tokens and session cookies cannot access keeper actions even if + // the account has the keeper permit. + if credential_type != Some(CredentialType::DaemonToken) + || !ctx.has_role("keeper") + { + Some(JsonRpcError { + code: JSONRPC_FORBIDDEN, + message: "forbidden".to_owned(), + data: None, + }) + } else { + None + } + } + } +} + +/// Get the auth level for a method name. +/// +/// Mirrors the `auth` field from each action spec in `action_specs.ts`. 
+pub fn method_auth(method: &str) -> ActionAuth { + match method { + "ping" => ActionAuth::Public, + + // All other implemented methods require authentication + "workspace_list" | "workspace_open" | "workspace_close" | "session_load" + | "diskfile_update" | "diskfile_delete" | "directory_create" + | "completion_create" | "ollama_list" | "ollama_ps" | "ollama_show" + | "ollama_pull" | "ollama_delete" | "ollama_copy" | "ollama_create" + | "ollama_unload" | "provider_load_status" + | "terminal_create" | "terminal_data_send" | "terminal_resize" | "terminal_close" => { + ActionAuth::Authenticated + } + + "provider_update_api_key" => ActionAuth::Keeper, + + // Unknown methods — will hit method_not_found in dispatch anyway, + // but require auth so we don't leak method existence to unauthenticated callers + _ => ActionAuth::Authenticated, + } +} + +// -- Origin verification ------------------------------------------------------ + +/// Check if a request origin is allowed. +/// +/// Supports patterns: exact match, `http://localhost:*` (any port), +/// `https://*.example.com` (subdomain wildcard). +pub fn check_origin(origin: &str, allowed_patterns: &[String]) -> bool { + if allowed_patterns.is_empty() { + return true; // no restriction configured + } + + for pattern in allowed_patterns { + if pattern == origin { + return true; + } + // Wildcard port: http://localhost:* + if let Some(prefix) = pattern.strip_suffix(":*") + && let Some(rest) = origin.strip_prefix(prefix) + && rest.starts_with(':') && rest[1..].chars().all(|c| c.is_ascii_digit()) { + return true; + } + // Subdomain wildcard: https://*.example.com + if let Some(suffix) = pattern.strip_prefix("https://*.") + && let Some(host) = origin.strip_prefix("https://") + && host.ends_with(suffix) + && host.len() > suffix.len() + && host.as_bytes()[host.len() - suffix.len() - 1] == b'.' + { + return true; + } + } + false +} + +/// Resolve request context from HTTP headers (Cookie header). 
+/// +/// Returns `None` if no session cookie or session is invalid. +/// Used by both HTTP RPC and WebSocket upgrade handlers. +/// Resolved auth context with connection tracking metadata. +pub struct ResolvedAuth { + pub context: RequestContext, + /// blake3 hash of the session token (for targeted socket revocation). + /// `None` for bearer token connections (revocable only via account-level revocation). + pub token_hash: Option, + /// How this request was authenticated. + pub credential_type: CredentialType, +} + +pub async fn resolve_auth_from_headers( + headers: &axum::http::HeaderMap, + keyring: &Keyring, + pool: &deadpool_postgres::Pool, + daemon_token_state: Option<&SharedDaemonTokenState>, +) -> Option { + // Try daemon token first (highest priority, matches fuz_app middleware order) + if let Some(state) = daemon_token_state + && let Some(resolved) = resolve_daemon_token_from_headers(headers, state, pool).await + { + return Some(resolved); + } + + // Try cookie auth + if let Some(resolved) = resolve_cookie_from_headers(headers, keyring, pool).await { + return Some(resolved); + } + + // Fall back to bearer token auth + resolve_bearer_from_headers(headers, pool).await +} + +/// Resolve auth from cookie session (`fuz_session`). +async fn resolve_cookie_from_headers( + headers: &axum::http::HeaderMap, + keyring: &Keyring, + pool: &deadpool_postgres::Pool, +) -> Option { + let cookie_header = headers + .get(axum::http::header::COOKIE)? + .to_str() + .ok()?; + + let session_token = parse_session_from_cookies(cookie_header, keyring)?; + let token_hash = hash_session_token(&session_token); + + match build_request_context(pool, &session_token).await { + Ok(Some(context)) => Some(ResolvedAuth { + context, + token_hash: Some(token_hash), + credential_type: CredentialType::Session, + }), + Ok(None) => None, + Err(e) => { + tracing::warn!(error = %e, "cookie auth context build failed"); + None + } + } +} + +/// Resolve auth from `Authorization: Bearer ` header. 
+///
+/// Mirrors `fuz_app`'s `bearer_auth.ts`:
+/// - Case-insensitive "Bearer " prefix (RFC 7235 §2.1)
+/// - Rejects requests with `Origin` or `Referer` headers (defense-in-depth
+///   against browser-initiated bearer usage)
+/// - Hashes token with blake3, looks up in `api_token` table
+/// - Touches `last_used_at` fire-and-forget
+async fn resolve_bearer_from_headers(
+    headers: &axum::http::HeaderMap,
+    pool: &deadpool_postgres::Pool,
+) -> Option<ResolvedAuth> {
+    let auth_header = headers
+        .get(axum::http::header::AUTHORIZATION)?
+        .to_str()
+        .ok()?;
+
+    // Case-insensitive "Bearer " prefix check (RFC 7235 §2.1)
+    if auth_header.len() < 7 || !auth_header[..7].eq_ignore_ascii_case("bearer ") {
+        return None;
+    }
+
+    // Defense-in-depth: reject bearer tokens from browser contexts
+    if headers.contains_key("origin") || headers.contains_key("referer") {
+        tracing::debug!("bearer auth rejected: browser context (Origin/Referer present)");
+        return None;
+    }
+
+    let raw_token = &auth_header[7..];
+    if raw_token.is_empty() {
+        return None;
+    }
+
+    // Hash and look up in api_token table
+    let token_hash = blake3::hash(raw_token.as_bytes()).to_hex().to_string();
+
+    let client = match pool.get().await {
+        Ok(c) => c,
+        Err(e) => {
+            tracing::warn!(error = %e, "bearer auth pool error");
+            return None;
+        }
+    };
+
+    let token_row = match query_validate_api_token(&client, &token_hash).await {
+        Ok(Some(row)) => row,
+        Ok(None) => return None,
+        Err(e) => {
+            tracing::warn!(error = %e, "bearer auth token query failed");
+            return None;
+        }
+    };
+
+    // Build request context from the token's account
+    let account = match query_account_by_id(&client, &token_row.account_id).await {
+        Ok(Some(a)) => a,
+        Ok(None) => return None,
+        Err(e) => {
+            tracing::warn!(error = %e, "bearer auth account query failed");
+            return None;
+        }
+    };
+
+    let actor = match query_actor_by_account(&client, &account.id).await {
+        Ok(Some(a)) => a,
+        Ok(None) => return None,
+        Err(e) => {
+            tracing::warn!(error =
%e, "bearer auth actor query failed"); + return None; + } + }; + + let permits = match query_permits_for_actor(&client, &actor.id).await { + Ok(p) => p, + Err(e) => { + tracing::warn!(error = %e, "bearer auth permits query failed"); + return None; + } + }; + + // Touch token usage (fire-and-forget) + let touch_pool = pool.clone(); + let touch_id = token_row.id.clone(); + tokio::spawn(async move { + if let Ok(client) = touch_pool.get().await + && let Err(e) = query_api_token_touch(&client, &touch_id).await + { + tracing::warn!(error = %e, "api token touch failed"); + } + }); + + Some(ResolvedAuth { + context: RequestContext { + account, + actor, + permits, + }, + token_hash: None, // bearer connections have no session token_hash + credential_type: CredentialType::ApiToken, + }) +} + +/// Header name for daemon token authentication. +const DAEMON_TOKEN_HEADER: &str = "x-daemon-token"; + +/// Resolve auth from `X-Daemon-Token` header. +/// +/// Validates the token against current and previous daemon tokens using +/// timing-safe comparison. If valid, resolves the keeper account from +/// `state.keeper_account_id` and builds a `RequestContext`. +/// +/// Mirrors `fuz_app`'s daemon token middleware — daemon token overrides +/// all other auth methods (highest trust: requires filesystem access to read). 
+async fn resolve_daemon_token_from_headers(
+    headers: &axum::http::HeaderMap,
+    daemon_state: &SharedDaemonTokenState,
+    pool: &deadpool_postgres::Pool,
+) -> Option<ResolvedAuth> {
+    let token_value = headers.get(DAEMON_TOKEN_HEADER)?.to_str().ok()?;
+
+    if token_value.is_empty() {
+        return None;
+    }
+
+    // Read lock for validation
+    let state = daemon_state.read().await;
+    if !crate::daemon_token::validate_daemon_token(token_value, &state) {
+        tracing::debug!("daemon token validation failed");
+        return None;
+    }
+
+    // Valid token — resolve keeper account
+    let keeper_account_id = state.keeper_account_id?;
+    drop(state); // release read lock before DB queries
+
+    let client = match pool.get().await {
+        Ok(c) => c,
+        Err(e) => {
+            tracing::warn!(error = %e, "daemon token auth pool error");
+            return None;
+        }
+    };
+
+    let account = match query_account_by_id(&client, &keeper_account_id).await {
+        Ok(Some(a)) => a,
+        Ok(None) => {
+            tracing::warn!("daemon token keeper account not found in DB");
+            return None;
+        }
+        Err(e) => {
+            tracing::warn!(error = %e, "daemon token account query failed");
+            return None;
+        }
+    };
+
+    let actor = match query_actor_by_account(&client, &account.id).await {
+        Ok(Some(a)) => a,
+        Ok(None) => return None,
+        Err(e) => {
+            tracing::warn!(error = %e, "daemon token actor query failed");
+            return None;
+        }
+    };
+
+    let permits = match query_permits_for_actor(&client, &actor.id).await {
+        Ok(p) => p,
+        Err(e) => {
+            tracing::warn!(error = %e, "daemon token permits query failed");
+            return None;
+        }
+    };
+
+    Some(ResolvedAuth {
+        context: RequestContext {
+            account,
+            actor,
+            permits,
+        },
+        token_hash: None, // daemon token connections have no session token_hash
+        credential_type: CredentialType::DaemonToken,
+    })
+}
+
+/// Parse `ALLOWED_ORIGINS` env value into a list of patterns.
+pub fn parse_allowed_origins(env_value: &str) -> Vec<String> {
+    env_value
+        .split(',')
+        .map(str::trim)
+        .filter(|s| !s.is_empty())
+        .map(String::from)
+        .collect()
+}
diff --git a/crates/zzz_server/src/bootstrap.rs b/crates/zzz_server/src/bootstrap.rs
new file mode 100644
index 000000000..a1e39c5fd
--- /dev/null
+++ b/crates/zzz_server/src/bootstrap.rs
@@ -0,0 +1,212 @@
+use std::sync::Arc;
+
+use axum::extract::State;
+use axum::http::{HeaderMap, StatusCode};
+use axum::response::{IntoResponse, Response};
+use axum::Json;
+use serde::{Deserialize, Serialize};
+
+use crate::account::{generate_session_token, hash_password, sign_session_cookie};
+use crate::auth;
+use crate::db;
+use crate::handlers::App;
+
+// -- Types --------------------------------------------------------------------
+
+#[derive(Deserialize)]
+pub struct BootstrapInput {
+    token: String,
+    username: String,
+    password: String,
+}
+
+#[derive(Serialize)]
+struct BootstrapSuccess {
+    ok: bool,
+    username: String,
+}
+
+#[derive(Serialize)]
+struct BootstrapErrorBody {
+    error: String,
+}
+
+/// Short error response constructor.
+fn error_json(status: StatusCode, error: &str) -> Response {
+    (status, Json(BootstrapErrorBody { error: error.to_owned() })).into_response()
+}
+
+// -- Handler ------------------------------------------------------------------
+
+/// `POST /bootstrap` — one-shot endpoint to create the first admin account.
+///
+/// Mirrors `fuz_app`'s `bootstrap_routes.ts` / `bootstrap_account.ts`:
+/// 1. Read and timing-safe-compare bootstrap token
+/// 2. Hash password with Argon2
+/// 3. In a transaction: acquire bootstrap lock, create account + actor + permits
+/// 4. Create session + set cookie
+/// 5.
Delete token file
+pub async fn bootstrap_handler(
+    State(app): State<Arc<App>>,
+    Json(input): Json<BootstrapInput>,
+) -> Response {
+    match bootstrap_inner(&app, input).await {
+        Ok(response) | Err(response) => response,
+    }
+}
+
+/// Inner bootstrap logic — uses `Result<Response, Response>` so early returns
+/// via `?` produce error responses without repeating the pattern at every step.
+async fn bootstrap_inner(app: &App, input: BootstrapInput) -> Result<Response, Response> {
+    // Short-circuit if no bootstrap configured
+    let Some(ref token_path) = app.bootstrap_token_path else {
+        return Err(error_json(StatusCode::NOT_FOUND, "bootstrap_not_configured"));
+    };
+
+    // Check bootstrap lock (quick check before token comparison)
+    if !app.bootstrap_available.load(std::sync::atomic::Ordering::Relaxed) {
+        return Err(error_json(StatusCode::FORBIDDEN, "already_bootstrapped"));
+    }
+
+    // 1. Read and verify bootstrap token
+    let expected_token = tokio::fs::read_to_string(token_path)
+        .await
+        .map(|t| t.trim().to_owned())
+        .map_err(|_| error_json(StatusCode::NOT_FOUND, "token_file_missing"))?;
+
+    if !timing_safe_eq(input.token.as_bytes(), expected_token.as_bytes()) {
+        return Err(error_json(StatusCode::UNAUTHORIZED, "invalid_token"));
+    }
+
+    // 2. Validate input
+    if input.username.is_empty() || input.password.len() < 12 {
+        return Err(error_json(
+            StatusCode::BAD_REQUEST,
+            "invalid input: username required, password min 12 chars",
+        ));
+    }
+
+    // 3. Hash password with Argon2 (CPU-intensive, before transaction)
+    let password_hash = hash_password(input.password.clone()).await.map_err(|e| {
+        tracing::error!(error = %e, "password hashing failed");
+        error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error")
+    })?;
+
+    // 4.
Transaction: lock + create account + actor + permits + session + let client = app.db_pool.get().await.map_err(|e| { + tracing::error!(error = %e, "db pool error during bootstrap"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + client.execute("BEGIN", &[]).await.map_err(|e| { + tracing::error!(error = %e, "transaction begin failed"); + error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error") + })?; + + // Acquire bootstrap lock atomically + let lock_row = match client + .query_opt( + "UPDATE bootstrap_lock SET bootstrapped = true + WHERE id = 1 AND bootstrapped = false RETURNING id", + &[], + ) + .await + { + Ok(row) => row, + Err(e) => { + let _ = client.execute("ROLLBACK", &[]).await; + tracing::error!(error = %e, "bootstrap lock query failed"); + return Err(error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error")); + } + }; + if lock_row.is_none() { + let _ = client.execute("ROLLBACK", &[]).await; + app.bootstrap_available + .store(false, std::sync::atomic::Ordering::Relaxed); + return Err(error_json(StatusCode::FORBIDDEN, "already_bootstrapped")); + } + + // Create account + actor + permits + session (all in one helper) + let (account, session_token) = + match do_bootstrap_creates(&client, &input, &password_hash).await { + Ok(result) => result, + Err(e) => { + let _ = client.execute("ROLLBACK", &[]).await; + tracing::error!(error = %e, "bootstrap transaction failed"); + return Err(error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error")); + } + }; + + // Commit + if let Err(e) = client.execute("COMMIT", &[]).await { + tracing::error!(error = %e, "transaction commit failed"); + return Err(error_json(StatusCode::INTERNAL_SERVER_ERROR, "internal error")); + } + + // Mark bootstrap as no longer available + app.bootstrap_available + .store(false, std::sync::atomic::Ordering::Relaxed); + + // Set keeper_account_id on daemon token state (if enabled) + if let Some(ref daemon_state) = app.daemon_token_state { + let mut 
state = daemon_state.write().await; + state.keeper_account_id = Some(account.id); + tracing::info!("daemon token keeper_account_id set to {}", account.id); + } + + // 5. Delete token file (after commit — best effort) + if let Err(e) = tokio::fs::remove_file(token_path).await { + tracing::error!(error = %e, path = %token_path, "CRITICAL: failed to delete bootstrap token file"); + } + + // 6. Build session cookie and return + let cookie = sign_session_cookie(&app.keyring, &session_token); + let mut headers = HeaderMap::new(); + if let Ok(val) = cookie.parse() { + headers.insert(axum::http::header::SET_COOKIE, val); + } + + tracing::info!(username = %input.username, "bootstrap complete"); + + Ok(( + StatusCode::OK, + headers, + Json(BootstrapSuccess { + ok: true, + username: account.username, + }), + ) + .into_response()) +} + +/// Execute account/actor/permits/session creation within an open transaction. +async fn do_bootstrap_creates( + client: &deadpool_postgres::Object, + input: &BootstrapInput, + password_hash: &str, +) -> Result<(db::AccountRow, String), tokio_postgres::Error> { + let account = db::query_create_account(client, &input.username, password_hash).await?; + let actor = db::query_create_actor(client, &account.id, &input.username).await?; + db::query_grant_permit(client, &actor.id, "keeper").await?; + db::query_grant_permit(client, &actor.id, "admin").await?; + + let session_token = generate_session_token(); + let token_hash = auth::hash_session_token(&session_token); + db::query_create_session(client, &token_hash, &account.id).await?; + + Ok((account, session_token)) +} + +// -- Helpers ------------------------------------------------------------------ + +/// Timing-safe byte comparison. 
+fn timing_safe_eq(a: &[u8], b: &[u8]) -> bool {
+    if a.len() != b.len() {
+        return false;
+    }
+    let mut diff = 0u8;
+    for (x, y) in a.iter().zip(b.iter()) {
+        diff |= x ^ y;
+    }
+    diff == 0
+}
diff --git a/crates/zzz_server/src/daemon_token.rs b/crates/zzz_server/src/daemon_token.rs
new file mode 100644
index 000000000..98ff0f103
--- /dev/null
+++ b/crates/zzz_server/src/daemon_token.rs
@@ -0,0 +1,169 @@
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use base64::Engine;
+use base64::engine::general_purpose::URL_SAFE_NO_PAD;
+use rand::RngExt;
+use tokio::sync::RwLock;
+
+// -- Daemon token state -------------------------------------------------------
+
+/// In-memory daemon token state for `X-Daemon-Token` authentication.
+///
+/// Mirrors `fuz_app`'s `DaemonTokenState`:
+/// - `current_token`: 43-char base64url string (32 random bytes)
+/// - `previous_token`: prior token, valid during rotation race window
+/// - `keeper_account_id`: resolved after bootstrap
+///
+/// Protected by `tokio::sync::RwLock` — async reads during validation,
+/// write lock only during rotation.
+#[derive(Debug)]
+pub struct DaemonTokenState {
+    pub current_token: String,
+    pub previous_token: Option<String>,
+    pub keeper_account_id: Option<uuid::Uuid>,
+    pub token_path: PathBuf,
+}
+
+/// Shared handle to daemon token state.
+pub type SharedDaemonTokenState = Arc<RwLock<DaemonTokenState>>;
+
+// -- Token generation ---------------------------------------------------------
+
+/// Generate a daemon token: 32 random bytes → base64url (43 chars).
+///
+/// Matches `fuz_app`'s `generate_daemon_token` / `generate_random_base64url`.
+pub fn generate_daemon_token() -> String {
+    let mut bytes = [0u8; 32];
+    rand::rng().fill(&mut bytes);
+    URL_SAFE_NO_PAD.encode(bytes)
+}
+
+// -- Token validation ---------------------------------------------------------
+
+/// Validate a provided token against the current and previous tokens.
+///
+/// Uses constant-time comparison to prevent timing attacks.
+/// Accepts both current and previous token (rotation race window). +/// +/// Mirrors `fuz_app`'s `validate_daemon_token`. +pub fn validate_daemon_token(provided: &str, state: &DaemonTokenState) -> bool { + if timing_safe_eq(provided.as_bytes(), state.current_token.as_bytes()) { + return true; + } + if let Some(ref previous) = state.previous_token + && timing_safe_eq(provided.as_bytes(), previous.as_bytes()) + { + return true; + } + false +} + +/// Timing-safe byte comparison. +/// +/// Returns `false` immediately if lengths differ (length is not secret +/// for daemon tokens — they're always 43 chars). Content comparison +/// is constant-time via XOR accumulation. +fn timing_safe_eq(a: &[u8], b: &[u8]) -> bool { + if a.len() != b.len() { + return false; + } + let mut diff = 0u8; + for (x, y) in a.iter().zip(b.iter()) { + diff |= x ^ y; + } + diff == 0 +} + +// -- Token file I/O ----------------------------------------------------------- + +/// Write the daemon token to disk atomically (tempfile + rename). +/// +/// Mirrors `fuz_app`'s `write_daemon_token` with atomic write pattern. +/// File contains the token followed by a newline. 
+pub async fn write_token_file(path: &std::path::Path, token: &str) -> std::io::Result<()> { + let parent = path.parent().ok_or_else(|| { + std::io::Error::new(std::io::ErrorKind::InvalidInput, "token path has no parent dir") + })?; + + // Ensure parent directory exists + tokio::fs::create_dir_all(parent).await?; + + // Atomic write: write to temp file, then rename + let tmp_path = path.with_extension("tmp"); + tokio::fs::write(&tmp_path, format!("{token}\n")).await?; + tokio::fs::rename(&tmp_path, path).await?; + + // Best-effort chmod 0o600 (owner read-write only) + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let _ = tokio::fs::set_permissions(path, std::fs::Permissions::from_mode(0o600)).await; + } + + Ok(()) +} + +// -- Token rotation ----------------------------------------------------------- + +/// Rotation interval in milliseconds (30 seconds, matching `fuz_app`). +const ROTATION_INTERVAL_MS: u64 = 30_000; + +/// Spawn a background task that rotates the daemon token every 30 seconds. +/// +/// Rotation: `previous_token = current_token`, `current_token = new_token`, +/// then write to disk atomically. +/// +/// Returns a `tokio::task::JoinHandle` — caller should abort on shutdown. 
+pub fn spawn_rotation_task(
+    state: SharedDaemonTokenState,
+) -> tokio::task::JoinHandle<()> {
+    tokio::spawn(async move {
+        let mut interval =
+            tokio::time::interval(std::time::Duration::from_millis(ROTATION_INTERVAL_MS));
+        // First tick fires immediately — skip it (token was just written at startup)
+        interval.tick().await;
+
+        loop {
+            interval.tick().await;
+
+            let new_token = generate_daemon_token();
+            let path = {
+                let mut state = state.write().await;
+                state.previous_token = Some(state.current_token.clone());
+                state.current_token.clone_from(&new_token);
+                state.token_path.clone()
+            };
+
+            if let Err(e) = write_token_file(&path, &new_token).await {
+                tracing::error!(error = %e, "failed to write rotated daemon token");
+            } else {
+                tracing::debug!("daemon token rotated");
+            }
+        }
+    })
+}
+
+// -- Init ---------------------------------------------------------------------
+
+/// Initialize daemon token state: generate token, write to disk, return state.
+///
+/// Called from `main.rs` during server startup.
+pub async fn init_daemon_token(
+    zzz_dir: &str,
+) -> Result<SharedDaemonTokenState, std::io::Error> {
+    let token_path = PathBuf::from(zzz_dir).join("run").join("daemon_token");
+    let token = generate_daemon_token();
+
+    write_token_file(&token_path, &token).await?;
+    tracing::info!(path = %token_path.display(), "daemon token initialized");
+
+    let state = DaemonTokenState {
+        current_token: token,
+        previous_token: None,
+        keeper_account_id: None,
+        token_path,
+    };
+
+    Ok(Arc::new(RwLock::new(state)))
+}
diff --git a/crates/zzz_server/src/db.rs b/crates/zzz_server/src/db.rs
new file mode 100644
index 000000000..00f7c8e22
--- /dev/null
+++ b/crates/zzz_server/src/db.rs
@@ -0,0 +1,614 @@
+use deadpool_postgres::{Config, Pool, Runtime, SslMode};
+use tokio_postgres::NoTls;
+
+use crate::error::ServerError;
+
+// -- Pool creation ------------------------------------------------------------
+
+/// Create a connection pool from `DATABASE_URL`.
+///
+/// Parses the URL into `deadpool_postgres::Config` and builds the pool.
+/// Fails fast if the URL is missing or malformed.
+pub fn create_pool(database_url: &str) -> Result<Pool, ServerError> {
+    let pg_config: tokio_postgres::Config = database_url
+        .parse()
+        .map_err(|e| ServerError::Database(format!("invalid DATABASE_URL: {e}")))?;
+
+    let mut cfg = Config::new();
+    if let Some(host) = pg_config.get_hosts().first() {
+        match host {
+            tokio_postgres::config::Host::Tcp(h) => cfg.host = Some(h.clone()),
+            #[cfg(unix)]
+            tokio_postgres::config::Host::Unix(p) => {
+                cfg.host = Some(p.to_string_lossy().into_owned());
+            }
+        }
+    }
+    if let Some(port) = pg_config.get_ports().first() {
+        cfg.port = Some(*port);
+    }
+    if let Some(user) = pg_config.get_user() {
+        cfg.user = Some(user.to_owned());
+    }
+    if let Some(dbname) = pg_config.get_dbname() {
+        cfg.dbname = Some(dbname.to_owned());
+    }
+    if let Some(password) = pg_config.get_password() {
+        cfg.password = Some(String::from_utf8_lossy(password).into_owned());
+    }
+    cfg.ssl_mode = Some(SslMode::Disable);
+
+    cfg.create_pool(Some(Runtime::Tokio1), NoTls)
+        .map_err(|e| ServerError::Database(format!("failed to create pool: {e}")))
+}
+
+// -- Migrations ---------------------------------------------------------------
+
+/// Run auth table DDL (CREATE TABLE IF NOT EXISTS).
+///
+/// Mirrors `fuz_app`'s auth DDL from `src/lib/auth/ddl.ts`.
+/// Safe to run on every startup — all statements use IF NOT EXISTS.
+pub async fn run_migrations(pool: &Pool) -> Result<(), ServerError> {
+    let client = pool
+        .get()
+        .await
+        .map_err(|e| ServerError::Database(format!("migration connection failed: {e}")))?;
+
+    client
+        .batch_execute(AUTH_DDL)
+        .await
+        .map_err(|e| ServerError::Database(format!("migration failed: {e}")))?;
+
+    tracing::info!("auth migrations complete");
+    Ok(())
+}
+
+/// Auth DDL — mirrors `fuz_app`'s `src/lib/auth/ddl.ts`.
+const AUTH_DDL: &str = r" +CREATE TABLE IF NOT EXISTS account ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + username TEXT UNIQUE NOT NULL, + email TEXT, + email_verified BOOLEAN NOT NULL DEFAULT false, + password_hash TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + created_by UUID, + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_by UUID +); + +CREATE UNIQUE INDEX IF NOT EXISTS idx_account_email + ON account (LOWER(email)) WHERE email IS NOT NULL; + +CREATE UNIQUE INDEX IF NOT EXISTS idx_account_username_ci + ON account (LOWER(username)); + +CREATE TABLE IF NOT EXISTS actor ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + account_id UUID NOT NULL REFERENCES account(id) ON DELETE CASCADE, + name TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ, + updated_by UUID REFERENCES actor(id) ON DELETE SET NULL +); + +CREATE INDEX IF NOT EXISTS idx_actor_account ON actor(account_id); + +CREATE TABLE IF NOT EXISTS permit ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + actor_id UUID NOT NULL REFERENCES actor(id) ON DELETE CASCADE, + role TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + expires_at TIMESTAMPTZ, + revoked_at TIMESTAMPTZ, + revoked_by UUID REFERENCES actor(id) ON DELETE SET NULL, + granted_by UUID REFERENCES actor(id) ON DELETE SET NULL +); + +CREATE INDEX IF NOT EXISTS idx_permit_actor ON permit(actor_id); +CREATE UNIQUE INDEX IF NOT EXISTS permit_actor_role_active_unique + ON permit (actor_id, role) WHERE revoked_at IS NULL; + +CREATE TABLE IF NOT EXISTS auth_session ( + id TEXT PRIMARY KEY, + account_id UUID NOT NULL REFERENCES account(id) ON DELETE CASCADE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + expires_at TIMESTAMPTZ NOT NULL, + last_seen_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX IF NOT EXISTS idx_auth_session_account ON auth_session(account_id); +CREATE INDEX IF NOT EXISTS idx_auth_session_expires ON auth_session(expires_at); + 
+CREATE TABLE IF NOT EXISTS bootstrap_lock ( + id INTEGER PRIMARY KEY DEFAULT 1 CHECK (id = 1), + bootstrapped BOOLEAN NOT NULL DEFAULT false +); + +INSERT INTO bootstrap_lock (id, bootstrapped) + SELECT 1, EXISTS(SELECT 1 FROM account) + ON CONFLICT DO NOTHING; + +CREATE TABLE IF NOT EXISTS app_settings ( + id INTEGER PRIMARY KEY DEFAULT 1 CHECK (id = 1), + open_signup BOOLEAN NOT NULL DEFAULT false, + updated_at TIMESTAMPTZ, + updated_by UUID +); + +INSERT INTO app_settings (id) VALUES (1) ON CONFLICT DO NOTHING; + +CREATE TABLE IF NOT EXISTS api_token ( + id TEXT PRIMARY KEY, + account_id UUID NOT NULL REFERENCES account(id) ON DELETE CASCADE, + name TEXT NOT NULL, + token_hash TEXT NOT NULL, + expires_at TIMESTAMPTZ, + last_used_at TIMESTAMPTZ, + last_used_ip TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX IF NOT EXISTS idx_api_token_account ON api_token(account_id); +"; + +// -- Auth queries ------------------------------------------------------------- + +/// Row from the `auth_session` table. +#[derive(Debug)] +pub struct AuthSessionRow { + pub id: String, + pub account_id: uuid::Uuid, +} + +/// Row from the `account` table (fields needed for request context). +#[derive(Debug, Clone)] +pub struct AccountRow { + pub id: uuid::Uuid, + pub username: String, +} + +/// Row from the `actor` table. +#[derive(Debug, Clone)] +pub struct ActorRow { + pub id: uuid::Uuid, + pub account_id: uuid::Uuid, + pub name: String, +} + +/// Row from the `permit` table (active permits only). +#[derive(Debug, Clone)] +pub struct PermitRow { + pub id: uuid::Uuid, + pub actor_id: uuid::Uuid, + pub role: String, +} + +/// Look up a valid (non-expired) session by its token hash. 
+pub async fn query_session_get_valid(
+    client: &deadpool_postgres::Object,
+    token_hash: &str,
+) -> Result<Option<AuthSessionRow>, tokio_postgres::Error> {
+    let row = client
+        .query_opt(
+            "SELECT id, account_id FROM auth_session WHERE id = $1 AND expires_at > NOW()",
+            &[&token_hash],
+        )
+        .await?;
+
+    Ok(row.map(|r| AuthSessionRow {
+        id: r.get(0),
+        account_id: r.get(1),
+    }))
+}
+
+/// Look up an account by id.
+pub async fn query_account_by_id(
+    client: &deadpool_postgres::Object,
+    account_id: &uuid::Uuid,
+) -> Result<Option<AccountRow>, tokio_postgres::Error> {
+    let row = client
+        .query_opt(
+            "SELECT id, username FROM account WHERE id = $1",
+            &[account_id],
+        )
+        .await?;
+
+    Ok(row.map(|r| AccountRow {
+        id: r.get(0),
+        username: r.get(1),
+    }))
+}
+
+/// Look up an actor by account id.
+pub async fn query_actor_by_account(
+    client: &deadpool_postgres::Object,
+    account_id: &uuid::Uuid,
+) -> Result<Option<ActorRow>, tokio_postgres::Error> {
+    let row = client
+        .query_opt(
+            "SELECT id, account_id, name FROM actor WHERE account_id = $1",
+            &[account_id],
+        )
+        .await?;
+
+    Ok(row.map(|r| ActorRow {
+        id: r.get(0),
+        account_id: r.get(1),
+        name: r.get(2),
+    }))
+}
+
+/// Look up active (non-revoked, non-expired) permits for an actor.
+pub async fn query_permits_for_actor(
+    client: &deadpool_postgres::Object,
+    actor_id: &uuid::Uuid,
+) -> Result<Vec<PermitRow>, tokio_postgres::Error> {
+    let rows = client
+        .query(
+            "SELECT id, actor_id, role FROM permit
+             WHERE actor_id = $1
+             AND revoked_at IS NULL
+             AND (expires_at IS NULL OR expires_at > NOW())
+             ORDER BY created_at",
+            &[actor_id],
+        )
+        .await?;
+
+    Ok(rows
+        .into_iter()
+        .map(|r| PermitRow {
+            id: r.get(0),
+            actor_id: r.get(1),
+            role: r.get(2),
+        })
+        .collect())
+}
+
+/// Row from the `api_token` table (fields needed for bearer auth).
+#[derive(Debug)]
+pub struct ApiTokenRow {
+    pub id: String,
+    pub account_id: uuid::Uuid,
+}
+
+/// Look up a valid (non-expired) API token by its blake3 hash.
+///
+/// Mirrors `fuz_app`'s `query_validate_api_token` from `api_token_queries.ts`.
+pub async fn query_validate_api_token(
+    client: &deadpool_postgres::Object,
+    token_hash: &str,
+) -> Result<Option<ApiTokenRow>, tokio_postgres::Error> {
+    let row = client
+        .query_opt(
+            "SELECT id, account_id FROM api_token
+             WHERE token_hash = $1
+             AND (expires_at IS NULL OR expires_at > NOW())",
+            &[&token_hash],
+        )
+        .await?;
+
+    Ok(row.map(|r| ApiTokenRow {
+        id: r.get(0),
+        account_id: r.get(1),
+    }))
+}
+
+/// Touch an API token — update `last_used_at` (fire-and-forget).
+pub async fn query_api_token_touch(
+    client: &deadpool_postgres::Object,
+    token_id: &str,
+) -> Result<(), tokio_postgres::Error> {
+    client
+        .execute(
+            "UPDATE api_token SET last_used_at = NOW() WHERE id = $1",
+            &[&token_id],
+        )
+        .await?;
+    Ok(())
+}
+
+/// Find the account ID for the keeper role (first active keeper permit).
+///
+/// Used at startup to resolve the daemon token's keeper account.
+/// Mirrors `fuz_app`'s `query_permit_find_account_id_for_role`.
+pub async fn query_keeper_account_id(
+    client: &deadpool_postgres::Object,
+) -> Result<Option<uuid::Uuid>, tokio_postgres::Error> {
+    let row = client
+        .query_opt(
+            "SELECT a.id FROM account a
+             JOIN actor ac ON ac.account_id = a.id
+             JOIN permit p ON p.actor_id = ac.id
+             WHERE p.role = 'keeper'
+             AND p.revoked_at IS NULL
+             AND (p.expires_at IS NULL OR p.expires_at > NOW())
+             LIMIT 1",
+            &[],
+        )
+        .await?;
+
+    Ok(row.map(|r| r.get(0)))
+}
+
+/// Touch a session — update `last_seen_at` and extend expiry if < 1 day remaining.
+///
+/// Fire-and-forget: caller should spawn this without blocking the request.
+pub async fn query_session_touch(
+    client: &deadpool_postgres::Object,
+    token_hash: &str,
+) -> Result<(), tokio_postgres::Error> {
+    client
+        .execute(
+            "UPDATE auth_session
+             SET last_seen_at = NOW(),
+                 expires_at = CASE
+                     WHEN expires_at - NOW() < INTERVAL '1 day'
+                     THEN NOW() + INTERVAL '30 days'
+                     ELSE expires_at
+                 END
+             WHERE id = $1",
+            &[&token_hash],
+        )
+        .await?;
+    Ok(())
+}
+
+/// Create a new auth session.
+pub async fn query_create_session(
+    client: &deadpool_postgres::Object,
+    token_hash: &str,
+    account_id: &uuid::Uuid,
+) -> Result<(), tokio_postgres::Error> {
+    client
+        .execute(
+            "INSERT INTO auth_session (id, account_id, expires_at)
+             VALUES ($1, $2, NOW() + INTERVAL '30 days')",
+            &[&token_hash, account_id],
+        )
+        .await?;
+    Ok(())
+}
+
+/// Create an account and return the row.
+pub async fn query_create_account(
+    client: &deadpool_postgres::Object,
+    username: &str,
+    password_hash: &str,
+) -> Result<AccountRow, tokio_postgres::Error> {
+    let row = client
+        .query_one(
+            "INSERT INTO account (username, password_hash) VALUES ($1, $2)
+             RETURNING id, username",
+            &[&username, &password_hash],
+        )
+        .await?;
+
+    Ok(AccountRow {
+        id: row.get(0),
+        username: row.get(1),
+    })
+}
+
+/// Create an actor for an account.
+pub async fn query_create_actor(
+    client: &deadpool_postgres::Object,
+    account_id: &uuid::Uuid,
+    name: &str,
+) -> Result<ActorRow, tokio_postgres::Error> {
+    let row = client
+        .query_one(
+            "INSERT INTO actor (account_id, name) VALUES ($1, $2)
+             RETURNING id, account_id, name",
+            &[account_id, &name],
+        )
+        .await?;
+
+    Ok(ActorRow {
+        id: row.get(0),
+        account_id: row.get(1),
+        name: row.get(2),
+    })
+}
+
+/// Grant a permit to an actor (idempotent — ON CONFLICT DO NOTHING).
+pub async fn query_grant_permit(
+    client: &deadpool_postgres::Object,
+    actor_id: &uuid::Uuid,
+    role: &str,
+) -> Result<PermitRow, tokio_postgres::Error> {
+    // Try insert; if already exists (active permit for same role), fetch it
+    let inserted = client
+        .query_opt(
+            "INSERT INTO permit (actor_id, role)
+             VALUES ($1, $2)
+             ON CONFLICT (actor_id, role) WHERE revoked_at IS NULL
+             DO NOTHING
+             RETURNING id, actor_id, role",
+            &[actor_id, &role],
+        )
+        .await?;
+
+    if let Some(row) = inserted {
+        return Ok(PermitRow {
+            id: row.get(0),
+            actor_id: row.get(1),
+            role: row.get(2),
+        });
+    }
+
+    // Already existed — fetch it
+    let row = client
+        .query_one(
+            "SELECT id, actor_id, role FROM permit
+             WHERE actor_id = $1 AND role = $2 AND revoked_at IS NULL",
+            &[actor_id, &role],
+        )
+        .await?;
+
+    Ok(PermitRow {
+        id: row.get(0),
+        actor_id: row.get(1),
+        role: row.get(2),
+    })
+}
+
+// -- Account management queries -----------------------------------------------
+
+/// Account row with password hash (for login / password change).
+#[derive(Debug)]
+pub struct AccountWithPasswordHash {
+    pub id: uuid::Uuid,
+    pub username: String,
+    pub password_hash: String,
+}
+
+/// Look up an account by username (case-insensitive) with password hash.
+pub async fn query_account_with_password_hash(
+    client: &deadpool_postgres::Object,
+    username: &str,
+) -> Result<Option<AccountWithPasswordHash>, tokio_postgres::Error> {
+    let row = client
+        .query_opt(
+            "SELECT id, username, password_hash FROM account WHERE LOWER(username) = LOWER($1)",
+            &[&username],
+        )
+        .await?;
+
+    Ok(row.map(|r| AccountWithPasswordHash {
+        id: r.get(0),
+        username: r.get(1),
+        password_hash: r.get(2),
+    }))
+}
+
+/// Look up an account by ID with password hash.
+pub async fn query_account_with_password_hash_by_id(
+    client: &deadpool_postgres::Object,
+    account_id: &uuid::Uuid,
+) -> Result<Option<AccountWithPasswordHash>, tokio_postgres::Error> {
+    let row = client
+        .query_opt(
+            "SELECT id, username, password_hash FROM account WHERE id = $1",
+            &[account_id],
+        )
+        .await?;
+
+    Ok(row.map(|r| AccountWithPasswordHash {
+        id: r.get(0),
+        username: r.get(1),
+        password_hash: r.get(2),
+    }))
+}
+
+/// Delete a session by token hash.
+pub async fn query_delete_session(
+    client: &deadpool_postgres::Object,
+    token_hash: &str,
+) -> Result<(), tokio_postgres::Error> {
+    client
+        .execute("DELETE FROM auth_session WHERE id = $1", &[&token_hash])
+        .await?;
+    Ok(())
+}
+
+/// Delete a session by token hash, scoped to an account.
+///
+/// Returns `true` if a row was deleted, `false` if not found.
+pub async fn query_delete_session_for_account(
+    client: &deadpool_postgres::Object,
+    token_hash: &str,
+    account_id: &uuid::Uuid,
+) -> Result<bool, tokio_postgres::Error> {
+    let count = client
+        .execute(
+            "DELETE FROM auth_session WHERE id = $1 AND account_id = $2",
+            &[&token_hash, account_id],
+        )
+        .await?;
+    Ok(count > 0)
+}
+
+/// Delete all sessions for an account.
+pub async fn query_delete_all_sessions_for_account(
+    client: &deadpool_postgres::Object,
+    account_id: &uuid::Uuid,
+) -> Result<u64, tokio_postgres::Error> {
+    let count = client
+        .execute(
+            "DELETE FROM auth_session WHERE account_id = $1",
+            &[account_id],
+        )
+        .await?;
+    Ok(count)
+}
+
+/// Delete all API tokens for an account.
+pub async fn query_delete_all_tokens_for_account(
+    client: &deadpool_postgres::Object,
+    account_id: &uuid::Uuid,
+) -> Result<u64, tokio_postgres::Error> {
+    let count = client
+        .execute(
+            "DELETE FROM api_token WHERE account_id = $1",
+            &[account_id],
+        )
+        .await?;
+    Ok(count)
+}
+
+/// Update an account's password hash.
+pub async fn query_update_password( + client: &deadpool_postgres::Object, + account_id: &uuid::Uuid, + new_password_hash: &str, +) -> Result<(), tokio_postgres::Error> { + client + .execute( + "UPDATE account SET password_hash = $1, updated_at = NOW() WHERE id = $2", + &[&new_password_hash, account_id], + ) + .await?; + Ok(()) +} + +/// Session row for listing (no token hash exposed). +#[derive(Debug)] +pub struct SessionListRow { + pub id: String, + pub created_at: String, + pub last_seen_at: String, + pub expires_at: String, +} + +/// List all sessions for an account (for GET /sessions). +/// +/// Returns session metadata — the token hash ID is included as the +/// session identifier but the original token is never exposed. +pub async fn query_sessions_for_account( + client: &deadpool_postgres::Object, + account_id: &uuid::Uuid, +) -> Result, tokio_postgres::Error> { + let rows = client + .query( + "SELECT id, + to_char(created_at AT TIME ZONE 'UTC', 'YYYY-MM-DD\"T\"HH24:MI:SS\"Z\"'), + to_char(last_seen_at AT TIME ZONE 'UTC', 'YYYY-MM-DD\"T\"HH24:MI:SS\"Z\"'), + to_char(expires_at AT TIME ZONE 'UTC', 'YYYY-MM-DD\"T\"HH24:MI:SS\"Z\"') + FROM auth_session + WHERE account_id = $1 + ORDER BY created_at", + &[account_id], + ) + .await?; + + Ok(rows + .into_iter() + .map(|r| SessionListRow { + id: r.get(0), + created_at: r.get(1), + last_seen_at: r.get(2), + expires_at: r.get(3), + }) + .collect()) +} diff --git a/crates/zzz_server/src/error.rs b/crates/zzz_server/src/error.rs new file mode 100644 index 000000000..7bbff20bc --- /dev/null +++ b/crates/zzz_server/src/error.rs @@ -0,0 +1,18 @@ +use std::net::SocketAddr; + +/// Server-level errors for startup and runtime. 
+#[derive(Debug, thiserror::Error)] +pub enum ServerError { + #[error("failed to bind to {addr}")] + Bind { + addr: SocketAddr, + #[source] + source: std::io::Error, + }, + #[error("server error")] + Serve(#[source] std::io::Error), + #[error("database error: {0}")] + Database(String), + #[error("configuration error: {0}")] + Config(String), +} diff --git a/crates/zzz_server/src/filer.rs b/crates/zzz_server/src/filer.rs new file mode 100644 index 000000000..ad3ac6ae4 --- /dev/null +++ b/crates/zzz_server/src/filer.rs @@ -0,0 +1,555 @@ +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::sync::Arc; +use std::time::Duration; + +use notify::{EventKind, RecommendedWatcher, RecursiveMode, Watcher}; +use serde::Serialize; +use serde_json::Value; +use tokio::sync::{mpsc, RwLock}; +use tokio::time::Instant; + +use crate::handlers::App; +use crate::rpc; + +// -- Notification params ------------------------------------------------------ + +/// Params for `filer_change` `remote_notification`. +/// +/// Matches the TypeScript `filer_change_action_spec` input schema: +/// `{ change: DiskfileChange, disknode: SerializableDisknode }`. +#[derive(Serialize)] +struct FilerChangeParams { + change: DiskfileChange, + disknode: SerializableDisknode, +} + +/// Matches `DiskfileChange` from `diskfile_types.ts`. +#[derive(Serialize, Clone)] +struct DiskfileChange { + #[serde(rename = "type")] + change_type: String, + path: String, +} + +/// Matches `SerializableDisknode` from `diskfile_types.ts`. +/// +/// Simplified — `dependents` and `dependencies` are always empty (no +/// dependency tracking in the Rust backend). 
+#[derive(Serialize, Clone)] +pub struct SerializableDisknode { + pub id: String, + pub source_dir: String, + pub contents: Option, + pub ctime: Option, + pub mtime: Option, + pub dependents: Vec, + pub dependencies: Vec, +} + +// -- Default ignored directories ---------------------------------------------- + +/// Directories always ignored by all watchers. Individual filers +/// can add extra ignores on top of these via `FilerConfig`. +const DEFAULT_IGNORED_DIRS: &[&str] = &[ + ".git", + "node_modules", + ".svelte-kit", + "target", + "dist", +]; + +/// Check if a single directory name is in the ignore lists. +fn is_ignored_name(name: &str, extra_ignores: &[String]) -> bool { + DEFAULT_IGNORED_DIRS.contains(&name) || extra_ignores.iter().any(|ig| ig == name) +} + +/// Check if a path contains any ignored directory component below `source_dir`. +/// +/// Only checks components after the `source_dir` prefix — root path segments +/// like `/`, `home`, `user` can never match ignored names and are skipped. +fn is_ignored(path: &Path, source_dir: &Path, extra_ignores: &[String]) -> bool { + let suffix = path.strip_prefix(source_dir).unwrap_or(path); + suffix.components().any(|c| { + let s = c.as_os_str().to_str().unwrap_or(""); + is_ignored_name(s, extra_ignores) + }) +} + +// -- File metadata helpers ---------------------------------------------------- + +/// Convert a `SystemTime` to milliseconds since epoch (matching JS `Date` format). +fn system_time_to_ms(t: std::time::SystemTime) -> Option { + t.duration_since(std::time::UNIX_EPOCH) + .ok() + .map(|d| d.as_secs_f64() * 1000.0) +} + +/// Construct a `SerializableDisknode` from pre-read components. 
+fn make_disknode( + id: String, + source_dir: &str, + contents: Option, + ctime: Option, + mtime: Option, +) -> SerializableDisknode { + SerializableDisknode { + id, + source_dir: source_dir.to_owned(), + contents, + ctime, + mtime, + dependents: vec![], + dependencies: vec![], + } +} + +/// Build a `SerializableDisknode` for a watcher event, reading metadata and +/// contents on blocking threads (never blocks the tokio runtime). +async fn build_disknode(file_path: &Path, source_dir: &str, is_delete: bool) -> SerializableDisknode { + let path_str = file_path.to_string_lossy().to_string(); + + if is_delete { + return make_disknode(path_str, source_dir, None, None, None); + } + + let path_owned = file_path.to_path_buf(); + let (meta_result, contents) = tokio::join!( + tokio::task::spawn_blocking({ + let p = path_owned.clone(); + move || std::fs::metadata(&p).ok() + }), + tokio::task::spawn_blocking(move || { + if path_owned.is_dir() { + return None; + } + std::fs::read_to_string(&path_owned).ok() + }), + ); + + let meta = meta_result.ok().flatten(); + let ctime = meta.as_ref().and_then(|m| m.created().ok()).and_then(system_time_to_ms); + let mtime = meta.as_ref().and_then(|m| m.modified().ok()).and_then(system_time_to_ms); + + make_disknode(path_str, source_dir, contents.unwrap_or(None), ctime, mtime) +} + +// -- Event → notification mapping --------------------------------------------- + +/// Map a notify `EventKind` to a `DiskfileChangeType` string. +/// +/// Returns `None` for events we don't care about (access, other). +const fn event_kind_to_change_type(kind: EventKind) -> Option<&'static str> { + match kind { + EventKind::Create(_) => Some("add"), + EventKind::Modify(_) => Some("change"), + EventKind::Remove(_) => Some("delete"), + _ => None, + } +} + +// -- Debouncing --------------------------------------------------------------- + +/// Window for coalescing rapid events on the same path. 
+const DEBOUNCE_DURATION: Duration = Duration::from_millis(80); + +/// A pending debounced notification (broadcast only — index updates are immediate). +struct PendingNotification { + change_type: &'static str, + deadline: Instant, + disknode: SerializableDisknode, +} + +// -- Filer configuration ------------------------------------------------------ + +/// Per-filer configuration controlling which directories to ignore. +pub struct FilerConfig { + /// Extra directory names to ignore beyond the defaults. + /// For workspace watchers this includes `.zzz`; for the `zzz_dir` + /// watcher this is empty so it can see its own files. + pub extra_ignores: Vec, +} + +impl FilerConfig { + /// Config for the `zzz_dir` watcher — no extra ignores, since it needs + /// to see files inside the zzz directory. + pub const fn zzz_dir() -> Self { + Self { + extra_ignores: vec![], + } + } + + /// Config for workspace and `scoped_dir` watchers — ignores the zzz + /// directory name to avoid duplicate events when `zzz_dir` is nested + /// under a watched directory. + /// + /// Derives the ignore name from the actual `zzz_dir` path (e.g. `.zzz` + /// from `/home/user/.zzz/`) so it works with custom `PUBLIC_ZZZ_DIR`. + pub fn workspace(zzz_dir: &str) -> Self { + let zzz_dir_name = Path::new(zzz_dir.trim_end_matches('/')) + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or(".zzz") + .to_owned(); + Self { + extra_ignores: vec![zzz_dir_name], + } + } +} + +// -- Filer (replaces WorkspaceWatcher) ---------------------------------------- + +/// Watches a directory for file changes, maintains an in-memory file index, +/// and broadcasts `filer_change` notifications to WebSocket clients. +/// +/// Dropped when the filer is stopped (notify cleans up on Drop, +/// the tokio task is aborted). +pub struct Filer { + /// Held to keep the notify watcher alive — dropped when the filer stops. + _watcher: RecommendedWatcher, + /// Background task processing watcher events. 
+ task: tokio::task::JoinHandle<()>, + /// In-memory file index — path → disknode. Updated by watcher events + /// and initial scan. Read by `session_load`. + pub files: Arc>>, +} + +impl Drop for Filer { + fn drop(&mut self) { + self.task.abort(); + } +} + +/// Start watching a directory, perform an initial file scan, and return a `Filer`. +/// +/// The initial scan populates the file index before returning, so callers +/// can immediately read from `filer.files`. The background task then +/// keeps the index updated and broadcasts changes. +pub async fn start_filer( + path: &str, + app: Arc, + config: FilerConfig, +) -> Result { + let (tx, rx) = mpsc::channel::(256); + + let mut watcher = RecommendedWatcher::new( + move |res: Result| { + if let Ok(event) = res { + let _ = tx.try_send(event); + } + }, + notify::Config::default(), + )?; + + watcher.watch(Path::new(path), RecursiveMode::Recursive)?; + + let source_dir = path.to_owned(); + let files: Arc>> = + Arc::new(RwLock::new(HashMap::new())); + + // Initial scan — populate the file index + let mut initial_files = HashMap::new(); + scan_directory(&source_dir, &source_dir, &config.extra_ignores, &mut initial_files).await; + { + let mut index = files.write().await; + *index = initial_files; + } + + let files_clone = Arc::clone(&files); + let task = tokio::spawn(filer_event_loop( + rx, + source_dir.clone(), + config.extra_ignores, + files_clone, + app, + )); + + Ok(Filer { + _watcher: watcher, + task, + files, + }) +} + +/// Recursively scan a directory and populate the file map. 
+async fn scan_directory( + dir: &str, + source_dir: &str, + extra_ignores: &[String], + files: &mut HashMap, +) { + let Ok(mut entries) = tokio::fs::read_dir(dir).await else { + return; + }; + while let Ok(Some(entry)) = entries.next_entry().await { + let path = entry.path(); + + // Skip ignored directories + if let Some(name) = path.file_name().and_then(|n| n.to_str()) + && is_ignored_name(name, extra_ignores) + { + continue; + } + + let Ok(meta) = tokio::fs::metadata(&path).await else { + continue; + }; + + if meta.is_dir() { + let mut dir_path = path.to_string_lossy().into_owned(); + if !dir_path.ends_with('/') { + dir_path.push('/'); + } + Box::pin(scan_directory(&dir_path, source_dir, extra_ignores, files)).await; + } else { + let path_str = path.to_string_lossy().into_owned(); + let ctime = meta.created().ok().and_then(system_time_to_ms); + let mtime = meta.modified().ok().and_then(system_time_to_ms); + let contents = tokio::fs::read_to_string(&path).await.ok(); + let disknode = make_disknode(path_str.clone(), source_dir, contents, ctime, mtime); + files.insert(path_str, disknode); + } + } +} + +/// Background event loop: receives notify events, debounces them, updates +/// the file index, and broadcasts `filer_change` notifications. +async fn filer_event_loop( + mut rx: mpsc::Receiver, + source_dir: String, + extra_ignores: Vec, + files: Arc>>, + app: Arc, +) { + let source_dir_path = Path::new(&source_dir); + // Pending notifications — index updates happen immediately, but + // filer_change broadcasts are debounced to avoid flooding clients. + let mut pending: HashMap = HashMap::new(); + + loop { + // If we have pending notifications, wait until the nearest deadline or a new event + let timeout = pending + .values() + .map(|p| p.deadline) + .min() + .map(|deadline| deadline.saturating_duration_since(Instant::now())); + + let event = if let Some(timeout) = timeout { + tokio::select! 
{ + biased; + e = rx.recv() => e, + () = tokio::time::sleep(timeout) => None, + } + } else { + rx.recv().await + }; + + match event { + Some(event) => { + let Some(change_type) = event_kind_to_change_type(event.kind) else { + continue; + }; + + for file_path in event.paths { + if is_ignored(&file_path, source_dir_path, &extra_ignores) { + continue; + } + + let is_delete = change_type == "delete"; + + // Skip directory events — we only index files. + if !is_delete + && let Ok(meta) = tokio::fs::metadata(&file_path).await + && meta.is_dir() + { + continue; + } + + let disknode = build_disknode(&file_path, &source_dir, is_delete).await; + + // Update the file index immediately so reads always + // see the latest state (no debounce on the index). + { + let mut index = files.write().await; + if is_delete { + index.remove(&disknode.id); + } else { + index.insert(disknode.id.clone(), disknode.clone()); + } + } + + // Debounce the notification broadcast + let deadline = Instant::now() + DEBOUNCE_DURATION; + pending + .entry(file_path) + .and_modify(|p| { + // Extend the deadline but preserve "add" — a Create + // followed by Modify should still be seen as "add" + // by clients (the file is new). 
+ p.deadline = deadline; + p.disknode = disknode.clone(); + if p.change_type != "add" { + p.change_type = change_type; + } + }) + .or_insert(PendingNotification { + change_type, + deadline, + disknode, + }); + } + } + None => { + // Channel closed or timeout fired — flush ready notifications + if pending.is_empty() { + // Channel truly closed (no pending, no new events) + break; + } + } + } + + // Flush notifications whose deadline has passed + let now = Instant::now(); + let ready: Vec<(PathBuf, PendingNotification)> = pending + .extract_if(|_, p| p.deadline <= now) + .collect(); + + for (_, event) in ready { + let params = FilerChangeParams { + change: DiskfileChange { + change_type: event.change_type.to_owned(), + path: event.disknode.id.clone(), + }, + disknode: event.disknode, + }; + + let notification = rpc::notification( + "filer_change", + serde_json::to_value(¶ms).unwrap_or_default(), + ); + app.broadcast(¬ification); + } + } +} + +// -- FilerManager ------------------------------------------------------------- + +/// Whether a filer was started at server startup (permanent) or via +/// `workspace_open` (can be stopped on `workspace_close`). +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum FilerLifetime { + /// Started at server startup for `zzz_dir` or `scoped_dirs` — never stopped. + Permanent, + /// Started via `workspace_open` — stopped on `workspace_close`. + Workspace, +} + +/// Entry in the filer manager. +pub struct FilerEntry { + pub filer: Filer, + pub lifetime: FilerLifetime, +} + +/// Manages all active filers with deduplication and lifetime tracking. +/// +/// One filer per unique directory path. Permanent filers (`zzz_dir`, `scoped_dirs`) +/// survive `workspace_close`. Workspace filers are stopped on close. +pub struct FilerManager { + filers: RwLock>, +} + +impl FilerManager { + pub fn new() -> Self { + Self { + filers: RwLock::new(HashMap::new()), + } + } + + /// Start a filer for the given directory path. 
Returns `Ok(true)` if a new + /// filer was created, `Ok(false)` if one already existed for this path. + /// + /// If a filer already exists, its lifetime is upgraded to `Permanent` if + /// the new request is `Permanent` (but never downgraded). + pub async fn start_filer( + &self, + path: &str, + app: Arc, + config: FilerConfig, + lifetime: FilerLifetime, + ) -> Result { + debug_assert!( + path.ends_with('/'), + "FilerManager paths must have trailing slash: {path}" + ); + + // Fast path — already watching + { + let filers = self.filers.read().await; + if let Some(entry) = filers.get(path) { + // Upgrade lifetime if needed (workspace → permanent) + if lifetime == FilerLifetime::Permanent + && entry.lifetime == FilerLifetime::Workspace + { + drop(filers); + let mut filers = self.filers.write().await; + if let Some(entry) = filers.get_mut(path) { + entry.lifetime = FilerLifetime::Permanent; + } + } + return Ok(false); + } + } + + // Create new filer + let filer = start_filer(path, app, config).await?; + + let mut filers = self.filers.write().await; + // Double-check in case another task raced us + if filers.contains_key(path) { + // Filer was created by another task between our read and write + return Ok(false); + } + filers.insert(path.to_owned(), FilerEntry { filer, lifetime }); + Ok(true) + } + + /// Stop and remove a filer for the given path. Only stops workspace-scoped + /// filers — permanent filers are preserved. + /// + /// Returns `true` if the filer was actually stopped. + pub async fn stop_filer(&self, path: &str) -> bool { + debug_assert!( + path.ends_with('/'), + "FilerManager paths must have trailing slash: {path}" + ); + let mut filers = self.filers.write().await; + if let Some(entry) = filers.get(path) { + if entry.lifetime == FilerLifetime::Permanent { + return false; + } + filers.remove(path); + true + } else { + false + } + } + + /// Collect all files from all filers into a single Vec. 
+ /// Used by `session_load` to return the complete file listing. + pub async fn collect_all_files(&self) -> Vec { + // Collect Arc handles under the outer lock, then release it before + // awaiting the inner per-filer locks — avoids holding the manager + // lock across await points (which would block start_filer/stop_filer). + let file_maps: Vec>>> = { + let filers = self.filers.read().await; + filers.values().map(|e| Arc::clone(&e.filer.files)).collect() + }; + + let mut all_files = Vec::new(); + for files in &file_maps { + let index = files.read().await; + all_files.extend(index.values().cloned()); + } + all_files + } +} diff --git a/crates/zzz_server/src/handlers.rs b/crates/zzz_server/src/handlers.rs new file mode 100644 index 000000000..808dd2418 --- /dev/null +++ b/crates/zzz_server/src/handlers.rs @@ -0,0 +1,855 @@ +use std::collections::HashMap; +use std::path::Path; +use std::sync::atomic::{AtomicBool, AtomicU64}; +use std::sync::{Arc, RwLock}; + +use deadpool_postgres::Pool; +use fuz_common::JsonRpcError; +use serde::Serialize; +use serde_json::Value; +use tokio::sync::mpsc; + +use crate::auth::{Keyring, RequestContext}; +use crate::daemon_token::SharedDaemonTokenState; +use crate::filer::{FilerConfig, FilerLifetime, FilerManager}; +use crate::provider::{self, CompletionHandlerOptions, CompletionOptions, ProviderManager, ProviderName}; +use crate::pty_manager::PtyManager; +use crate::rpc; +use crate::scoped_fs::ScopedFs; + +// -- Connection tracking types ------------------------------------------------ + +/// Unique ID for a WebSocket connection, allocated via `App::next_connection_id`. +pub type ConnectionId = u64; + +/// Handle to a connected WebSocket client — messages sent here are forwarded to the WS sink. +pub type ConnectionSender = mpsc::UnboundedSender; + +/// Metadata for an active WebSocket connection. 
+/// +/// Tracks the channel sender plus auth context for targeted revocation: +/// - `token_hash`: blake3 hash of the session token (for session-level revocation) +/// - `account_id`: account UUID (for account-level revocation on logout/password change) +pub struct ConnectionInfo { + pub sender: ConnectionSender, + pub token_hash: Option, + pub account_id: Option, +} + +// -- App state (long-lived, shared via Arc) ----------------------------------- + +/// Server state shared across all requests. +/// +/// Constructed once in `main`, wrapped in `Arc`, passed as axum `State`. +pub struct App { + pub workspaces: RwLock>, + pub db_pool: Pool, + pub keyring: Keyring, + pub allowed_origins: Vec, + pub bootstrap_token_path: Option, + pub bootstrap_available: AtomicBool, + pub scoped_fs: ScopedFs, + pub zzz_dir: String, + pub scoped_dirs: Vec, + /// Monotonic counter for assigning unique connection IDs. + next_connection_id: AtomicU64, + /// Active WebSocket connections — keyed by `ConnectionId`. + pub connections: RwLock>, + /// Active file watchers — one per unique directory path, with lifetime tracking. + pub filer_manager: FilerManager, + /// PTY terminal manager. + pub pty_manager: PtyManager, + /// Daemon token state for `X-Daemon-Token` auth. + pub daemon_token_state: Option, + /// AI provider manager (Anthropic, `OpenAI`, Gemini, Ollama). + pub provider_manager: ProviderManager, + /// Default completion options. 
+ pub completion_options: CompletionOptions, +} + +impl App { + #[allow(clippy::too_many_arguments)] + pub fn new( + db_pool: Pool, + keyring: Keyring, + allowed_origins: Vec, + bootstrap_token_path: Option, + bootstrap_available: bool, + scoped_fs: ScopedFs, + zzz_dir: String, + scoped_dirs: Vec, + daemon_token_state: Option, + provider_manager: ProviderManager, + ) -> Self { + Self { + workspaces: RwLock::new(HashMap::new()), + db_pool, + keyring, + allowed_origins, + bootstrap_token_path, + bootstrap_available: AtomicBool::new(bootstrap_available), + scoped_fs, + zzz_dir, + scoped_dirs, + next_connection_id: AtomicU64::new(1), + connections: RwLock::new(HashMap::new()), + filer_manager: FilerManager::new(), + pty_manager: PtyManager::new(), + daemon_token_state, + provider_manager, + completion_options: CompletionOptions::default(), + } + } + + /// Allocate a new connection ID and register the sender with auth metadata. + /// + /// Returns the ID — caller must call `remove_connection` on disconnect. + pub fn add_connection( + &self, + sender: ConnectionSender, + token_hash: Option, + account_id: Option, + ) -> ConnectionId { + let id = self + .next_connection_id + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); + if let Ok(mut conns) = self.connections.write() { + conns.insert( + id, + ConnectionInfo { + sender, + token_hash, + account_id, + }, + ); + } + id + } + + /// Remove a connection by ID (called on WS disconnect). + pub fn remove_connection(&self, id: ConnectionId) { + if let Ok(mut conns) = self.connections.write() { + conns.remove(&id); + } + } + + /// Broadcast a message to all connected clients. + pub fn broadcast(&self, message: &str) { + if let Ok(conns) = self.connections.read() { + for info in conns.values() { + let _ = info.sender.send(message.to_owned()); + } + } + } + + /// Send a message to a specific connection. 
+ pub fn send_to(&self, id: ConnectionId, message: &str) { + if let Ok(conns) = self.connections.read() + && let Some(info) = conns.get(&id) + { + let _ = info.sender.send(message.to_owned()); + } + } + + /// Close all WebSocket connections for a given session token hash. + /// + /// Used for session revocation — the sender is dropped, which causes + /// the WS handler's `notify_rx.recv()` to return `None` and break + /// the connection loop. + /// + /// Returns the number of connections closed. + pub fn close_sockets_for_session(&self, target_hash: &str) -> usize { + let mut count = 0; + if let Ok(mut conns) = self.connections.write() { + conns.retain(|_, info| { + let matches = info + .token_hash + .as_deref() + .is_some_and(|h| h == target_hash); + if matches { + count += 1; + } + !matches // retain = keep non-matching + }); + } + count + } + + /// Close all WebSocket connections for a given account. + /// + /// Used on logout, password change, and token revocation. + /// Returns the number of connections closed. + pub fn close_sockets_for_account(&self, target_id: uuid::Uuid) -> usize { + let mut count = 0; + if let Ok(mut conns) = self.connections.write() { + conns.retain(|_, info| { + let matches = info.account_id.is_some_and(|id| id == target_id); + if matches { + count += 1; + } + !matches + }); + } + count + } +} + +// -- Per-request context (constructed by transport) --------------------------- + +/// Per-request context passed to handler functions. +/// +/// Borrows `App` and the request id from the parsed envelope. +/// The transport constructs this before calling `dispatch`. +pub struct Ctx<'a> { + pub app: &'a App, + /// Clone of the `Arc` — handlers that need to spawn tasks (e.g. + /// file watchers) can clone this to move into the spawned future. + pub app_arc: Arc, + pub request_id: &'a Value, + pub auth: Option<&'a RequestContext>, + /// WebSocket connection ID — `None` for HTTP requests. 
+ /// Used for targeted `completion_progress` streaming notifications. + pub connection_id: Option, +} + +// -- Domain types ------------------------------------------------------------- + +/// Metadata for an open workspace directory. +/// +/// Matches the TypeScript `WorkspaceInfoJson` schema: +/// `{ path: string, name: string, opened_at: string }`. +#[derive(Debug, Clone, Serialize)] +pub struct WorkspaceInfo { + pub path: String, + pub name: String, + pub opened_at: String, +} + +// -- Notification params ------------------------------------------------------ + +/// Params for `workspace_changed` `remote_notification`. +/// +/// Matches the TypeScript `workspace_changed_action_spec` input schema: +/// `{ type: 'open' | 'close', workspace: WorkspaceInfoJson }`. +#[derive(Serialize)] +struct WorkspaceChangedParams<'a> { + #[serde(rename = "type")] + change_type: &'a str, + workspace: &'a WorkspaceInfo, +} + +// -- Typed response structs (avoid json!() macro allocation) ------------------ + +#[derive(Serialize)] +struct PingResult<'a> { + ping_id: &'a Value, +} + +#[derive(Serialize)] +struct WorkspaceListResult { + workspaces: Vec, +} + +#[derive(Serialize)] +struct WorkspaceOpenResult { + workspace: WorkspaceInfo, + files: Vec, // always empty — initial files sent via session_load, watcher handles updates +} + +#[derive(Serialize)] +struct SessionLoadData { + files: Vec, + zzz_dir: String, + scoped_dirs: Vec, + provider_status: Vec, + workspaces: Vec, +} + +#[derive(Serialize)] +struct SessionLoadResult { + data: SessionLoadData, +} + +// -- Path helpers ------------------------------------------------------------- + +/// Convert a resolved path to a normalized directory string with trailing `/`. +/// +/// Rejects non-UTF-8 paths explicitly — no lossy replacement with U+FFFD. +fn to_normalized_dir(path: &Path) -> Result { + let mut s = path + .to_str() + .ok_or_else(|| rpc::internal_error("path is not valid UTF-8"))? 
+ .to_owned(); + if !s.ends_with('/') { + s.push('/'); + } + Ok(s) +} + +// -- Dispatch ----------------------------------------------------------------- + +/// Route a method to its handler. +/// +/// Auth is checked by the transport BEFORE calling dispatch. +/// Match statement dispatch — zero overhead, compiler can inline. +pub async fn dispatch(method: &str, params: &Value, ctx: &Ctx<'_>) -> Result { + match method { + "ping" => handle_ping(ctx), + "session_load" => handle_session_load(ctx).await, + "workspace_list" => handle_workspace_list(ctx), + "workspace_open" => handle_workspace_open(params, ctx).await, + "workspace_close" => handle_workspace_close(params, ctx).await, + "diskfile_update" => handle_diskfile_update(params, ctx).await, + "diskfile_delete" => handle_diskfile_delete(params, ctx).await, + "directory_create" => handle_directory_create(params, ctx).await, + "provider_load_status" => handle_provider_load_status(params, ctx).await, + "provider_update_api_key" => handle_provider_update_api_key(params, ctx).await, + "completion_create" => handle_completion_create(params, ctx).await, + "terminal_create" => handle_terminal_create(params, ctx).await, + "terminal_data_send" => handle_terminal_data_send(params, ctx).await, + "terminal_resize" => handle_terminal_resize(params, ctx).await, + "terminal_close" => handle_terminal_close(params, ctx).await, + other => Err(rpc::method_not_found(other)), + } +} + +// -- Handlers ----------------------------------------------------------------- + +fn handle_ping(ctx: &Ctx<'_>) -> Result { + let result = PingResult { + ping_id: ctx.request_id, + }; + serde_json::to_value(result).map_err(|_| rpc::internal_error("serialization failed")) +} + +async fn handle_session_load(ctx: &Ctx<'_>) -> Result { + let workspaces: Vec = { + let ws = ctx + .app + .workspaces + .read() + .map_err(|_| rpc::internal_error("lock poisoned"))?; + ws.values().cloned().collect() + }; + + // Read files from all filer indexes (matches Deno's 
session_load which + // iterates backend.filers.entries() — no filesystem walk at call time) + let files = ctx.app.filer_manager.collect_all_files().await; + + // Collect provider status from all registered providers + let mut provider_status = Vec::new(); + for p in ctx.app.provider_manager.all() { + let status = p.load_status(false).await; + if let Ok(v) = serde_json::to_value(&status) { + provider_status.push(v); + } + } + + let result = SessionLoadResult { + data: SessionLoadData { + files, + zzz_dir: ctx.app.zzz_dir.clone(), + scoped_dirs: ctx.app.scoped_dirs.clone(), + provider_status, + workspaces, + }, + }; + serde_json::to_value(result).map_err(|_| rpc::internal_error("serialization failed")) +} + +fn handle_workspace_list(ctx: &Ctx<'_>) -> Result { + // Clone values under read lock, release before serialization + let list: Vec = { + let workspaces = ctx + .app + .workspaces + .read() + .map_err(|_| rpc::internal_error("lock poisoned"))?; + workspaces.values().cloned().collect() + }; + let result = WorkspaceListResult { workspaces: list }; + serde_json::to_value(result).map_err(|_| rpc::internal_error("serialization failed")) +} + +async fn handle_workspace_open(params: &Value, ctx: &Ctx<'_>) -> Result { + // 1. Extract path from params (zero-copy — no from_value clone) + let path = params + .get("path") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'path' parameter"))?; + + // 2. 
Canonicalize and validate directory + // Error messages include trailing / to match Deno's resolved path format + let canonical = Path::new(path).canonicalize().map_err(|_| { + let suffix = if path.ends_with('/') { "" } else { "/" }; + rpc::internal_error(&format!( + "failed to open workspace: directory does not exist: {path}{suffix}" + )) + })?; + + if !canonical.is_dir() { + let suffix = if path.ends_with('/') { "" } else { "/" }; + return Err(rpc::internal_error(&format!( + "failed to open workspace: not a directory: {path}{suffix}" + ))); + } + + // 3. Normalize — absolute, UTF-8 validated, trailing / + let normalized = to_normalized_dir(&canonical)?; + + // 4. Fast path — return existing workspace (read lock, released before serialization) + let existing = { + let workspaces = ctx + .app + .workspaces + .read() + .map_err(|_| rpc::internal_error("lock poisoned"))?; + workspaces.get(&normalized).cloned() + }; + + if let Some(workspace) = existing { + let result = WorkspaceOpenResult { + workspace, + files: vec![], + }; + return serde_json::to_value(result) + .map_err(|_| rpc::internal_error("serialization failed")); + } + + // 5. 
Create new workspace entry (write lock, released before serialization) + // UTF-8 already validated by to_normalized_dir + let name = canonical + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("") + .to_owned(); + + let info = WorkspaceInfo { + path: normalized.clone(), + name, + opened_at: fuz_common::rfc3339_now(), + }; + + // entry() handles the double-check naturally — if another thread inserted + // between our read and write locks, or_insert returns the existing entry + let workspace = { + let mut workspaces = ctx + .app + .workspaces + .write() + .map_err(|_| rpc::internal_error("lock poisoned"))?; + workspaces.entry(normalized).or_insert(info).clone() + }; + + // Add to ScopedFs so diskfile_update/diskfile_delete/directory_create can + // write inside the newly opened workspace (mirrors Deno backend.ts:284) + ctx.app.scoped_fs.add_path(Path::new(&workspace.path)); + + // Start file watcher for the new workspace (deduplicates — reuses existing filer) + if let Err(e) = ctx + .app + .filer_manager + .start_filer( + &workspace.path, + Arc::clone(&ctx.app_arc), + FilerConfig::workspace(&ctx.app.zzz_dir), + FilerLifetime::Workspace, + ) + .await + { + tracing::warn!(path = %workspace.path, error = %e, "failed to start file watcher"); + } + + // Broadcast workspace_changed notification to all connected clients + let notification = rpc::notification( + "workspace_changed", + serde_json::to_value(&WorkspaceChangedParams { + change_type: "open", + workspace: &workspace, + }) + .unwrap_or_default(), + ); + ctx.app.broadcast(¬ification); + + let result = WorkspaceOpenResult { + workspace, + files: vec![], + }; + serde_json::to_value(result).map_err(|_| rpc::internal_error("serialization failed")) +} + +async fn handle_workspace_close(params: &Value, ctx: &Ctx<'_>) -> Result { + let path = params + .get("path") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'path' parameter"))?; + + // Clients send the normalized path from 
workspace_open — no filesystem + // calls needed, just ensure trailing / for consistent HashMap lookup + let mut key = path.to_owned(); + if !key.ends_with('/') { + key.push('/'); + } + + let removed = { + let mut workspaces = ctx + .app + .workspaces + .write() + .map_err(|_| rpc::internal_error("lock poisoned"))?; + workspaces.remove(&key) + }; + + let Some(workspace) = removed else { + return Err(rpc::invalid_params(&format!( + "workspace not open: {path}" + ))); + }; + + // Only stop the filer and remove ScopedFs entry if this wasn't an initial + // scoped_dir — those filers and ScopedFs entries persist even after close + // (mirrors Deno backend.ts:330-341) + let is_initial_scoped_dir = ctx.app.scoped_dirs.contains(&key); + if !is_initial_scoped_dir { + ctx.app.filer_manager.stop_filer(&key).await; + ctx.app.scoped_fs.remove_path(Path::new(&key)); + } + + // Broadcast workspace_changed notification to all connected clients + let notification = rpc::notification( + "workspace_changed", + serde_json::to_value(&WorkspaceChangedParams { + change_type: "close", + workspace: &workspace, + }) + .unwrap_or_default(), + ); + ctx.app.broadcast(¬ification); + + Ok(Value::Null) +} + +// -- Provider handlers -------------------------------------------------------- + +#[derive(Serialize)] +struct ProviderStatusResult { + status: provider::ProviderStatus, +} + +async fn handle_provider_load_status( + params: &Value, + ctx: &Ctx<'_>, +) -> Result { + let name_str = params + .get("provider_name") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'provider_name' parameter"))?; + + let provider_name: ProviderName = serde_json::from_value(Value::String(name_str.to_owned())) + .map_err(|_| rpc::invalid_params(&format!("unknown provider: {name_str}")))?; + + let reload = params + .get("reload") + .and_then(Value::as_bool) + .unwrap_or(false); + + let provider = ctx.app.provider_manager.require(provider_name)?; + let status = 
provider.load_status(reload).await; + + serde_json::to_value(ProviderStatusResult { status }) + .map_err(|_| rpc::internal_error("serialization failed")) +} + +async fn handle_provider_update_api_key( + params: &Value, + ctx: &Ctx<'_>, +) -> Result { + let name_str = params + .get("provider_name") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'provider_name' parameter"))?; + + let provider_name: ProviderName = serde_json::from_value(Value::String(name_str.to_owned())) + .map_err(|_| rpc::invalid_params(&format!("unknown provider: {name_str}")))?; + + if provider_name == ProviderName::Ollama { + return Err(rpc::invalid_params("Ollama does not require an API key")); + } + + let api_key = params + .get("api_key") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'api_key' parameter"))?; + + let provider = ctx.app.provider_manager.require(provider_name)?; + provider.set_api_key(Some(api_key.to_owned())).await; + let status = provider.load_status(true).await; + + serde_json::to_value(ProviderStatusResult { status }) + .map_err(|_| rpc::internal_error("serialization failed")) +} + +async fn handle_completion_create( + params: &Value, + ctx: &Ctx<'_>, +) -> Result { + let request = params + .get("completion_request") + .ok_or_else(|| rpc::invalid_params("missing 'completion_request' parameter"))?; + + let provider_name_str = request + .get("provider_name") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing 'provider_name' in completion_request"))?; + + let provider_name: ProviderName = + serde_json::from_value(Value::String(provider_name_str.to_owned())) + .map_err(|_| rpc::invalid_params(&format!("unknown provider: {provider_name_str}")))?; + + let model = request + .get("model") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing 'model' in completion_request"))? 
+ .to_owned(); + + let prompt = request + .get("prompt") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing 'prompt' in completion_request"))? + .to_owned(); + + let completion_messages: Option> = request + .get("completion_messages") + .and_then(|v| serde_json::from_value(v.clone()).ok()); + + let progress_token = params + .get("_meta") + .and_then(|m| m.get("progressToken")) + .and_then(Value::as_str) + .map(String::from); + + let completion_options = ctx.app.completion_options.clone(); + + let handler_options = CompletionHandlerOptions { + model, + completion_options, + completion_messages, + prompt, + progress_token: progress_token.clone(), + }; + + // Build progress sender for streaming (only works over WebSocket) + let progress_sender: Option = + match (ctx.connection_id, &progress_token) { + (Some(conn_id), Some(token)) => { + let app = Arc::clone(&ctx.app_arc); + let token = token.clone(); + Some(Box::new(move |chunk: Value| { + let notification = rpc::notification( + "completion_progress", + serde_json::json!({ + "chunk": chunk, + "_meta": { "progressToken": token }, + }), + ); + app.send_to(conn_id, ¬ification); + })) + } + _ => None, + }; + + let provider = ctx.app.provider_manager.require(provider_name)?; + let mut result = provider + .complete(&handler_options, progress_sender.as_ref()) + .await?; + + // Add _meta.progressToken to response if streaming was requested + if let Some(token) = &progress_token + && let Some(obj) = result.as_object_mut() + { + obj.insert( + "_meta".to_owned(), + serde_json::json!({"progressToken": token}), + ); + } + + Ok(result) +} + +// -- Filesystem handlers ------------------------------------------------------ + +async fn handle_diskfile_update(params: &Value, ctx: &Ctx<'_>) -> Result { + let path = params + .get("path") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'path' parameter"))?; + if !path.starts_with('/') { + return Err(rpc::invalid_params("path must 
be absolute")); + } + let content = params + .get("content") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'content' parameter"))?; + + ctx.app + .scoped_fs + .write_file(path, content) + .await + .map_err(|e| rpc::internal_error(&format!("failed to write file: {e}")))?; + + Ok(Value::Null) +} + +async fn handle_diskfile_delete(params: &Value, ctx: &Ctx<'_>) -> Result { + let path = params + .get("path") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'path' parameter"))?; + if !path.starts_with('/') { + return Err(rpc::invalid_params("path must be absolute")); + } + + ctx.app + .scoped_fs + .rm(path) + .await + .map_err(|e| rpc::internal_error(&format!("failed to delete file: {e}")))?; + + Ok(Value::Null) +} + +async fn handle_directory_create(params: &Value, ctx: &Ctx<'_>) -> Result { + let path = params + .get("path") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'path' parameter"))?; + if !path.starts_with('/') { + return Err(rpc::invalid_params("path must be absolute")); + } + + ctx.app + .scoped_fs + .mkdir(path) + .await + .map_err(|e| rpc::internal_error(&format!("failed to create directory: {e}")))?; + + Ok(Value::Null) +} + +// -- Terminal handlers -------------------------------------------------------- + +#[derive(Serialize)] +struct TerminalCreateResult { + terminal_id: String, +} + +#[derive(Serialize)] +struct TerminalCloseResult { + exit_code: Option, +} + +async fn handle_terminal_create(params: &Value, ctx: &Ctx<'_>) -> Result { + let command = params + .get("command") + .and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'command' parameter"))?; + + let args: Vec = match params.get("args") { + Some(Value::Array(arr)) => arr + .iter() + .map(|v| { + v.as_str() + .map(String::from) + .ok_or_else(|| rpc::invalid_params("args must be an array of strings")) + }) + .collect::, _>>()?, + Some(Value::Null) | None 
=> vec![],
        _ => return Err(rpc::invalid_params("args must be an array of strings")),
    };

    let cwd = params.get("cwd").and_then(Value::as_str);

    // Random id keyed into PtyManager; returned to the client for follow-up calls.
    let terminal_id = uuid::Uuid::new_v4().to_string();

    ctx.app
        .pty_manager
        .spawn(&terminal_id, command, &args, cwd, Arc::clone(&ctx.app_arc))
        .await
        .map_err(|e| rpc::internal_error(&format!("failed to create terminal: {e}")))?;

    serde_json::to_value(TerminalCreateResult { terminal_id })
        .map_err(|_| rpc::internal_error("serialization failed"))
}

/// Forward input data to a terminal's PTY.
async fn handle_terminal_data_send(params: &Value, ctx: &Ctx<'_>) -> Result<Value, JsonRpcError> {
    let terminal_id = params
        .get("terminal_id")
        .and_then(Value::as_str)
        .ok_or_else(|| rpc::invalid_params("missing or invalid 'terminal_id' parameter"))?;

    let data = params
        .get("data")
        .and_then(Value::as_str)
        .ok_or_else(|| rpc::invalid_params("missing or invalid 'data' parameter"))?;

    // No-ops silently if terminal doesn't exist (matching Deno behavior)
    ctx.app.pty_manager.write(terminal_id, data).await;

    Ok(Value::Null)
}

/// Resize a terminal's PTY to `cols` x `rows`.
///
/// Dimensions must fit in u16 (the PTY window-size type); out-of-range
/// values are rejected rather than silently truncated.
async fn handle_terminal_resize(params: &Value, ctx: &Ctx<'_>) -> Result<Value, JsonRpcError> {
    let terminal_id = params
        .get("terminal_id")
        .and_then(Value::as_str)
        .ok_or_else(|| rpc::invalid_params("missing or invalid 'terminal_id' parameter"))?;

    let cols = params
        .get("cols")
        .and_then(Value::as_u64)
        .and_then(|v| u16::try_from(v).ok())
        .ok_or_else(|| rpc::invalid_params("missing or invalid 'cols' parameter"))?;

    let rows = params
        .get("rows")
        .and_then(Value::as_u64)
        .and_then(|v| u16::try_from(v).ok())
        .ok_or_else(|| rpc::invalid_params("missing or invalid 'rows' parameter"))?;

    // No-ops silently if terminal doesn't exist; resize failures are non-fatal
    ctx.app.pty_manager.resize(terminal_id, cols, rows).await;

    Ok(Value::Null)
}

/// Kill a terminal's process (SIGTERM by default, SIGKILL on request)
/// and report its exit code.
async fn handle_terminal_close(params: &Value, ctx: &Ctx<'_>) -> Result<Value, JsonRpcError> {
    let terminal_id = params
        .get("terminal_id")
.and_then(Value::as_str) + .ok_or_else(|| rpc::invalid_params("missing or invalid 'terminal_id' parameter"))?; + + let signal_str = params + .get("signal") + .and_then(Value::as_str) + .unwrap_or("SIGTERM"); + + let signal = match signal_str { + "SIGKILL" => libc::SIGKILL, + _ => libc::SIGTERM, // default to SIGTERM + }; + + // Returns {exit_code: null} if terminal doesn't exist (matching Deno behavior) + let exit_code = ctx + .app + .pty_manager + .kill(terminal_id, signal) + .await + .flatten(); + + serde_json::to_value(TerminalCloseResult { exit_code }) + .map_err(|_| rpc::internal_error("serialization failed")) +} diff --git a/crates/zzz_server/src/main.rs b/crates/zzz_server/src/main.rs new file mode 100644 index 000000000..b10c7a4e9 --- /dev/null +++ b/crates/zzz_server/src/main.rs @@ -0,0 +1,417 @@ +mod account; +mod auth; +mod bootstrap; +mod daemon_token; +mod db; +mod error; +mod filer; +mod handlers; +mod provider; +mod pty_manager; +mod rpc; +mod scoped_fs; +mod ws; + +use std::net::SocketAddr; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use axum::routing::{get, post}; +use axum::{Json, Router}; +use error::ServerError; +use serde::Serialize; +use tokio::net::TcpListener; +use tokio_util::sync::CancellationToken; +use tower_http::services::ServeDir; +use tracing_subscriber::EnvFilter; + +const DEFAULT_PORT: u16 = 1174; + +#[tokio::main] +async fn main() { + tracing_subscriber::fmt() + .with_env_filter( + EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")), + ) + .init(); + + if let Err(e) = run().await { + tracing::error!(error = %e, "fatal"); + std::process::exit(1); + } +} + +async fn run() -> Result<(), ServerError> { + let config = parse_config()?; + + // Database — required + let pool = db::create_pool(&config.database_url)?; + db::run_migrations(&pool).await?; + + // Keyring — required + let keyring = auth::Keyring::new(&config.secret_cookie_keys).ok_or_else(|| { + ServerError::Config("SECRET_COOKIE_KEYS is 
required (no valid keys found)".to_owned()) + })?; + + let errors = auth::Keyring::validate(&config.secret_cookie_keys); + if !errors.is_empty() { + return Err(ServerError::Config(format!( + "SECRET_COOKIE_KEYS validation failed: {}", + errors.join(", ") + ))); + } + + // Bootstrap availability check + let bootstrap_available = check_bootstrap_available(&pool, config.bootstrap_token_path.as_ref()).await; + + let allowed_origins = config + .allowed_origins + .as_deref() + .map(auth::parse_allowed_origins) + .unwrap_or_default(); + + let scoped_dir_strings: Vec = config + .scoped_dirs + .iter() + .map(|p| resolve_dir(p)) + .collect(); + + // Include zzz_dir first (like Deno: `new ScopedFs([this.zzz_dir, ...this.scoped_dirs])`) + // Use canonicalized paths, not raw config paths + let mut scoped_fs_paths: Vec = Vec::with_capacity(1 + scoped_dir_strings.len()); + scoped_fs_paths.push(PathBuf::from(&config.zzz_dir)); + scoped_fs_paths.extend(scoped_dir_strings.iter().map(PathBuf::from)); + let scoped_fs = scoped_fs::ScopedFs::new(scoped_fs_paths); + + // Daemon token — initialize state, write token to disk + let daemon_token_state = match daemon_token::init_daemon_token(&config.zzz_dir).await { + Ok(state) => { + // Resolve keeper_account_id if an account with keeper role already exists + if let Ok(client) = pool.get().await + && let Ok(Some(account_id)) = + db::query_keeper_account_id(&client).await + { + state.write().await.keeper_account_id = Some(account_id); + tracing::info!(%account_id, "daemon token: keeper account resolved"); + } + Some(state) + } + Err(e) => { + tracing::warn!(error = %e, "daemon token init failed — running without daemon token auth"); + None + } + }; + + // AI providers — read API keys from env, construct ProviderManager + let mut provider_manager = provider::ProviderManager::new(); + provider_manager.add(provider::Provider::Anthropic( + provider::anthropic::AnthropicProvider::new( + std::env::var("SECRET_ANTHROPIC_API_KEY").ok(), + ), + )); + 
provider_manager.add(provider::Provider::OpenAi( + provider::openai::OpenAiProvider::new( + std::env::var("SECRET_OPENAI_API_KEY").ok(), + ), + )); + provider_manager.add(provider::Provider::Gemini( + provider::gemini::GeminiProvider::new( + std::env::var("SECRET_GOOGLE_API_KEY").ok(), + ), + )); + provider_manager.add(provider::Provider::Ollama( + provider::ollama::OllamaProvider::new(), + )); + + let app_state = Arc::new(handlers::App::new( + pool, + keyring, + allowed_origins, + config.bootstrap_token_path, + bootstrap_available, + scoped_fs, + config.zzz_dir, + scoped_dir_strings, + daemon_token_state.clone(), + provider_manager, + )); + + // Start file watchers at startup (matches Deno's Backend constructor + // which calls `this.#start_filer(this.zzz_dir)` then iterates scoped_dirs). + // zzz_dir uses FilerConfig::zzz_dir() (no .zzz ignore); scoped_dirs use workspace config. + match app_state + .filer_manager + .start_filer( + &app_state.zzz_dir, + Arc::clone(&app_state), + filer::FilerConfig::zzz_dir(), + filer::FilerLifetime::Permanent, + ) + .await + { + Ok(_) => tracing::info!(path = %app_state.zzz_dir, "started zzz_dir filer"), + Err(e) => tracing::warn!(path = %app_state.zzz_dir, error = %e, "failed to start zzz_dir filer"), + } + + for dir in &app_state.scoped_dirs { + if *dir == app_state.zzz_dir { + continue; + } + match app_state + .filer_manager + .start_filer( + dir, + Arc::clone(&app_state), + filer::FilerConfig::workspace(&app_state.zzz_dir), + filer::FilerLifetime::Permanent, + ) + .await + { + Ok(_) => tracing::info!(path = %dir, "started scoped_dir filer"), + Err(e) => tracing::warn!(path = %dir, error = %e, "failed to start scoped_dir filer"), + } + } + + // Spawn daemon token rotation task + let rotation_handle = daemon_token_state.map(daemon_token::spawn_rotation_task); + + let app_state_for_shutdown = Arc::clone(&app_state); + + let mut app = Router::new() + .route("/api/rpc", get(rpc::rpc_get_handler).post(rpc::rpc_handler)) + 
.route("/api/ws", get(ws::ws_handler)) + .route("/health", get(health_handler)) + .route("/api/account/bootstrap", post(bootstrap::bootstrap_handler)) + .route("/api/account/status", get(account::status_handler)) + .route("/api/account/login", post(account::login_handler)) + .route("/api/account/logout", post(account::logout_handler)) + .route("/api/account/password", post(account::password_handler)) + .route("/api/account/sessions", get(account::sessions_list_handler)) + .route("/api/account/sessions/{id}/revoke", post(account::session_revoke_handler)) + .with_state(app_state); + + if let Some(ref dir) = config.static_dir { + tracing::info!(dir = %dir.display(), "serving static files"); + app = app.fallback_service(ServeDir::new(dir)); + } + + let addr = SocketAddr::from(([127, 0, 0, 1], config.port)); + let listener = TcpListener::bind(addr) + .await + .map_err(|source| ServerError::Bind { addr, source })?; + + tracing::info!("zzz_server listening on {addr}"); + + let shutdown = CancellationToken::new(); + let shutdown_signal = shutdown.clone(); + tokio::spawn(async move { + wait_for_shutdown_signal().await; + tracing::info!("shutdown signal received"); + shutdown_signal.cancel(); + }); + + axum::serve(listener, app) + .with_graceful_shutdown(shutdown.cancelled_owned()) + .await + .map_err(ServerError::Serve)?; + + // Stop daemon token rotation + if let Some(handle) = rotation_handle { + handle.abort(); + } + + // Clean up spawned terminal processes before exiting + app_state_for_shutdown.pty_manager.destroy().await; + + tracing::info!("server shutdown complete"); + Ok(()) +} + +#[derive(Serialize)] +struct HealthResponse { + status: &'static str, +} + +async fn health_handler() -> Json { + Json(HealthResponse { status: "ok" }) +} + +// -- Config ------------------------------------------------------------------- + +struct Config { + port: u16, + static_dir: Option, + database_url: String, + secret_cookie_keys: String, + bootstrap_token_path: Option, + 
allowed_origins: Option<String>,
    scoped_dirs: Vec<PathBuf>,
    zzz_dir: String,
}

/// Resolve a path to an absolute, canonical, normalized directory string
/// with trailing `/`. Tries `canonicalize` (resolves symlinks, requires path
/// to exist), falls back to `absolute` (no I/O), falls back to the raw path.
/// NOTE(review): appends '/' unconditionally — assumes Unix-style separators;
/// confirm whether Windows paths need handling.
fn resolve_dir(path: &Path) -> String {
    let mut s = std::fs::canonicalize(path)
        .unwrap_or_else(|_| std::path::absolute(path).unwrap_or_else(|_| path.to_path_buf()))
        .to_string_lossy()
        .into_owned();
    if !s.ends_with('/') {
        s.push('/');
    }
    s
}

/// Build the server `Config` from CLI flags (`--port`, `--static-dir`) and
/// environment variables. CLI flags take precedence over `ZZZ_PORT` /
/// `ZZZ_STATIC_DIR`. `DATABASE_URL` and `SECRET_COOKIE_KEYS` are required;
/// everything else is optional with defaults.
fn parse_config() -> Result<Config, ServerError> {
    let mut port: Option<u16> = None;
    let mut static_dir: Option<PathBuf> = None;

    let args: Vec<String> = std::env::args().collect();
    let mut i = 1;
    while i < args.len() {
        match args[i].as_str() {
            "--port" => {
                i += 1;
                if let Some(val) = args.get(i) {
                    if let Ok(p) = val.parse() {
                        port = Some(p);
                    } else {
                        // Invalid values are warned about, not fatal — the
                        // DEFAULT_PORT fallback still applies.
                        tracing::warn!(value = val.as_str(), "invalid --port value, ignoring");
                    }
                }
            }
            "--static-dir" => {
                i += 1;
                if let Some(val) = args.get(i) {
                    static_dir = Some(PathBuf::from(val));
                }
            }
            _ => {}
        }
        i += 1;
    }

    // Fall back to env vars for port/static_dir
    if port.is_none()
        && let Ok(val) = std::env::var("ZZZ_PORT")
    {
        if let Ok(p) = val.parse() {
            port = Some(p);
        } else {
            tracing::warn!(value = val.as_str(), "invalid ZZZ_PORT value, ignoring");
        }
    }
    if static_dir.is_none()
        && let Ok(val) = std::env::var("ZZZ_STATIC_DIR")
    {
        static_dir = Some(PathBuf::from(val));
    }

    // Required env vars
    let database_url = std::env::var("DATABASE_URL")
        .map_err(|_| ServerError::Config("DATABASE_URL is required".to_owned()))?;

    let secret_cookie_keys = std::env::var("SECRET_COOKIE_KEYS")
        .map_err(|_| ServerError::Config("SECRET_COOKIE_KEYS is required".to_owned()))?;

    let bootstrap_token_path = std::env::var("BOOTSTRAP_TOKEN_PATH").ok();
    let allowed_origins = std::env::var("ALLOWED_ORIGINS").ok();

    let scoped_dirs =
std::env::var("PUBLIC_ZZZ_SCOPED_DIRS") + .unwrap_or_default() + .split(',') + .map(str::trim) + .filter(|s| !s.is_empty()) + .map(PathBuf::from) + .collect(); + + let zzz_dir = { + let raw = std::env::var("PUBLIC_ZZZ_DIR").unwrap_or_else(|_| ".zzz/".to_owned()); + resolve_dir(Path::new(&raw)) + }; + + Ok(Config { + port: port.unwrap_or(DEFAULT_PORT), + static_dir, + database_url, + secret_cookie_keys, + bootstrap_token_path, + allowed_origins, + scoped_dirs, + zzz_dir, + }) +} + +/// Check if bootstrap is available (token file exists and not yet bootstrapped). +async fn check_bootstrap_available( + pool: &deadpool_postgres::Pool, + token_path: Option<&String>, +) -> bool { + let Some(path) = token_path else { + return false; + }; + + // Check if token file exists + if tokio::fs::metadata(path).await.is_err() { + tracing::info!("bootstrap unavailable: token file not found"); + return false; + } + + // Check bootstrap_lock table + let Ok(client) = pool.get().await else { + return false; + }; + + let Ok(row) = client + .query_opt( + "SELECT bootstrapped FROM bootstrap_lock WHERE id = 1", + &[], + ) + .await + else { + return false; + }; + + if let Some(row) = row { + let bootstrapped: bool = row.get(0); + if bootstrapped { + tracing::info!("bootstrap unavailable: already bootstrapped"); + return false; + } + } + + tracing::info!(path = %path, "bootstrap token available"); + true +} + +// -- Shutdown ----------------------------------------------------------------- + +async fn wait_for_shutdown_signal() { + let ctrl_c = async { + tokio::signal::ctrl_c().await.ok(); + }; + + #[cfg(unix)] + { + let sigterm = async { + match tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) { + Ok(mut sig) => { + sig.recv().await; + } + Err(e) => { + tracing::warn!(error = %e, "failed to install SIGTERM handler"); + std::future::pending::<()>().await; + } + } + }; + + tokio::select! 
{ + () = ctrl_c => {} + () = sigterm => {} + } + } + + #[cfg(not(unix))] + ctrl_c.await; +} diff --git a/crates/zzz_server/src/provider/anthropic.rs b/crates/zzz_server/src/provider/anthropic.rs new file mode 100644 index 000000000..b1fe64c5e --- /dev/null +++ b/crates/zzz_server/src/provider/anthropic.rs @@ -0,0 +1,358 @@ +use fuz_common::JsonRpcError; +use futures_util::StreamExt; +use serde_json::{json, Value}; +use tokio::sync::RwLock; + +use super::{ + ai_provider_error, CompletionHandlerOptions, CompletionMessage, ProgressSender, + ProviderStatus, PROVIDER_ERROR_NEEDS_API_KEY, +}; + +const API_URL: &str = "https://api.anthropic.com/v1/messages"; +const API_VERSION: &str = "2023-06-01"; + +// -- Provider state ----------------------------------------------------------- + +struct AnthropicState { + api_key: Option, + client: Option, + cached_status: Option, +} + +/// Anthropic/Claude AI provider. +/// +/// Uses the Messages API with optional SSE streaming. +/// State is behind `tokio::sync::RwLock` because: +/// - `set_api_key` writes from keeper RPC handlers +/// - `load_status` reads and caches status +pub struct AnthropicProvider { + state: RwLock, +} + +impl AnthropicProvider { + pub fn new(api_key: Option) -> Self { + let client = api_key.as_ref().map(|key| build_client(key)); + Self { + state: RwLock::new(AnthropicState { + api_key, + client, + cached_status: None, + }), + } + } + + pub async fn load_status(&self, reload: bool) -> ProviderStatus { + let state = self.state.read().await; + if !reload && let Some(ref status) = state.cached_status { + return status.clone(); + } + // Drop read lock before acquiring write lock + let has_client = state.client.is_some(); + drop(state); + + let status = if has_client { + ProviderStatus::available("claude") + } else { + ProviderStatus::unavailable("claude", PROVIDER_ERROR_NEEDS_API_KEY) + }; + + let mut state = self.state.write().await; + state.cached_status = Some(status.clone()); + status + } + + pub async fn 
set_api_key(&self, key: Option) { + let mut state = self.state.write().await; + state.client = key.as_ref().map(|k| build_client(k)); + state.api_key = key; + state.cached_status = None; + } + + pub async fn complete( + &self, + options: &CompletionHandlerOptions, + progress_sender: Option<&ProgressSender>, + ) -> Result { + // Clone the client (cheap — internally Arc'd) and release the lock + // before the HTTP call. This avoids blocking set_api_key for the + // duration of a potentially long-running streaming response. + let client = { + let state = self.state.read().await; + state + .client + .clone() + .ok_or_else(|| ai_provider_error("claude", PROVIDER_ERROR_NEEDS_API_KEY))? + }; + + let streaming = options.progress_token.is_some() && progress_sender.is_some(); + let body = build_request_body(options, streaming); + + let response: reqwest::Response = client + .post(API_URL) + .json(&body) + .send() + .await + .map_err(|e: reqwest::Error| ai_provider_error("claude", &e.to_string()))?; + + if !response.status().is_success() { + let error_body: String = response + .text() + .await + .unwrap_or_else(|_: reqwest::Error| String::from("unknown error")); + let error_msg = parse_api_error(&error_body).unwrap_or(error_body); + return Err(ai_provider_error("claude", &error_msg)); + } + + if let (true, Some(sender)) = (streaming, progress_sender) { + handle_streaming_response(response, options, sender).await + } else { + handle_non_streaming_response(response, options).await + } + } +} + +async fn handle_non_streaming_response( + response: reqwest::Response, + options: &CompletionHandlerOptions, +) -> Result { + let api_response: Value = response + .json::() + .await + .map_err(|e: reqwest::Error| ai_provider_error("claude", &format!("failed to parse response: {e}")))?; + + Ok(build_completion_response(&options.model, &api_response)) +} + +async fn handle_streaming_response( + response: reqwest::Response, + options: &CompletionHandlerOptions, + progress_sender: 
&ProgressSender, +) -> Result { + let mut stream = response.bytes_stream(); + let mut buffer = String::new(); + let mut accumulated_content = String::new(); + let mut message_id = String::new(); + let mut final_usage: Option = None; + let mut stop_reason = String::from("end_turn"); + + while let Some(chunk) = stream.next().await { + let chunk = chunk.map_err(|e| { + ai_provider_error("claude", &format!("stream read error: {e}")) + })?; + let text = String::from_utf8_lossy(&chunk); + // Normalize line endings per SSE spec (RFC 8895 §9.2): + // \r\n → \n, then lone \r → \n + if text.contains('\r') { + buffer.push_str(&text.replace("\r\n", "\n").replace('\r', "\n")); + } else { + buffer.push_str(&text); + } + + // Process complete SSE events (separated by \n\n) + while let Some(boundary) = buffer.find("\n\n") { + let event_text = buffer[..boundary].to_owned(); + buffer = buffer[boundary + 2..].to_owned(); + + if let Some((event_type, data)) = parse_sse_event(&event_text) { + match event_type { + "message_start" => { + if let Some(id) = data + .get("message") + .and_then(|m| m.get("id")) + .and_then(Value::as_str) + { + id.clone_into(&mut message_id); + } + } + "content_block_delta" => { + if let Some(text) = data + .get("delta") + .and_then(|d| d.get("text")) + .and_then(Value::as_str) + { + accumulated_content.push_str(text); + progress_sender(json!({ + "message": { + "role": "assistant", + "content": text, + } + })); + } + } + "message_delta" => { + if let Some(sr) = data + .get("delta") + .and_then(|d| d.get("stop_reason")) + .and_then(Value::as_str) + { + sr.clone_into(&mut stop_reason); + } + if let Some(usage) = data.get("usage") { + final_usage = Some(usage.clone()); + } + } + _ => {} + } + } + } + } + + let api_response = json!({ + "id": message_id, + "type": "message", + "role": "assistant", + "content": [{"type": "text", "text": accumulated_content}], + "model": options.model, + "stop_reason": stop_reason, + "stop_sequence": null, + "usage": final_usage, + 
}); + + Ok(build_completion_response(&options.model, &api_response)) +} + +// -- Request building --------------------------------------------------------- + +fn build_request_body(options: &CompletionHandlerOptions, stream: bool) -> Value { + let messages = build_messages(options.completion_messages.as_deref(), &options.prompt); + let opts = &options.completion_options; + + let mut body = json!({ + "model": options.model, + "max_tokens": opts.output_token_max, + "stream": stream, + "messages": messages, + }); + + let obj = body.as_object_mut().unwrap_or_else(|| unreachable!()); + + if !opts.system_message.is_empty() { + obj.insert("system".to_owned(), json!(opts.system_message)); + } + if let Some(t) = opts.temperature { + obj.insert("temperature".to_owned(), json!(t)); + } + if let Some(k) = opts.top_k { + obj.insert("top_k".to_owned(), json!(k)); + } + if let Some(p) = opts.top_p { + obj.insert("top_p".to_owned(), json!(p)); + } + if let Some(ref seqs) = opts.stop_sequences + && !seqs.is_empty() + { + obj.insert("stop_sequences".to_owned(), json!(seqs)); + } + + body +} + +/// Convert `CompletionMessage[]` + prompt into the Anthropic messages format. +/// +/// Filters out system role messages (system is passed as a separate field). +/// Appends the prompt as a final user message. 
+fn build_messages( + completion_messages: Option<&[CompletionMessage]>, + prompt: &str, +) -> Vec { + let capacity = completion_messages.map_or(0, <[_]>::len) + 1; // +1 for prompt + let mut messages: Vec = Vec::with_capacity(capacity); + + if let Some(msgs) = completion_messages { + for msg in msgs { + if msg.role == "system" { + continue; + } + messages.push(json!({ + "role": msg.role, + "content": [{"type": "text", "text": msg.content}], + })); + } + } + + messages.push(json!({ + "role": "user", + "content": [{"type": "text", "text": prompt}], + })); + + messages +} + +// -- Response building -------------------------------------------------------- + +fn build_completion_response(model: &str, api_response: &Value) -> Value { + let created = fuz_common::rfc3339_now(); + json!({ + "completion_response": { + "created": created, + "provider_name": "claude", + "model": model, + "data": { + "type": "claude", + "value": api_response, + }, + }, + }) +} + +// -- HTTP client -------------------------------------------------------------- + +fn build_client(api_key: &str) -> reqwest::Client { + let mut headers = reqwest::header::HeaderMap::new(); + if let Ok(val) = reqwest::header::HeaderValue::from_str(api_key) { + headers.insert("x-api-key", val); + } + headers.insert( + "anthropic-version", + reqwest::header::HeaderValue::from_static(API_VERSION), + ); + reqwest::Client::builder() + .default_headers(headers) + .build() + .unwrap_or_else(|_| reqwest::Client::new()) +} + +// -- SSE parsing -------------------------------------------------------------- + +/// Parse a single SSE event block into (`event_type`, `parsed_data`). 
+/// +/// An SSE event looks like: +/// ```text +/// event: message_start +/// data: {"type":"message_start","message":{...}} +/// ``` +fn parse_sse_event(event_text: &str) -> Option<(&str, Value)> { + let mut event_type: Option<&str> = None; + let mut data_lines: Vec<&str> = Vec::new(); + + for line in event_text.lines() { + if let Some(rest) = line.strip_prefix("event: ") { + event_type = Some(rest.trim()); + } else if let Some(rest) = line.strip_prefix("data: ") { + data_lines.push(rest); + } + } + + let event_type = event_type?; + if data_lines.is_empty() { + return None; + } + + let data_str = data_lines.join("\n"); + let data: Value = serde_json::from_str(&data_str).ok()?; + Some((event_type, data)) +} + +// -- Error parsing ------------------------------------------------------------ + +/// Parse an Anthropic API error response body. +/// +/// Anthropic errors look like: `{"type":"error","error":{"type":"...","message":"..."}}` +fn parse_api_error(body: &str) -> Option { + let v: Value = serde_json::from_str(body).ok()?; + v.get("error") + .and_then(|e| e.get("message")) + .and_then(Value::as_str) + .map(String::from) +} + diff --git a/crates/zzz_server/src/provider/gemini.rs b/crates/zzz_server/src/provider/gemini.rs new file mode 100644 index 000000000..9e7723b19 --- /dev/null +++ b/crates/zzz_server/src/provider/gemini.rs @@ -0,0 +1,51 @@ +use tokio::sync::RwLock; + +use super::{ProviderStatus, PROVIDER_ERROR_NEEDS_API_KEY}; + +struct GeminiState { + api_key: Option, + cached_status: Option, +} + +/// Google Gemini provider stub. +/// +/// Full implementation will follow the Anthropic provider pattern. 
+pub struct GeminiProvider { + state: RwLock, +} + +impl GeminiProvider { + pub fn new(api_key: Option) -> Self { + Self { + state: RwLock::new(GeminiState { + api_key, + cached_status: None, + }), + } + } + + pub async fn load_status(&self, reload: bool) -> ProviderStatus { + let state = self.state.read().await; + if !reload && let Some(ref status) = state.cached_status { + return status.clone(); + } + let has_key = state.api_key.is_some(); + drop(state); + + let status = if has_key { + ProviderStatus::available("gemini") + } else { + ProviderStatus::unavailable("gemini", PROVIDER_ERROR_NEEDS_API_KEY) + }; + + let mut state = self.state.write().await; + state.cached_status = Some(status.clone()); + status + } + + pub async fn set_api_key(&self, key: Option) { + let mut state = self.state.write().await; + state.api_key = key; + state.cached_status = None; + } +} diff --git a/crates/zzz_server/src/provider/mod.rs b/crates/zzz_server/src/provider/mod.rs new file mode 100644 index 000000000..6aeb82594 --- /dev/null +++ b/crates/zzz_server/src/provider/mod.rs @@ -0,0 +1,270 @@ +pub mod anthropic; +pub mod gemini; +pub mod ollama; +pub mod openai; + +use std::collections::HashMap; +use std::fmt; +use std::time::{SystemTime, UNIX_EPOCH}; + +use fuz_common::JsonRpcError; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +use crate::rpc; + +// -- Provider name enum ------------------------------------------------------- + +/// Known AI provider names. +/// +/// Matches the TypeScript `ProviderName = 'ollama' | 'claude' | 'chatgpt' | 'gemini'`. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ProviderName { + Ollama, + Claude, + Chatgpt, + Gemini, +} + +impl ProviderName { + #[allow(dead_code)] + pub const ALL: [Self; 4] = [Self::Ollama, Self::Claude, Self::Chatgpt, Self::Gemini]; +} + +impl fmt::Display for ProviderName { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Ollama => write!(f, "ollama"), + Self::Claude => write!(f, "claude"), + Self::Chatgpt => write!(f, "chatgpt"), + Self::Gemini => write!(f, "gemini"), + } + } +} + +// -- Provider status ---------------------------------------------------------- + +/// Status of an AI provider. +/// +/// Matches the TypeScript `ProviderStatus` discriminated union: +/// `{name, available: true, checked_at}` or `{name, available: false, error, checked_at}`. +/// +/// When `error` is `None`, the `error` field is omitted from JSON output, +/// producing `{name, available: true, checked_at}`. When `Some`, produces +/// `{name, available: false, error, checked_at}`. +#[derive(Debug, Clone, Serialize)] +pub struct ProviderStatus { + pub name: String, + pub available: bool, + pub checked_at: u64, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, +} + +impl ProviderStatus { + pub fn available(name: &str) -> Self { + Self { + name: name.to_owned(), + available: true, + checked_at: now_millis(), + error: None, + } + } + + pub fn unavailable(name: &str, error: &str) -> Self { + Self { + name: name.to_owned(), + available: false, + checked_at: now_millis(), + error: Some(error.to_owned()), + } + } +} + +// -- Completion types --------------------------------------------------------- + +/// Options controlling completion generation. +/// +/// Matches the TypeScript `CompletionOptions` interface from `backend_provider.ts`. +/// Also serves as server-level defaults (stored on `App`, cloned per-request). 
+#[derive(Debug, Clone)] +#[allow(dead_code)] +pub struct CompletionOptions { + pub frequency_penalty: Option, + pub output_token_max: u32, + pub presence_penalty: Option, + pub seed: Option, + pub stop_sequences: Option>, + pub system_message: String, + pub temperature: Option, + pub top_k: Option, + pub top_p: Option, +} + +impl Default for CompletionOptions { + fn default() -> Self { + Self { + output_token_max: 8192, + system_message: String::new(), + frequency_penalty: None, + presence_penalty: None, + seed: None, + stop_sequences: None, + temperature: None, + top_k: None, + top_p: None, + } + } +} + +/// A single message in a completion conversation. +/// +/// Matches the TypeScript `CompletionMessage = {role: string, content: string}`. +#[derive(Debug, Clone, Deserialize)] +pub struct CompletionMessage { + pub role: String, + pub content: String, +} + +/// Options passed to a provider's complete method. +pub struct CompletionHandlerOptions { + pub model: String, + pub completion_options: CompletionOptions, + pub completion_messages: Option>, + pub prompt: String, + pub progress_token: Option, +} + +/// Callback for sending streaming progress notifications. +/// +/// Captures `app_arc`, `connection_id`, and `progress_token` to send +/// `completion_progress` notifications to the requesting WebSocket connection. +pub type ProgressSender = Box; + +// -- Provider enum ------------------------------------------------------------ + +/// Enum-dispatched AI provider. +/// +/// Uses enum instead of trait objects: exactly 4 providers, known at compile +/// time. Gives exhaustive matching, no heap indirection, simpler lifetimes. 
+pub enum Provider { + Anthropic(anthropic::AnthropicProvider), + OpenAi(openai::OpenAiProvider), + Gemini(gemini::GeminiProvider), + Ollama(ollama::OllamaProvider), +} + +impl fmt::Debug for Provider { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Provider({})", self.name()) + } +} + +impl Provider { + pub const fn name(&self) -> ProviderName { + match self { + Self::Anthropic(_) => ProviderName::Claude, + Self::OpenAi(_) => ProviderName::Chatgpt, + Self::Gemini(_) => ProviderName::Gemini, + Self::Ollama(_) => ProviderName::Ollama, + } + } + + pub async fn load_status(&self, reload: bool) -> ProviderStatus { + match self { + Self::Anthropic(p) => p.load_status(reload).await, + Self::OpenAi(p) => p.load_status(reload).await, + Self::Gemini(p) => p.load_status(reload).await, + Self::Ollama(p) => p.load_status(reload).await, + } + } + + pub async fn set_api_key(&self, key: Option) { + match self { + Self::Anthropic(p) => p.set_api_key(key).await, + Self::OpenAi(p) => p.set_api_key(key).await, + Self::Gemini(p) => p.set_api_key(key).await, + Self::Ollama(_) => {} + } + } + + pub async fn complete( + &self, + options: &CompletionHandlerOptions, + progress_sender: Option<&ProgressSender>, + ) -> Result { + match self { + Self::Anthropic(p) => p.complete(options, progress_sender).await, + Self::OpenAi(_) | Self::Gemini(_) | Self::Ollama(_) => { + Err(rpc::internal_error(&format!( + "{}: not yet implemented in Rust backend", + self.name() + ))) + } + } + } +} + +// -- Provider manager --------------------------------------------------------- + +/// Manages all AI providers. +/// +/// Constructed once in `main`, stored in `App`. 
+pub struct ProviderManager {
+    providers: HashMap<ProviderName, Provider>,
+}
+
+impl fmt::Debug for ProviderManager {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ProviderManager")
+            .field("providers", &self.providers.keys().collect::<Vec<_>>())
+            .finish()
+    }
+}
+
+impl ProviderManager {
+    pub fn new() -> Self {
+        Self {
+            providers: HashMap::new(),
+        }
+    }
+
+    pub fn add(&mut self, provider: Provider) {
+        self.providers.insert(provider.name(), provider);
+    }
+
+    pub fn get(&self, name: ProviderName) -> Option<&Provider> {
+        self.providers.get(&name)
+    }
+
+    /// Get a provider or return an `internal_error` ("provider not found") if missing.
+    pub fn require(&self, name: ProviderName) -> Result<&Provider, JsonRpcError> {
+        self.get(name)
+            .ok_or_else(|| rpc::internal_error(&format!("provider not found: {name}")))
+    }
+
+    /// Iterate all providers (for `session_load` status collection).
+    pub fn all(&self) -> impl Iterator<Item = &Provider> {
+        self.providers.values()
+    }
+}
+
+// -- Error helpers ------------------------------------------------------------
+
+pub const PROVIDER_ERROR_NEEDS_API_KEY: &str = "needs API key";
+pub const PROVIDER_ERROR_NOT_INSTALLED: &str = "not installed";
+
+pub fn ai_provider_error(provider_name: &str, message: &str) -> JsonRpcError {
+    rpc::internal_error(&format!("{provider_name}: {message}"))
+}
+
+// -- Helpers ------------------------------------------------------------------
+
+#[expect(clippy::cast_possible_truncation, reason = "millis won't exceed u64 for centuries")]
+fn now_millis() -> u64 {
+    SystemTime::now()
+        .duration_since(UNIX_EPOCH)
+        .map(|d| d.as_millis() as u64)
+        .unwrap_or(0)
+}
diff --git a/crates/zzz_server/src/provider/ollama.rs b/crates/zzz_server/src/provider/ollama.rs
new file mode 100644
index 000000000..ec9232502
--- /dev/null
+++ b/crates/zzz_server/src/provider/ollama.rs
@@ -0,0 +1,40 @@
+use tokio::sync::RwLock;
+
+use super::{ProviderStatus, PROVIDER_ERROR_NOT_INSTALLED};
+
+struct OllamaState {
+    cached_status: Option<ProviderStatus>,
+}
+
+/// 
Ollama local provider stub. +/// +/// Full implementation will check local Ollama installation via HTTP client +/// and provide model management + completion support. +pub struct OllamaProvider { + state: RwLock, +} + +impl OllamaProvider { + pub fn new() -> Self { + Self { + state: RwLock::new(OllamaState { + cached_status: None, + }), + } + } + + pub async fn load_status(&self, reload: bool) -> ProviderStatus { + let state = self.state.read().await; + if !reload && let Some(ref status) = state.cached_status { + return status.clone(); + } + drop(state); + + // Stub: always unavailable until Ollama integration is implemented + let status = ProviderStatus::unavailable("ollama", PROVIDER_ERROR_NOT_INSTALLED); + + let mut state = self.state.write().await; + state.cached_status = Some(status.clone()); + status + } +} diff --git a/crates/zzz_server/src/provider/openai.rs b/crates/zzz_server/src/provider/openai.rs new file mode 100644 index 000000000..dcc1a6d84 --- /dev/null +++ b/crates/zzz_server/src/provider/openai.rs @@ -0,0 +1,51 @@ +use tokio::sync::RwLock; + +use super::{ProviderStatus, PROVIDER_ERROR_NEEDS_API_KEY}; + +struct OpenAiState { + api_key: Option, + cached_status: Option, +} + +/// OpenAI/ChatGPT provider stub. +/// +/// Full implementation will follow the Anthropic provider pattern. 
+pub struct OpenAiProvider { + state: RwLock, +} + +impl OpenAiProvider { + pub fn new(api_key: Option) -> Self { + Self { + state: RwLock::new(OpenAiState { + api_key, + cached_status: None, + }), + } + } + + pub async fn load_status(&self, reload: bool) -> ProviderStatus { + let state = self.state.read().await; + if !reload && let Some(ref status) = state.cached_status { + return status.clone(); + } + let has_key = state.api_key.is_some(); + drop(state); + + let status = if has_key { + ProviderStatus::available("chatgpt") + } else { + ProviderStatus::unavailable("chatgpt", PROVIDER_ERROR_NEEDS_API_KEY) + }; + + let mut state = self.state.write().await; + state.cached_status = Some(status.clone()); + status + } + + pub async fn set_api_key(&self, key: Option) { + let mut state = self.state.write().await; + state.api_key = key; + state.cached_status = None; + } +} diff --git a/crates/zzz_server/src/pty_manager.rs b/crates/zzz_server/src/pty_manager.rs new file mode 100644 index 000000000..b2d4f291a --- /dev/null +++ b/crates/zzz_server/src/pty_manager.rs @@ -0,0 +1,233 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use fuz_pty::{Pty, ReadResult, WaitResult}; +use serde::Serialize; +use serde_json::Value; +use tokio::sync::RwLock; +use tokio_util::sync::CancellationToken; + +use crate::handlers::App; +use crate::rpc; + +// -- Notification params ------------------------------------------------------ + +#[derive(Serialize)] +struct TerminalDataParams<'a> { + terminal_id: &'a str, + data: &'a str, +} + +#[derive(Serialize)] +struct TerminalExitedParams<'a> { + terminal_id: &'a str, + exit_code: Option, +} + +// -- Per-terminal state ------------------------------------------------------- + +/// State for a single spawned terminal. +struct TerminalEntry { + pty: Pty, + /// Cancel the async read loop before killing the process. 
+ cancel: CancellationToken, +} + +// -- PtyManager --------------------------------------------------------------- + +/// Manages spawned PTY processes keyed by `terminal_id` (UUID string). +/// +/// Held in `App`, shared via `Arc`. Each terminal has an async read loop +/// that broadcasts `terminal_data` notifications and sends `terminal_exited` +/// when the process exits. +pub struct PtyManager { + terminals: RwLock>, +} + +impl PtyManager { + pub fn new() -> Self { + Self { + terminals: RwLock::new(HashMap::new()), + } + } + + /// Spawn a new PTY process and start its async read loop. + pub async fn spawn( + &self, + terminal_id: &str, + command: &str, + args: &[String], + cwd: Option<&str>, + app: Arc, + ) -> Result<(), String> { + let arg_refs: Vec<&str> = args.iter().map(String::as_str).collect(); + let pty = Pty::spawn(command, &arg_refs, cwd, 80, 24) + .map_err(|e| e.to_string())?; + + let cancel = CancellationToken::new(); + let cancel_clone = cancel.clone(); + let tid = terminal_id.to_owned(); + + // Capture fd and pid for the read loop — it uses raw values, not a + // Pty struct, because the TerminalEntry owns the Pty (and its close). + let read_fd = pty.master_fd; + let read_pid = pty.pid; + + { + let mut terminals = self.terminals.write().await; + terminals.insert( + terminal_id.to_owned(), + TerminalEntry { pty, cancel }, + ); + } + + tokio::spawn(async move { + read_loop(read_fd, read_pid, &tid, cancel_clone, app).await; + }); + + Ok(()) + } + + /// Write data to a terminal's stdin. Silently no-ops if terminal not found. + pub async fn write(&self, terminal_id: &str, data: &str) { + let terminals = self.terminals.read().await; + if let Some(entry) = terminals.get(terminal_id) { + let _ = entry.pty.write(data.as_bytes()); + } + } + + /// Resize a terminal's PTY window. Silently no-ops if terminal not found. 
+ pub async fn resize(&self, terminal_id: &str, cols: u16, rows: u16) { + let terminals = self.terminals.read().await; + if let Some(entry) = terminals.get(terminal_id) { + let _ = entry.pty.resize(cols, rows); + } + } + + /// Kill a terminal process and return its exit code. + /// + /// Returns `None` if the `terminal_id` doesn't exist. + pub async fn kill(&self, terminal_id: &str, signal: i32) -> Option> { + let entry = { + let mut terminals = self.terminals.write().await; + terminals.remove(terminal_id)? + }; + + // Cancel the read loop first — it checks cancellation before each read, + // so it will exit before we close the fd below. + entry.cancel.cancel(); + + // Send signal (process may already be dead) + let _ = entry.pty.kill(signal); + + // Give process time to exit (matching Deno's 50ms wait) + tokio::time::sleep(std::time::Duration::from_millis(50)).await; + + let exit_code = match entry.pty.waitpid() { + WaitResult::Exited(code) => Some(code), + WaitResult::StillRunning => None, + }; + + let _ = entry.pty.close(); + + Some(exit_code) + } + + /// Kill all terminals. Called on shutdown. + pub async fn destroy(&self) { + let entries: Vec<(String, TerminalEntry)> = { + let mut terminals = self.terminals.write().await; + terminals.drain().collect() + }; + + for (tid, entry) in entries { + tracing::info!(terminal_id = %tid, "destroying terminal"); + entry.cancel.cancel(); + let _ = entry.pty.kill(libc::SIGTERM); + tokio::time::sleep(std::time::Duration::from_millis(50)).await; + let _ = entry.pty.waitpid(); + let _ = entry.pty.close(); + } + } +} + +impl std::fmt::Debug for PtyManager { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("PtyManager").finish_non_exhaustive() + } +} + +// -- Async read loop ---------------------------------------------------------- + +/// Poll the PTY master fd for output and broadcast to WebSocket clients. +/// +/// Uses raw fd/pid values — does NOT own the fd. 
The `TerminalEntry` in the +/// map owns the `Pty` and is responsible for `close()`. On natural exit (EOF), +/// this loop removes the entry from the map and closes it. On cancellation +/// (from `kill`), the caller already removed the entry — this loop just exits. +async fn read_loop( + master_fd: i32, + pid: i32, + terminal_id: &str, + cancel: CancellationToken, + app: Arc, +) { + let read_pty = Pty { master_fd, pid }; + let mut buf = [0u8; 8192]; + + loop { + if cancel.is_cancelled() { + return; + } + + match read_pty.read(&mut buf) { + ReadResult::Data(n) => { + let data = String::from_utf8_lossy(&buf[..n]); + if !data.is_empty() { + let notification = rpc::notification( + "terminal_data", + serde_json::to_value(&TerminalDataParams { + terminal_id, + data: &data, + }) + .unwrap_or(Value::Null), + ); + app.broadcast(¬ification); + } + } + ReadResult::WouldBlock => { + // No data — yield and retry after 10ms (matching Deno behavior) + tokio::select! { + () = cancel.cancelled() => return, + () = tokio::time::sleep(std::time::Duration::from_millis(10)) => {}, + } + } + ReadResult::Eof => { + tracing::info!(terminal_id, "terminal EOF"); + let exit_code = match read_pty.waitpid() { + WaitResult::Exited(code) => Some(code), + WaitResult::StillRunning => None, + }; + + let notification = rpc::notification( + "terminal_exited", + serde_json::to_value(&TerminalExitedParams { + terminal_id, + exit_code, + }) + .unwrap_or(Value::Null), + ); + app.broadcast(¬ification); + + // Remove and close the terminal entry (natural exit cleanup). + // If kill() already removed it, this is a no-op. 
+ let removed = app.pty_manager.terminals.write().await.remove(terminal_id); + if let Some(entry) = removed { + let _ = entry.pty.close(); + } + + return; + } + } + } +} diff --git a/crates/zzz_server/src/rpc.rs b/crates/zzz_server/src/rpc.rs new file mode 100644 index 000000000..f5ff1cc39 --- /dev/null +++ b/crates/zzz_server/src/rpc.rs @@ -0,0 +1,420 @@ +use std::sync::Arc; + +use axum::body::Bytes; +use axum::extract::{Query, State}; +use axum::http::{HeaderMap, StatusCode}; +use axum::response::{IntoResponse, Response}; +use axum::Json; +use fuz_common::{ + JsonRpcError, JSONRPC_INTERNAL_ERROR, JSONRPC_INVALID_PARAMS, JSONRPC_INVALID_REQUEST, + JSONRPC_METHOD_NOT_FOUND, JSONRPC_PARSE_ERROR, JSONRPC_VERSION, +}; +use serde::Serialize; +use serde_json::{Map, Value}; + +use crate::auth::{check_action_auth, check_origin, method_auth, resolve_auth_from_headers}; +use crate::handlers::{self, App, Ctx}; + +// -- JSON-RPC types ----------------------------------------------------------- + +/// Successful JSON-RPC 2.0 response. +#[derive(Debug, Serialize)] +pub struct JsonRpcResponse { + pub jsonrpc: &'static str, + pub id: Value, + pub result: Value, +} + +/// JSON-RPC 2.0 error response (full envelope). +#[derive(Debug, Serialize)] +pub struct JsonRpcErrorResponse { + pub jsonrpc: &'static str, + pub id: Value, + pub error: JsonRpcError, +} + +// -- Error constructors ------------------------------------------------------- +// Intentional divergence: Rust omits `error.data` for security — Zod validation +// details (field names, types, enum values) can leak schema info to unauthenticated +// callers on public actions. Deno includes them for DX. Future: environment-conditional +// in both backends (include in dev, strip in prod). See `normalize_error_data` +// in integration tests for cross-backend handling. 
+ +pub fn parse_error() -> JsonRpcError { + JsonRpcError { + code: JSONRPC_PARSE_ERROR, + message: "parse error".to_string(), + data: None, + } +} + +pub fn invalid_request() -> JsonRpcError { + JsonRpcError { + code: JSONRPC_INVALID_REQUEST, + message: "invalid request".to_string(), + data: None, + } +} + +pub fn method_not_found(method: &str) -> JsonRpcError { + JsonRpcError { + code: JSONRPC_METHOD_NOT_FOUND, + message: format!("method not found: {method}"), + data: None, + } +} + +pub fn invalid_params(detail: &str) -> JsonRpcError { + JsonRpcError { + code: JSONRPC_INVALID_PARAMS, + message: detail.to_string(), + data: None, + } +} + +pub fn internal_error(detail: &str) -> JsonRpcError { + JsonRpcError { + code: JSONRPC_INTERNAL_ERROR, + message: detail.to_string(), + data: None, + } +} + +// -- Notification builder ----------------------------------------------------- + +/// JSON-RPC 2.0 notification (no `id` field — server-initiated push). +#[derive(Debug, Serialize)] +pub struct JsonRpcNotification { + pub jsonrpc: &'static str, + pub method: String, + pub params: Value, +} + +/// Build a JSON-RPC notification string for broadcasting to WebSocket clients. +/// +/// Returns the serialized JSON string. On serialization failure (shouldn't +/// happen with valid `Value` inputs), returns an empty string. 
+pub fn notification(method: &str, params: Value) -> String { + let n = JsonRpcNotification { + jsonrpc: JSONRPC_VERSION, + method: method.to_owned(), + params, + }; + serde_json::to_string(&n).unwrap_or_default() +} + +// -- Response builders -------------------------------------------------------- + +pub const fn success_response(id: Value, result: Value) -> JsonRpcResponse { + JsonRpcResponse { + jsonrpc: JSONRPC_VERSION, + id, + result, + } +} + +pub const fn error_response(id: Value, error: JsonRpcError) -> JsonRpcErrorResponse { + JsonRpcErrorResponse { + jsonrpc: JSONRPC_VERSION, + id, + error, + } +} + +// -- HTTP status mapping ------------------------------------------------------ + +/// Map a JSON-RPC error code to an HTTP status code. +/// +/// Matches `fuz_app`'s `jsonrpc_error_code_to_http_status` from +/// `fuz_app/src/lib/http/jsonrpc_errors.ts:230-244`. +/// Returns 500 for unrecognized codes. +const fn error_code_to_http_status(code: i32) -> StatusCode { + match code { + // -32700, -32600, -32602 → 400 + JSONRPC_PARSE_ERROR | JSONRPC_INVALID_REQUEST | JSONRPC_INVALID_PARAMS => { + StatusCode::BAD_REQUEST + } + JSONRPC_METHOD_NOT_FOUND => StatusCode::NOT_FOUND, // -32601 → 404 + -32001 => StatusCode::UNAUTHORIZED, // unauthenticated → 401 + -32002 => StatusCode::FORBIDDEN, // forbidden → 403 + _ => StatusCode::INTERNAL_SERVER_ERROR, // -32603 and others → 500 + } +} + +// -- Message classification --------------------------------------------------- + +/// Default params when the JSON-RPC envelope omits the `params` field. +static NULL_PARAMS: Value = Value::Null; + +/// Classification result from `classify`. +/// +/// Transport-agnostic — callers apply transport-specific semantics: +/// - HTTP: `Notification` → reject as `invalid_request`; error → mapped HTTP status +/// - WS: `Notification` → silence (no response sent); error → send envelope +pub enum Classified<'a> { + /// Valid request — method, validated id, and params ready for dispatch. 
+ Request { + method: &'a str, + id: Value, + params: &'a Value, + }, + /// Error — id and error object for the error response envelope. + Invalid { + id: Value, + error: JsonRpcError, + }, + /// Notification (has method, no id) — caller decides behavior. + Notification, +} + +/// Classify a parsed JSON value as a JSON-RPC message. +/// +/// Distinguishes between: +/// - Request (has `method` + valid `id`) → `Classified::Request` +/// - Notification (has `method`, no `id`) → `Classified::Notification` +/// - Invalid (missing `method`, bad `jsonrpc`, non-object, null id) → `Classified::Invalid` +/// +/// Id validation matches `fuz_app`: id must be string or number (excludes null, +/// following MCP). Non-object values always get `id: null` (matching +/// `create_rpc_endpoint`'s safeParse failure path, not `ActionPeer`'s +/// `to_jsonrpc_message_id`). +// TODO Phase 2: Support batch requests (JSON arrays) +pub fn classify(value: &Value) -> Classified<'_> { + let Some(obj) = value.as_object() else { + // Non-object body: fuz_app returns id: null (safeParse fails, no object to extract from) + return Classified::Invalid { + id: Value::Null, + error: invalid_request(), + }; + }; + + // Validate jsonrpc version + let jsonrpc = obj.get("jsonrpc").and_then(Value::as_str); + if jsonrpc != Some(JSONRPC_VERSION) { + let id = extract_id(obj); + return Classified::Invalid { + id, + error: invalid_request(), + }; + } + + // Must have method + let Some(method) = obj.get("method").and_then(Value::as_str) else { + let id = extract_id(obj); + return Classified::Invalid { + id, + error: invalid_request(), + }; + }; + + // No `id` field → notification (caller decides behavior) + let Some(id_val) = obj.get("id") else { + return Classified::Notification; + }; + + // Validate id is string or number (fuz_app's JsonrpcRequestId excludes null, per MCP) + let id = if id_val.is_string() || id_val.is_number() { + id_val.clone() + } else { + // null, bool, array, object ids → invalid request 
(safeParse would fail) + return Classified::Invalid { + id: Value::Null, + error: invalid_request(), + }; + }; + + // Extract params (default to Null if absent — handlers validate) + let params = obj.get("params").unwrap_or(&NULL_PARAMS); + + Classified::Request { method, id, params } +} + +/// Extract `id` from a JSON-RPC message object for error responses. +/// +/// Matches `fuz_app`'s safeParse failure path: extracts id only if it's +/// a string or number, otherwise returns null. +fn extract_id(obj: &Map) -> Value { + match obj.get("id") { + Some(id) if id.is_string() || id.is_number() => id.clone(), + _ => Value::Null, + } +} + +// -- HTTP handler ------------------------------------------------------------- + +/// Axum handler for `GET /api/rpc`. +/// +/// Extracts `method`, `id`, and optional `params` from query parameters. +/// Matches fuz_app's `create_rpc_endpoint` GET handler. +pub async fn rpc_get_handler( + State(app): State>, + headers: HeaderMap, + Query(query): Query>, +) -> Response { + // Origin verification + if let Some(origin) = headers.get("origin").and_then(|v| v.to_str().ok()) + && !check_origin(origin, &app.allowed_origins) { + return (StatusCode::FORBIDDEN, "origin not allowed").into_response(); + } + + // Extract method + let Some(method) = query.get("method") else { + let error = invalid_request(); + return ( + StatusCode::BAD_REQUEST, + Json(error_response(Value::Null, error)), + ) + .into_response(); + }; + + // Extract id (required) + let Some(id_raw) = query.get("id") else { + let error = invalid_request(); + return ( + StatusCode::BAD_REQUEST, + Json(error_response(Value::Null, error)), + ) + .into_response(); + }; + + // Parse id — try as number first, fall back to string + let id: Value = if let Ok(n) = id_raw.parse::() { + Value::Number(n.into()) + } else { + Value::String(id_raw.clone()) + }; + + // Parse params from query string (optional) + let params: Value = if let Some(params_raw) = query.get("params") { + match 
serde_json::from_str(params_raw) { + Ok(v) => v, + Err(_) => { + let error = invalid_params("params query parameter is not valid JSON"); + return ( + StatusCode::BAD_REQUEST, + Json(error_response(id, error)), + ) + .into_response(); + } + } + } else { + Value::Null + }; + + // Resolve auth context + let resolved = resolve_auth_from_headers( + &headers, + &app.keyring, + &app.db_pool, + app.daemon_token_state.as_ref(), + ) + .await; + let auth_context = resolved.as_ref().map(|r| &r.context); + let credential_type = resolved.as_ref().map(|r| r.credential_type); + + // Per-action auth check + let spec_auth = method_auth(method); + if let Some(auth_error) = check_action_auth(spec_auth, auth_context, credential_type) { + let status = error_code_to_http_status(auth_error.code); + return (status, Json(error_response(id, auth_error))).into_response(); + } + + let ctx = Ctx { + app: &app, + app_arc: Arc::clone(&app), + request_id: &id, + auth: auth_context, + connection_id: None, + }; + match handlers::dispatch(method, ¶ms, &ctx).await { + Ok(result) => Json(success_response(id, result)).into_response(), + Err(error) => { + let status = error_code_to_http_status(error.code); + (status, Json(error_response(id, error))).into_response() + } + } +} + +/// Axum handler for `POST /api/rpc`. 
+/// +/// Applies HTTP-specific transport semantics: +/// - Origin verification before processing +/// - Auth context resolution from Cookie header +/// - Per-action auth check before dispatch +/// - Parse errors → full JSON-RPC envelope, HTTP 400 +/// - Notifications → rejected as `invalid_request`, HTTP 400 +/// - Error responses → HTTP status mapped from JSON-RPC error code +pub async fn rpc_handler( + State(app): State>, + headers: HeaderMap, + body: Bytes, +) -> Response { + // Origin verification + if let Some(origin) = headers.get("origin").and_then(|v| v.to_str().ok()) + && !check_origin(origin, &app.allowed_origins) { + return (StatusCode::FORBIDDEN, "origin not allowed").into_response(); + } + + // 1. Parse body as generic JSON value + let Ok(value) = serde_json::from_slice::(&body) else { + tracing::debug!("JSON parse error"); + return ( + StatusCode::BAD_REQUEST, + Json(error_response(Value::Null, parse_error())), + ) + .into_response(); + }; + + tracing::debug!( + method = value.get("method").and_then(|v| v.as_str()).unwrap_or(""), + "rpc request" + ); + + // 2. Resolve auth context (daemon token → cookie → bearer → None) + let resolved = resolve_auth_from_headers( + &headers, + &app.keyring, + &app.db_pool, + app.daemon_token_state.as_ref(), + ) + .await; + let auth_context = resolved.as_ref().map(|r| &r.context); + let credential_type = resolved.as_ref().map(|r| r.credential_type); + + // 3. 
Classify, check auth, then dispatch + match classify(&value) { + Classified::Request { method, id, params } => { + // Per-action auth check + let spec_auth = method_auth(method); + if let Some(auth_error) = check_action_auth(spec_auth, auth_context, credential_type) { + let status = error_code_to_http_status(auth_error.code); + return (status, Json(error_response(id, auth_error))).into_response(); + } + + let ctx = Ctx { + app: &app, + app_arc: Arc::clone(&app), + request_id: &id, + auth: auth_context, + connection_id: None, + }; + match handlers::dispatch(method, params, &ctx).await { + Ok(result) => Json(success_response(id, result)).into_response(), + Err(error) => { + let status = error_code_to_http_status(error.code); + (status, Json(error_response(id, error))).into_response() + } + } + } + Classified::Invalid { id, error } => { + let status = error_code_to_http_status(error.code); + (status, Json(error_response(id, error))).into_response() + } + Classified::Notification => { + // HTTP requires id — reject notifications (fuz_app's safeParse enforces this) + let error = invalid_request(); + let status = error_code_to_http_status(error.code); + (status, Json(error_response(Value::Null, error))).into_response() + } + } +} diff --git a/crates/zzz_server/src/scoped_fs.rs b/crates/zzz_server/src/scoped_fs.rs new file mode 100644 index 000000000..f9aa257d0 --- /dev/null +++ b/crates/zzz_server/src/scoped_fs.rs @@ -0,0 +1,212 @@ +use std::path::{Component, Path, PathBuf}; +use std::sync::RwLock; + +// -- Errors ------------------------------------------------------------------- + +/// Errors from scoped filesystem operations. 
+#[derive(Debug, thiserror::Error)] +pub enum ScopedFsError { + #[error("Path is not allowed: {0}")] + PathNotAllowed(String), + #[error("Path is a symlink which is not allowed: {0}")] + SymlinkNotAllowed(String), + #[error("{0}")] + Io(#[from] std::io::Error), +} + +// -- ScopedFs ----------------------------------------------------------------- + +/// Secure wrapper around filesystem operations. +/// +/// Restricts all operations to specified allowed directories. Rejects +/// relative paths, path traversal, and symlinks. Mirrors the TypeScript +/// `ScopedFs` from `src/lib/server/scoped_fs.ts`. +/// +/// NOTE: There is an inherent TOCTOU gap between the symlink check (`lstat`) +/// and the caller's subsequent filesystem operation. A symlink could be +/// created after validation. This is the same caveat as the Deno implementation. +pub struct ScopedFs { + allowed_paths: RwLock>, +} + +impl ScopedFs { + /// Create a new `ScopedFs` with the given allowed directory paths. + /// + /// Each path is normalized with a trailing `/` and must be absolute. + pub fn new(paths: Vec) -> Self { + let allowed_paths = paths + .into_iter() + .map(|p| { + let mut s = p.to_string_lossy().into_owned(); + if !s.ends_with('/') { + s.push('/'); + } + PathBuf::from(s) + }) + .collect(); + Self { + allowed_paths: RwLock::new(allowed_paths), + } + } + + /// Add a path to the allowed set. No-op if already present. + /// + /// Mirrors `ScopedFs.add_path` in `src/lib/server/scoped_fs.ts`. + pub fn add_path(&self, path: &Path) -> bool { + let normalized = normalize_trailing_slash(path); + let mut paths = self.allowed_paths.write().expect("ScopedFs lock poisoned"); + if paths.iter().any(|p| p == &normalized) { + return false; + } + paths.push(normalized); + true + } + + /// Remove a path from the allowed set. + /// + /// Mirrors `ScopedFs.remove_path` in `src/lib/server/scoped_fs.ts`. 
+ pub fn remove_path(&self, path: &Path) -> bool { + let normalized = normalize_trailing_slash(path); + let mut paths = self.allowed_paths.write().expect("ScopedFs lock poisoned"); + if let Some(index) = paths.iter().position(|p| p == &normalized) { + paths.remove(index); + true + } else { + false + } + } + + /// Check if a path falls under one of the allowed directories. + fn is_path_allowed(&self, path: &Path) -> bool { + let path_str = path.to_string_lossy(); + let paths = self.allowed_paths.read().expect("ScopedFs lock poisoned"); + for allowed in paths.iter() { + let allowed_str = allowed.to_string_lossy(); + if path_str.starts_with(allowed_str.as_ref()) + || path_str == allowed_str.trim_end_matches('/') + { + return true; + } + } + false + } + + /// Validate and normalize a path for safe filesystem access. + /// + /// - Rejects relative paths and null bytes + /// - Normalizes path components (resolves `.` and `..`) + /// - Checks against allowed directories + /// - Rejects symlinks (target and all parent directories) + async fn ensure_safe_path(&self, path: &str) -> Result { + // Reject null bytes + if path.contains('\0') { + return Err(ScopedFsError::PathNotAllowed(path.to_owned())); + } + + // Must be absolute + let raw = Path::new(path); + if !raw.is_absolute() { + return Err(ScopedFsError::PathNotAllowed(path.to_owned())); + } + + // Normalize path (resolve . and .. 
without touching the filesystem) + let normalized = normalize_path(raw); + + // Check against allowed paths + if !self.is_path_allowed(&normalized) { + return Err(ScopedFsError::PathNotAllowed( + normalized.to_string_lossy().into_owned(), + )); + } + + // Check the target path for symlinks if it exists + match tokio::fs::symlink_metadata(&normalized).await { + Ok(meta) => { + if meta.file_type().is_symlink() { + return Err(ScopedFsError::SymlinkNotAllowed( + normalized.to_string_lossy().into_owned(), + )); + } + } + Err(e) if e.kind() == std::io::ErrorKind::NotFound => { + // File doesn't exist yet — that's fine for write/mkdir + } + Err(e) => return Err(ScopedFsError::Io(e)), + } + + // Check all parent directories for symlinks + let mut current = normalized.as_path(); + while let Some(parent) = current.parent() { + if parent == Path::new("/") || parent == current { + break; + } + match tokio::fs::symlink_metadata(parent).await { + Ok(meta) => { + if meta.file_type().is_symlink() { + return Err(ScopedFsError::SymlinkNotAllowed( + parent.to_string_lossy().into_owned(), + )); + } + } + Err(e) if e.kind() == std::io::ErrorKind::NotFound => { + // Parent doesn't exist — will fail at the actual operation + } + Err(e) => return Err(ScopedFsError::Io(e)), + } + current = parent; + } + + Ok(normalized) + } + + /// Write content to a file (creates parent directories if needed). + pub async fn write_file(&self, path: &str, content: &str) -> Result<(), ScopedFsError> { + let safe_path = self.ensure_safe_path(path).await?; + if let Some(parent) = safe_path.parent() { + tokio::fs::create_dir_all(parent).await?; + } + tokio::fs::write(&safe_path, content).await?; + Ok(()) + } + + /// Remove a file. + pub async fn rm(&self, path: &str) -> Result<(), ScopedFsError> { + let safe_path = self.ensure_safe_path(path).await?; + tokio::fs::remove_file(&safe_path).await?; + Ok(()) + } + + /// Create a directory (recursive). 
+ pub async fn mkdir(&self, path: &str) -> Result<(), ScopedFsError> { + let safe_path = self.ensure_safe_path(path).await?; + tokio::fs::create_dir_all(&safe_path).await?; + Ok(()) + } +} + +/// Ensure a path has a trailing `/` for consistent allowed-path comparison. +fn normalize_trailing_slash(path: &Path) -> PathBuf { + let mut s = path.to_string_lossy().into_owned(); + if !s.ends_with('/') { + s.push('/'); + } + PathBuf::from(s) +} + +/// Normalize a path by resolving `.` and `..` components without filesystem access. +fn normalize_path(path: &Path) -> PathBuf { + let mut components = Vec::new(); + for component in path.components() { + match component { + Component::CurDir => {} // skip . + Component::ParentDir => { + // Pop the last normal component (don't go above root) + if let Some(Component::Normal(_)) = components.last() { + components.pop(); + } + } + c => components.push(c), + } + } + components.iter().collect() +} diff --git a/crates/zzz_server/src/ws.rs b/crates/zzz_server/src/ws.rs new file mode 100644 index 000000000..6ea5cf9bf --- /dev/null +++ b/crates/zzz_server/src/ws.rs @@ -0,0 +1,130 @@ +use std::sync::Arc; + +use axum::extract::ws::{Message, WebSocket, WebSocketUpgrade}; +use axum::extract::State; +use axum::http::{HeaderMap, StatusCode}; +use axum::response::{IntoResponse, Response}; +use futures_util::{SinkExt, StreamExt}; +use serde_json::Value; + +use crate::auth::{ + check_action_auth, method_auth, resolve_auth_from_headers, ResolvedAuth, +}; +use crate::handlers::{self, App, Ctx}; +use crate::rpc::{self, Classified}; + +/// Axum handler for `GET /ws` — upgrades to WebSocket with auth. +/// +/// Authenticates at upgrade time via cookie session. Rejects with 401 +/// if unauthenticated. Mirrors `register_websocket_actions.ts`'s +/// `require_auth` middleware. +/// +/// On upgrade, registers the connection with auth metadata for targeted +/// socket revocation. 
+pub async fn ws_handler( + State(app): State>, + headers: HeaderMap, + ws: WebSocketUpgrade, +) -> Response { + // Resolve auth from headers (daemon token → cookie → bearer) + let resolved = resolve_auth_from_headers( + &headers, + &app.keyring, + &app.db_pool, + app.daemon_token_state.as_ref(), + ) + .await; + + let Some(resolved) = resolved else { + return (StatusCode::UNAUTHORIZED, "unauthenticated").into_response(); + }; + + ws.on_upgrade(move |socket| handle_connection(socket, app, resolved)) +} + +async fn handle_connection(socket: WebSocket, app: Arc, resolved: ResolvedAuth) { + let (mut tx, mut rx) = socket.split(); + + // Register connection with auth metadata for targeted revocation. + // Bearer token connections pass None for token_hash — they're revocable + // only via account-level revocation (matching Deno behavior). + let (notify_tx, mut notify_rx) = tokio::sync::mpsc::unbounded_channel::(); + let account_id = Some(resolved.context.account.id); + let conn_id = app.add_connection(notify_tx, resolved.token_hash, account_id); + let auth_context = resolved.context; + let credential_type = resolved.credential_type; + + loop { + tokio::select! { + // Server-initiated message (broadcast or send_to) + Some(msg) = notify_rx.recv() => { + if tx.send(Message::Text(msg.into())).await.is_err() { + break; + } + } + // Client message + msg = rx.next() => { + let Some(Ok(msg)) = msg else { break }; + let text = match msg { + Message::Text(t) => t, + Message::Close(_) => break, + _ => continue, + }; + + // 1. 
Parse JSON — on failure send full envelope (matching Deno) + let Ok(value) = serde_json::from_str::(&text) else { + tracing::debug!("ws: JSON parse error"); + if let Ok(json) = + serde_json::to_string(&rpc::error_response(Value::Null, rpc::parse_error())) + && tx.send(Message::Text(json.into())).await.is_err() + { + break; + } + continue; + }; + + tracing::debug!( + method = value.get("method").and_then(|v| v.as_str()).unwrap_or(""), + "ws message" + ); + + // 2. Classify, check per-action auth, then dispatch + let json = match rpc::classify(&value) { + Classified::Request { method, id, params } => { + let spec_auth = method_auth(method); + if let Some(auth_error) = check_action_auth(spec_auth, Some(&auth_context), Some(credential_type)) { + serde_json::to_string(&rpc::error_response(id, auth_error)) + } else { + let ctx = Ctx { + app: &app, + app_arc: Arc::clone(&app), + request_id: &id, + auth: Some(&auth_context), + connection_id: Some(conn_id), + }; + match handlers::dispatch(method, params, &ctx).await { + Ok(result) => serde_json::to_string(&rpc::success_response(id, result)), + Err(error) => serde_json::to_string(&rpc::error_response(id, error)), + } + } + } + Classified::Invalid { id, error } => { + serde_json::to_string(&rpc::error_response(id, error)) + } + Classified::Notification => continue, + }; + + // 3. 
Send response + if let Ok(json) = json + && tx.send(Message::Text(json.into())).await.is_err() + { + break; + } + } + } + } + + // Disconnect: clean up connection tracking + app.remove_connection(conn_id); + tracing::debug!(conn_id, "ws: connection closed"); +} diff --git a/deno.json b/deno.json new file mode 100644 index 000000000..ab217c7a4 --- /dev/null +++ b/deno.json @@ -0,0 +1,38 @@ +{ + "nodeModulesDir": "manual", + "unstable": ["sloppy-imports"], + "exclude": ["**/*.test.ts", "**/*.svelte.ts", "**/*.gen.ts", "src/test/"], + "tasks": { + "dev": "deno run --allow-all scripts/dev.ts", + "dev:setup": "deno run --allow-read --allow-write --allow-env --allow-run=openssl scripts/dev_setup.ts", + "prod:setup": "deno run --allow-read --allow-write --allow-env --allow-run=openssl scripts/prod_setup.ts", + "dev:start": "NODE_ENV=development deno run --allow-all --env=.env.development src/lib/zzz/main.ts daemon start", + "install": "gro build && mkdir -p ~/.zzz/bin && cp dist_cli/zzz ~/.zzz/bin/zzz", + "check": "deno check src/lib/zzz/**/*.ts", + "test": "gro test && deno task test:integration", + "test:integration": "deno run --allow-net --allow-run --allow-read --allow-write --allow-env test/integration/run.ts" + }, + "imports": { + "@std/": "jsr:@std/", + "esm-env": "npm:esm-env@^1", + "hono": "npm:hono@^4", + "svelte": "npm:svelte@^5", + "zod": "npm:zod@^4", + "@electric-sql/pglite": "npm:@electric-sql/pglite@^0.3", + "@fuzdev/blake3_wasm": "npm:@fuzdev/blake3_wasm@^0.1.1", + "@fuzdev/fuz_app/": "npm:/@fuzdev/fuz_app@^0.12.0/", + "@fuzdev/fuz_util/": "npm:/@fuzdev/fuz_util@^0.55.0/", + "@fuzdev/gro/": "npm:/@fuzdev/gro@^0.197.3/", + "date-fns": "npm:date-fns@^4", + "ollama": "npm:ollama@^0.6", + "@anthropic-ai/sdk": "npm:@anthropic-ai/sdk@^0.71.2", + "openai": "npm:openai@^6.10.0", + "@google/generative-ai": "npm:@google/generative-ai@^0.24.1" + }, + "fmt": { + "useTabs": true, + "lineWidth": 100, + "indentWidth": 2, + "singleQuote": true + } +} diff --git 
a/deno.lock b/deno.lock new file mode 100644 index 000000000..02bcaea2d --- /dev/null +++ b/deno.lock @@ -0,0 +1,2310 @@ +{ + "version": "5", + "specifiers": { + "npm:@anthropic-ai/sdk@~0.71.2": "0.71.2_zod@4.3.6", + "npm:@changesets/changelog-git@~0.2.1": "0.2.1", + "npm:@electric-sql/pglite@0.3": "0.3.16", + "npm:@fuzdev/fuz_code@~0.45.1": "0.45.1_@fuzdev+fuz_css@0.58.0__@fuzdev+blake3_wasm@0.1.1__@fuzdev+fuz_util@0.55.0___@fuzdev+blake3_wasm@0.1.1___@types+estree@1.0.8___@types+node@24.12.0___esm-env@1.2.2___svelte@5.55.2___zod@4.3.6__@fuzdev+gro@0.197.3___@fuzdev+blake3_wasm@0.1.1___@fuzdev+fuz_util@0.55.0____@fuzdev+blake3_wasm@0.1.1____@types+estree@1.0.8____@types+node@24.12.0____esm-env@1.2.2____svelte@5.55.2____zod@4.3.6___@sveltejs+kit@2.55.0____@sveltejs+vite-plugin-svelte@6.2.4_____svelte@5.55.2_____vite@7.3.1______@types+node@24.12.0____svelte@5.55.2____typescript@5.9.3____vite@7.3.1_____@types+node@24.12.0___esbuild@0.27.7___svelte@5.55.2___typescript@5.9.3___vitest@4.1.0____@types+node@24.12.0____jsdom@27.4.0____vite@7.3.1_____@types+node@24.12.0___@types+estree@1.0.8___@types+node@24.12.0__@sveltejs+acorn-typescript@1.0.9___acorn@8.16.0__@webref+css@8.4.1___css-tree@3.2.1__zimmerframe@1.1.4__zod@4.3.6__@sveltejs+kit@2.55.0___@sveltejs+vite-plugin-svelte@6.2.4____svelte@5.55.2____vite@7.3.1_____@types+node@24.12.0___svelte@5.55.2___typescript@5.9.3___vite@7.3.1____@types+node@24.12.0__@types+estree@1.0.8__@types+node@24.12.0__esbuild@0.27.7__esm-env@1.2.2__svelte@5.55.2__typescript@5.9.3__vitest@4.1.0___@types+node@24.12.0___jsdom@27.4.0___vite@7.3.1____@types+node@24.12.0_@fuzdev+fuz_util@0.55.0__@fuzdev+blake3_wasm@0.1.1__@types+estree@1.0.8__@types+node@24.12.0__esm-env@1.2.2__svelte@5.55.2__zod@4.3.6_esm-env@1.2.2_magic-string@0.30.21_svelte@5.55.2_zimmerframe@1.1.4_@fuzdev+blake3_wasm@0.1.1_@fuzdev+gro@0.197.3__@fuzdev+blake3_wasm@0.1.1__@fuzdev+fuz_util@0.55.0___@fuzdev+blake3_wasm@0.1.1___@types+estree@1.0.8___@types+node@24.12.0___esm-env@1.2.
2___svelte@5.55.2___zod@4.3.6__@sveltejs+kit@2.55.0___@sveltejs+vite-plugin-svelte@6.2.4____svelte@5.55.2____vite@7.3.1_____@types+node@24.12.0___svelte@5.55.2___typescript@5.9.3___vite@7.3.1____@types+node@24.12.0__esbuild@0.27.7__svelte@5.55.2__typescript@5.9.3__vitest@4.1.0___@types+node@24.12.0___jsdom@27.4.0___vite@7.3.1____@types+node@24.12.0__@types+estree@1.0.8__@types+node@24.12.0_@sveltejs+acorn-typescript@1.0.9__acorn@8.16.0_@sveltejs+kit@2.55.0__@sveltejs+vite-plugin-svelte@6.2.4___svelte@5.55.2___vite@7.3.1____@types+node@24.12.0__svelte@5.55.2__typescript@5.9.3__vite@7.3.1___@types+node@24.12.0_@types+estree@1.0.8_@types+node@24.12.0_@webref+css@8.4.1__css-tree@3.2.1_esbuild@0.27.7_typescript@5.9.3_vitest@4.1.0__@types+node@24.12.0__jsdom@27.4.0__vite@7.3.1___@types+node@24.12.0_zod@4.3.6", + "npm:@fuzdev/fuz_css@0.58": "0.58.0_@fuzdev+blake3_wasm@0.1.1_@fuzdev+fuz_util@0.55.0__@fuzdev+blake3_wasm@0.1.1__@types+estree@1.0.8__@types+node@24.12.0__esm-env@1.2.2__svelte@5.55.2__zod@4.3.6_@fuzdev+gro@0.197.3__@fuzdev+blake3_wasm@0.1.1__@fuzdev+fuz_util@0.55.0___@fuzdev+blake3_wasm@0.1.1___@types+estree@1.0.8___@types+node@24.12.0___esm-env@1.2.2___svelte@5.55.2___zod@4.3.6__@sveltejs+kit@2.55.0___@sveltejs+vite-plugin-svelte@6.2.4____svelte@5.55.2____vite@7.3.1_____@types+node@24.12.0___svelte@5.55.2___typescript@5.9.3___vite@7.3.1____@types+node@24.12.0__esbuild@0.27.7__svelte@5.55.2__typescript@5.9.3__vitest@4.1.0___@types+node@24.12.0___jsdom@27.4.0___vite@7.3.1____@types+node@24.12.0__@types+estree@1.0.8__@types+node@24.12.0_@sveltejs+acorn-typescript@1.0.9__acorn@8.16.0_@webref+css@8.4.1__css-tree@3.2.1_zimmerframe@1.1.4_zod@4.3.6_@sveltejs+kit@2.55.0__@sveltejs+vite-plugin-svelte@6.2.4___svelte@5.55.2___vite@7.3.1____@types+node@24.12.0__svelte@5.55.2__typescript@5.9.3__vite@7.3.1___@types+node@24.12.0_@types+estree@1.0.8_@types+node@24.12.0_esbuild@0.27.7_esm-env@1.2.2_svelte@5.55.2_typescript@5.9.3_vitest@4.1.0__@types+node@24.12.0__jsdom@27.4.0__v
ite@7.3.1___@types+node@24.12.0", + "npm:@fuzdev/fuz_util@0.55": "0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6", + "npm:@google/generative-ai@~0.24.1": "0.24.1", + "npm:@jridgewell/trace-mapping@~0.3.31": "0.3.31", + "npm:@ryanatkn/eslint-config@~0.10.1": "0.10.1_eslint@9.39.4_eslint-plugin-svelte@3.15.2__eslint@9.39.4__svelte@5.55.2_svelte@5.55.2_typescript@5.9.3_typescript-eslint@8.57.1__eslint@9.39.4__typescript@5.9.3", + "npm:@sveltejs/acorn-typescript@^1.0.9": "1.0.9_acorn@8.16.0", + "npm:@sveltejs/adapter-static@^3.0.10": "3.0.10_@sveltejs+kit@2.55.0__@sveltejs+vite-plugin-svelte@6.2.4___svelte@5.55.2___vite@7.3.1____@types+node@24.12.0__svelte@5.55.2__typescript@5.9.3__vite@7.3.1___@types+node@24.12.0_svelte@5.55.2", + "npm:@sveltejs/vite-plugin-svelte@^6.2.4": "6.2.4_svelte@5.55.2_vite@7.3.1__@types+node@24.12.0", + "npm:@types/deno@^2.5.0": "2.5.0", + "npm:@types/estree@^1.0.8": "1.0.8", + "npm:@types/node@^24.10.1": "24.12.0", + "npm:@webref/css@^8.2.0": "8.4.1_css-tree@3.2.1", + "npm:@xterm/xterm@6": "6.0.0", + "npm:date-fns@^4.1.0": "4.1.0", + "npm:eslint-plugin-svelte@^3.13.1": "3.15.2_eslint@9.39.4_svelte@5.55.2", + "npm:eslint@^9.39.1": "9.39.4", + "npm:esm-env@1": "1.2.2", + "npm:esm-env@^1.2.2": "1.2.2", + "npm:hono@4": "4.12.8", + "npm:hono@^4.12.7": "4.12.8", + "npm:jsdom@^27.2.0": "27.4.0", + "npm:magic-string@~0.30.21": "0.30.21", + "npm:ollama@0.6": "0.6.3", + "npm:ollama@~0.6.3": "0.6.3", + "npm:openai@^6.10.0": "6.29.0_zod@4.3.6", + "npm:prettier-plugin-svelte@^3.4.1": "3.5.1_prettier@3.8.1_svelte@5.55.2", + "npm:prettier@^3.7.4": "3.8.1", + "npm:svelte-check@^4.4.5": "4.4.6_svelte@5.55.2_typescript@5.9.3", + "npm:svelte2tsx@~0.7.52": "0.7.52_svelte@5.55.2_typescript@5.9.3", + "npm:tslib@^2.8.1": "2.8.1", + "npm:typescript-eslint@^8.48.1": "8.57.1_eslint@9.39.4_typescript@5.9.3", + "npm:typescript@^5.9.3": "5.9.3", + "npm:vite@^7.3.1": "7.3.1_@types+node@24.12.0", + 
"npm:vitest@^4.0.15": "4.1.0_@types+node@24.12.0_jsdom@27.4.0_vite@7.3.1__@types+node@24.12.0", + "npm:zimmerframe@^1.1.4": "1.1.4", + "npm:zod@4": "4.3.6", + "npm:zod@^4.3.6": "4.3.6" + }, + "npm": { + "@acemir/cssom@0.9.31": { + "integrity": "sha512-ZnR3GSaH+/vJ0YlHau21FjfLYjMpYVIzTD8M8vIEQvIGxeOXyXdzCI140rrCY862p/C/BbzWsjc1dgnM9mkoTA==" + }, + "@anthropic-ai/sdk@0.71.2_zod@4.3.6": { + "integrity": "sha512-TGNDEUuEstk/DKu0/TflXAEt+p+p/WhTlFzEnoosvbaDU2LTjm42igSdlL0VijrKpWejtOKxX0b8A7uc+XiSAQ==", + "dependencies": [ + "json-schema-to-ts", + "zod" + ], + "optionalPeers": [ + "zod" + ], + "bin": true + }, + "@asamuzakjp/css-color@4.1.2": { + "integrity": "sha512-NfBUvBaYgKIuq6E/RBLY1m0IohzNHAYyaJGuTK79Z23uNwmz2jl1mPsC5ZxCCxylinKhT1Amn5oNTlx1wN8cQg==", + "dependencies": [ + "@csstools/css-calc", + "@csstools/css-color-parser", + "@csstools/css-parser-algorithms", + "@csstools/css-tokenizer", + "lru-cache" + ] + }, + "@asamuzakjp/dom-selector@6.8.1": { + "integrity": "sha512-MvRz1nCqW0fsy8Qz4dnLIvhOlMzqDVBabZx6lH+YywFDdjXhMY37SmpV1XFX3JzG5GWHn63j6HX6QPr3lZXHvQ==", + "dependencies": [ + "@asamuzakjp/nwsapi", + "bidi-js", + "css-tree", + "is-potential-custom-element-name", + "lru-cache" + ] + }, + "@asamuzakjp/nwsapi@2.3.9": { + "integrity": "sha512-n8GuYSrI9bF7FFZ/SjhwevlHc8xaVlb/7HmHelnc/PZXBD2ZR49NnN9sMMuDdEGPeeRQ5d0hqlSlEpgCX3Wl0Q==" + }, + "@babel/runtime@7.28.6": { + "integrity": "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==" + }, + "@changesets/changelog-git@0.2.1": { + "integrity": "sha512-x/xEleCFLH28c3bQeQIyeZf8lFXyDFVn1SgcBiR2Tw/r4IAWlk1fzxCEZ6NxQAjF2Nwtczoen3OA2qR+UawQ8Q==", + "dependencies": [ + "@changesets/types" + ] + }, + "@changesets/types@6.1.0": { + "integrity": "sha512-rKQcJ+o1nKNgeoYRHKOS07tAMNd3YSN0uHaJOZYjBAgxfV7TUE7JE+z4BzZdQwb5hKaYbayKN5KrYV7ODb2rAA==" + }, + "@csstools/color-helpers@6.0.2": { + "integrity": 
"sha512-LMGQLS9EuADloEFkcTBR3BwV/CGHV7zyDxVRtVDTwdI2Ca4it0CCVTT9wCkxSgokjE5Ho41hEPgb8OEUwoXr6Q==" + }, + "@csstools/css-calc@3.1.1_@csstools+css-parser-algorithms@4.0.0__@csstools+css-tokenizer@4.0.0_@csstools+css-tokenizer@4.0.0": { + "integrity": "sha512-HJ26Z/vmsZQqs/o3a6bgKslXGFAungXGbinULZO3eMsOyNJHeBBZfup5FiZInOghgoM4Hwnmw+OgbJCNg1wwUQ==", + "dependencies": [ + "@csstools/css-parser-algorithms", + "@csstools/css-tokenizer" + ] + }, + "@csstools/css-color-parser@4.0.2_@csstools+css-parser-algorithms@4.0.0__@csstools+css-tokenizer@4.0.0_@csstools+css-tokenizer@4.0.0": { + "integrity": "sha512-0GEfbBLmTFf0dJlpsNU7zwxRIH0/BGEMuXLTCvFYxuL1tNhqzTbtnFICyJLTNK4a+RechKP75e7w42ClXSnJQw==", + "dependencies": [ + "@csstools/color-helpers", + "@csstools/css-calc", + "@csstools/css-parser-algorithms", + "@csstools/css-tokenizer" + ] + }, + "@csstools/css-parser-algorithms@4.0.0_@csstools+css-tokenizer@4.0.0": { + "integrity": "sha512-+B87qS7fIG3L5h3qwJ/IFbjoVoOe/bpOdh9hAjXbvx0o8ImEmUsGXN0inFOnk2ChCFgqkkGFQ+TpM5rbhkKe4w==", + "dependencies": [ + "@csstools/css-tokenizer" + ] + }, + "@csstools/css-syntax-patches-for-csstree@1.1.1_css-tree@3.2.1": { + "integrity": "sha512-BvqN0AMWNAnLk9G8jnUT77D+mUbY/H2b3uDTvg2isJkHaOufUE2R3AOwxWo7VBQKT1lOdwdvorddo2B/lk64+w==", + "dependencies": [ + "css-tree" + ], + "optionalPeers": [ + "css-tree" + ] + }, + "@csstools/css-tokenizer@4.0.0": { + "integrity": "sha512-QxULHAm7cNu72w97JUNCBFODFaXpbDg+dP8b/oWFAZ2MTRppA3U00Y2L1HqaS4J6yBqxwa/Y3nMBaxVKbB/NsA==" + }, + "@electric-sql/pglite@0.3.16": { + "integrity": "sha512-mZkZfOd9OqTMHsK+1cje8OSzfAQcpD7JmILXTl5ahdempjUDdmg4euf1biDex5/LfQIDJ3gvCu6qDgdnDxfJmA==" + }, + "@emnapi/core@1.9.0": { + "integrity": "sha512-0DQ98G9ZQZOxfUcQn1waV2yS8aWdZ6kJMbYCJB3oUBecjWYO1fqJ+a1DRfPF3O5JEkwqwP1A9QEN/9mYm2Yd0w==", + "dependencies": [ + "@emnapi/wasi-threads", + "tslib" + ] + }, + "@emnapi/runtime@1.9.0": { + "integrity": 
"sha512-QN75eB0IH2ywSpRpNddCRfQIhmJYBCJ1x5Lb3IscKAL8bMnVAKnRg8dCoXbHzVLLH7P38N2Z3mtulB7W0J0FKw==", + "dependencies": [ + "tslib" + ] + }, + "@emnapi/wasi-threads@1.2.0": { + "integrity": "sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg==", + "dependencies": [ + "tslib" + ] + }, + "@esbuild/aix-ppc64@0.27.7": { + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "os": ["aix"], + "cpu": ["ppc64"] + }, + "@esbuild/android-arm64@0.27.7": { + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "os": ["android"], + "cpu": ["arm64"] + }, + "@esbuild/android-arm@0.27.7": { + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "os": ["android"], + "cpu": ["arm"] + }, + "@esbuild/android-x64@0.27.7": { + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "os": ["android"], + "cpu": ["x64"] + }, + "@esbuild/darwin-arm64@0.27.7": { + "integrity": "sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "os": ["darwin"], + "cpu": ["arm64"] + }, + "@esbuild/darwin-x64@0.27.7": { + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "os": ["darwin"], + "cpu": ["x64"] + }, + "@esbuild/freebsd-arm64@0.27.7": { + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "os": ["freebsd"], + "cpu": ["arm64"] + }, + "@esbuild/freebsd-x64@0.27.7": { + "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "os": ["freebsd"], + "cpu": ["x64"] + }, + "@esbuild/linux-arm64@0.27.7": { + "integrity": 
"sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@esbuild/linux-arm@0.27.7": { + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "os": ["linux"], + "cpu": ["arm"] + }, + "@esbuild/linux-ia32@0.27.7": { + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "os": ["linux"], + "cpu": ["ia32"] + }, + "@esbuild/linux-loong64@0.27.7": { + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "os": ["linux"], + "cpu": ["loong64"] + }, + "@esbuild/linux-mips64el@0.27.7": { + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "os": ["linux"], + "cpu": ["mips64el"] + }, + "@esbuild/linux-ppc64@0.27.7": { + "integrity": "sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "os": ["linux"], + "cpu": ["ppc64"] + }, + "@esbuild/linux-riscv64@0.27.7": { + "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "os": ["linux"], + "cpu": ["riscv64"] + }, + "@esbuild/linux-s390x@0.27.7": { + "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "os": ["linux"], + "cpu": ["s390x"] + }, + "@esbuild/linux-x64@0.27.7": { + "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@esbuild/netbsd-arm64@0.27.7": { + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "os": ["netbsd"], + "cpu": ["arm64"] + }, + "@esbuild/netbsd-x64@0.27.7": { + "integrity": 
"sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "os": ["netbsd"], + "cpu": ["x64"] + }, + "@esbuild/openbsd-arm64@0.27.7": { + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "os": ["openbsd"], + "cpu": ["arm64"] + }, + "@esbuild/openbsd-x64@0.27.7": { + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "os": ["openbsd"], + "cpu": ["x64"] + }, + "@esbuild/openharmony-arm64@0.27.7": { + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "os": ["openharmony"], + "cpu": ["arm64"] + }, + "@esbuild/sunos-x64@0.27.7": { + "integrity": "sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "os": ["sunos"], + "cpu": ["x64"] + }, + "@esbuild/win32-arm64@0.27.7": { + "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "os": ["win32"], + "cpu": ["arm64"] + }, + "@esbuild/win32-ia32@0.27.7": { + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "os": ["win32"], + "cpu": ["ia32"] + }, + "@esbuild/win32-x64@0.27.7": { + "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "os": ["win32"], + "cpu": ["x64"] + }, + "@eslint-community/eslint-utils@4.9.1_eslint@9.39.4": { + "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==", + "dependencies": [ + "eslint", + "eslint-visitor-keys@3.4.3" + ] + }, + "@eslint-community/regexpp@4.12.2": { + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==" + }, + "@eslint/config-array@0.21.2": { + "integrity": 
"sha512-nJl2KGTlrf9GjLimgIru+V/mzgSK0ABCDQRvxw5BjURL7WfH5uoWmizbH7QB6MmnMBd8cIC9uceWnezL1VZWWw==", + "dependencies": [ + "@eslint/object-schema", + "debug", + "minimatch@3.1.5" + ] + }, + "@eslint/config-helpers@0.4.2": { + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", + "dependencies": [ + "@eslint/core" + ] + }, + "@eslint/core@0.17.0": { + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", + "dependencies": [ + "@types/json-schema" + ] + }, + "@eslint/eslintrc@3.3.5": { + "integrity": "sha512-4IlJx0X0qftVsN5E+/vGujTRIFtwuLbNsVUe7TO6zYPDR1O6nFwvwhIKEKSrl6dZchmYBITazxKoUYOjdtjlRg==", + "dependencies": [ + "ajv", + "debug", + "espree", + "globals@14.0.0", + "ignore@5.3.2", + "import-fresh", + "js-yaml", + "minimatch@3.1.5", + "strip-json-comments" + ] + }, + "@eslint/js@9.39.4": { + "integrity": "sha512-nE7DEIchvtiFTwBw4Lfbu59PG+kCofhjsKaCWzxTpt4lfRjRMqG6uMBzKXuEcyXhOHoUp9riAm7/aWYGhXZ9cw==" + }, + "@eslint/object-schema@2.1.7": { + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==" + }, + "@eslint/plugin-kit@0.4.1": { + "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", + "dependencies": [ + "@eslint/core", + "levn" + ] + }, + "@exodus/bytes@1.15.0": { + "integrity": "sha512-UY0nlA+feH81UGSHv92sLEPLCeZFjXOuHhrIo0HQydScuQc8s0A7kL/UdgwgDq8g8ilksmuoF35YVTNphV2aBQ==" + }, + "@fuzdev/blake3_wasm@0.1.1": { + "integrity": "sha512-JikFOouJEVLKJvsEQ7+fRdo3GElL4nmu2sV8rg+xu2bv+BAMk+GvoO3TOSPYX9fdHeXJ7U4N0IdIP/mNh7WNfw==" + }, + 
"@fuzdev/fuz_app@0.3.2_@electric-sql+pglite@0.3.16_@fuzdev+blake3_wasm@0.1.1_@fuzdev+fuz_util@0.55.0__@fuzdev+blake3_wasm@0.1.1__@types+estree@1.0.8__@types+node@24.12.0__esm-env@1.2.2__svelte@5.55.2__zod@4.3.6_@node-rs+argon2@2.0.2_@sveltejs+kit@2.55.0__@sveltejs+vite-plugin-svelte@6.2.4___svelte@5.55.2___vite@7.3.1____@types+node@24.12.0__svelte@5.55.2__typescript@5.9.3__vite@7.3.1___@types+node@24.12.0_hono@4.12.8_svelte@5.55.2_zod@4.3.6_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2": { + "integrity": "sha512-FN9EknZ9r+nJePVC83ATyF4V5q6teM7jEowY18eSV1EUqG35UMZlVW8XZHfSjH8c8rr13D1APJ0ykONY7jLcWg==", + "dependencies": [ + "@electric-sql/pglite", + "@fuzdev/blake3_wasm", + "@fuzdev/fuz_util@0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6", + "@node-rs/argon2", + "@sveltejs/kit", + "hono", + "svelte", + "zod" + ], + "optionalPeers": [ + "@electric-sql/pglite" + ] + }, + "@fuzdev/fuz_code@0.45.1_@fuzdev+fuz_css@0.58.0__@fuzdev+blake3_wasm@0.1.1__@fuzdev+fuz_util@0.55.0___@fuzdev+blake3_wasm@0.1.1___@types+estree@1.0.8___@types+node@24.12.0___esm-env@1.2.2___svelte@5.55.2___zod@4.3.6__@fuzdev+gro@0.197.3___@fuzdev+blake3_wasm@0.1.1___@fuzdev+fuz_util@0.55.0____@fuzdev+blake3_wasm@0.1.1____@types+estree@1.0.8____@types+node@24.12.0____esm-env@1.2.2____svelte@5.55.2____zod@4.3.6___@sveltejs+kit@2.55.0____@sveltejs+vite-plugin-svelte@6.2.4_____svelte@5.55.2_____vite@7.3.1______@types+node@24.12.0____svelte@5.55.2____typescript@5.9.3____vite@7.3.1_____@types+node@24.12.0___esbuild@0.27.7___svelte@5.55.2___typescript@5.9.3___vitest@4.1.0____@types+node@24.12.0____jsdom@27.4.0____vite@7.3.1_____@types+node@24.12.0___@types+estree@1.0.8___@types+node@24.12.0__@sveltejs+acorn-typescript@1.0.9___acorn@8.16.0__@webref+css@8.4.1___css-tree@3.2.1__zimmerframe@1.1.4__zod@4.3.6__@sveltejs+kit@2.55.0___@sveltejs+vite-plugin-svelte@6.2.4____svelte@5.55.2____vite@7.3.1_____@types+node@24.12.0___svelte@5.55.2___types
cript@5.9.3___vite@7.3.1____@types+node@24.12.0__@types+estree@1.0.8__@types+node@24.12.0__esbuild@0.27.7__esm-env@1.2.2__svelte@5.55.2__typescript@5.9.3__vitest@4.1.0___@types+node@24.12.0___jsdom@27.4.0___vite@7.3.1____@types+node@24.12.0_@fuzdev+fuz_util@0.55.0__@fuzdev+blake3_wasm@0.1.1__@types+estree@1.0.8__@types+node@24.12.0__esm-env@1.2.2__svelte@5.55.2__zod@4.3.6_esm-env@1.2.2_magic-string@0.30.21_svelte@5.55.2_zimmerframe@1.1.4_@fuzdev+blake3_wasm@0.1.1_@fuzdev+gro@0.197.3__@fuzdev+blake3_wasm@0.1.1__@fuzdev+fuz_util@0.55.0___@fuzdev+blake3_wasm@0.1.1___@types+estree@1.0.8___@types+node@24.12.0___esm-env@1.2.2___svelte@5.55.2___zod@4.3.6__@sveltejs+kit@2.55.0___@sveltejs+vite-plugin-svelte@6.2.4____svelte@5.55.2____vite@7.3.1_____@types+node@24.12.0___svelte@5.55.2___typescript@5.9.3___vite@7.3.1____@types+node@24.12.0__esbuild@0.27.7__svelte@5.55.2__typescript@5.9.3__vitest@4.1.0___@types+node@24.12.0___jsdom@27.4.0___vite@7.3.1____@types+node@24.12.0__@types+estree@1.0.8__@types+node@24.12.0_@sveltejs+acorn-typescript@1.0.9__acorn@8.16.0_@sveltejs+kit@2.55.0__@sveltejs+vite-plugin-svelte@6.2.4___svelte@5.55.2___vite@7.3.1____@types+node@24.12.0__svelte@5.55.2__typescript@5.9.3__vite@7.3.1___@types+node@24.12.0_@types+estree@1.0.8_@types+node@24.12.0_@webref+css@8.4.1__css-tree@3.2.1_esbuild@0.27.7_typescript@5.9.3_vitest@4.1.0__@types+node@24.12.0__jsdom@27.4.0__vite@7.3.1___@types+node@24.12.0_zod@4.3.6": { + "integrity": "sha512-aVWWJHJ3U/bV9ZqooBuZ1XQrFgKdbSgRgs4NQOXDHl20JmmoR0jf7BkxQM/lxhtT/WU5kFJhiaGFYZCSmSgUuw==", + "dependencies": [ + "@fuzdev/fuz_css", + "@fuzdev/fuz_util@0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6", + "esm-env", + "magic-string", + "svelte", + "zimmerframe" + ], + "optionalPeers": [ + "@fuzdev/fuz_css", + "@fuzdev/fuz_util@0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6", + "magic-string", + 
"svelte", + "zimmerframe" + ] + }, + "@fuzdev/fuz_css@0.58.0_@fuzdev+blake3_wasm@0.1.1_@fuzdev+fuz_util@0.55.0__@fuzdev+blake3_wasm@0.1.1__@types+estree@1.0.8__@types+node@24.12.0__esm-env@1.2.2__svelte@5.55.2__zod@4.3.6_@fuzdev+gro@0.197.3__@fuzdev+blake3_wasm@0.1.1__@fuzdev+fuz_util@0.55.0___@fuzdev+blake3_wasm@0.1.1___@types+estree@1.0.8___@types+node@24.12.0___esm-env@1.2.2___svelte@5.55.2___zod@4.3.6__@sveltejs+kit@2.55.0___@sveltejs+vite-plugin-svelte@6.2.4____svelte@5.55.2____vite@7.3.1_____@types+node@24.12.0___svelte@5.55.2___typescript@5.9.3___vite@7.3.1____@types+node@24.12.0__esbuild@0.27.7__svelte@5.55.2__typescript@5.9.3__vitest@4.1.0___@types+node@24.12.0___jsdom@27.4.0___vite@7.3.1____@types+node@24.12.0__@types+estree@1.0.8__@types+node@24.12.0_@sveltejs+acorn-typescript@1.0.9__acorn@8.16.0_@webref+css@8.4.1__css-tree@3.2.1_zimmerframe@1.1.4_zod@4.3.6_@sveltejs+kit@2.55.0__@sveltejs+vite-plugin-svelte@6.2.4___svelte@5.55.2___vite@7.3.1____@types+node@24.12.0__svelte@5.55.2__typescript@5.9.3__vite@7.3.1___@types+node@24.12.0_@types+estree@1.0.8_@types+node@24.12.0_esbuild@0.27.7_esm-env@1.2.2_svelte@5.55.2_typescript@5.9.3_vitest@4.1.0__@types+node@24.12.0__jsdom@27.4.0__vite@7.3.1___@types+node@24.12.0": { + "integrity": "sha512-+66ZulIMyZj6xdh61kpTznZaSucpMtUggJOQjEmzdLYAY74GUFsda6801J6YgQhlqJ3QX7wzP8z7+lw8NMcMUQ==", + "dependencies": [ + "@fuzdev/blake3_wasm", + "@fuzdev/fuz_util@0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6", + "@fuzdev/gro", + "@sveltejs/acorn-typescript", + "@webref/css", + "zimmerframe", + "zod" + ], + "optionalPeers": [ + "@fuzdev/blake3_wasm", + "@fuzdev/fuz_util@0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6", + "@fuzdev/gro", + "@sveltejs/acorn-typescript", + "@webref/css", + "zimmerframe", + "zod" + ] + }, + 
"@fuzdev/fuz_ui@0.191.4_@fuzdev+fuz_code@0.45.1__@fuzdev+fuz_css@0.58.0___@fuzdev+blake3_wasm@0.1.1___@fuzdev+fuz_util@0.55.0____@fuzdev+blake3_wasm@0.1.1____@types+estree@1.0.8____@types+node@24.12.0____esm-env@1.2.2____svelte@5.55.2____zod@4.3.6___@fuzdev+gro@0.197.3____@fuzdev+blake3_wasm@0.1.1____@fuzdev+fuz_util@0.55.0_____@fuzdev+blake3_wasm@0.1.1_____@types+estree@1.0.8_____@types+node@24.12.0_____esm-env@1.2.2_____svelte@5.55.2_____zod@4.3.6____@sveltejs+kit@2.55.0_____@sveltejs+vite-plugin-svelte@6.2.4______svelte@5.55.2______vite@7.3.1_______@types+node@24.12.0_____svelte@5.55.2_____typescript@5.9.3_____vite@7.3.1______@types+node@24.12.0____esbuild@0.27.7____svelte@5.55.2____typescript@5.9.3____vitest@4.1.0_____@types+node@24.12.0_____jsdom@27.4.0_____vite@7.3.1______@types+node@24.12.0____@types+estree@1.0.8____@types+node@24.12.0___@sveltejs+acorn-typescript@1.0.9____acorn@8.16.0___@webref+css@8.4.1____css-tree@3.2.1___zimmerframe@1.1.4___zod@4.3.6___@sveltejs+kit@2.55.0____@sveltejs+vite-plugin-svelte@6.2.4_____svelte@5.55.2_____vite@7.3.1______@types+node@24.12.0____svelte@5.55.2____typescript@5.9.3____vite@7.3.1_____@types+node@24.12.0___@types+estree@1.0.8___@types+node@24.12.0___esbuild@0.27.7___esm-env@1.2.2___svelte@5.55.2___typescript@5.9.3___vitest@4.1.0____@types+node@24.12.0____jsdom@27.4.0____vite@7.3.1_____@types+node@24.12.0__@fuzdev+fuz_util@0.55.0___@fuzdev+blake3_wasm@0.1.1___@types+estree@1.0.8___@types+node@24.12.0___esm-env@1.2.2___svelte@5.55.2___zod@4.3.6__esm-env@1.2.2__magic-string@0.30.21__svelte@5.55.2__zimmerframe@1.1.4__@fuzdev+blake3_wasm@0.1.1__@fuzdev+gro@0.197.3___@fuzdev+blake3_wasm@0.1.1___@fuzdev+fuz_util@0.55.0____@fuzdev+blake3_wasm@0.1.1____@types+estree@1.0.8____@types+node@24.12.0____esm-env@1.2.2____svelte@5.55.2____zod@4.3.6___@sveltejs+kit@2.55.0____@sveltejs+vite-plugin-svelte@6.2.4_____svelte@5.55.2_____vite@7.3.1______@types+node@24.12.0____svelte@5.55.2____typescript@5.9.3____vite@7.3.1_____@types+node@24.1
2.0___esbuild@0.27.7___svelte@5.55.2___typescript@5.9.3___vitest@4.1.0____@types+node@24.12.0____jsdom@27.4.0____vite@7.3.1_____@types+node@24.12.0___@types+estree@1.0.8___@types+node@24.12.0__@sveltejs+acorn-typescript@1.0.9___acorn@8.16.0__@sveltejs+kit@2.55.0___@sveltejs+vite-plugin-svelte@6.2.4____svelte@5.55.2____vite@7.3.1_____@types+node@24.12.0___svelte@5.55.2___typescript@5.9.3___vite@7.3.1____@types+node@24.12.0__@types+estree@1.0.8__@types+node@24.12.0__@webref+css@8.4.1___css-tree@3.2.1__esbuild@0.27.7__typescript@5.9.3__vitest@4.1.0___@types+node@24.12.0___jsdom@27.4.0___vite@7.3.1____@types+node@24.12.0__zod@4.3.6_@fuzdev+fuz_css@0.58.0__@fuzdev+blake3_wasm@0.1.1__@fuzdev+fuz_util@0.55.0___@fuzdev+blake3_wasm@0.1.1___@types+estree@1.0.8___@types+node@24.12.0___esm-env@1.2.2___svelte@5.55.2___zod@4.3.6__@fuzdev+gro@0.197.3___@fuzdev+blake3_wasm@0.1.1___@fuzdev+fuz_util@0.55.0____@fuzdev+blake3_wasm@0.1.1____@types+estree@1.0.8____@types+node@24.12.0____esm-env@1.2.2____svelte@5.55.2____zod@4.3.6___@sveltejs+kit@2.55.0____@sveltejs+vite-plugin-svelte@6.2.4_____svelte@5.55.2_____vite@7.3.1______@types+node@24.12.0____svelte@5.55.2____typescript@5.9.3____vite@7.3.1_____@types+node@24.12.0___esbuild@0.27.7___svelte@5.55.2___typescript@5.9.3___vitest@4.1.0____@types+node@24.12.0____jsdom@27.4.0____vite@7.3.1_____@types+node@24.12.0___@types+estree@1.0.8___@types+node@24.12.0__@sveltejs+acorn-typescript@1.0.9___acorn@8.16.0__@webref+css@8.4.1___css-tree@3.2.1__zimmerframe@1.1.4__zod@4.3.6__@sveltejs+kit@2.55.0___@sveltejs+vite-plugin-svelte@6.2.4____svelte@5.55.2____vite@7.3.1_____@types+node@24.12.0___svelte@5.55.2___typescript@5.9.3___vite@7.3.1____@types+node@24.12.0__@types+estree@1.0.8__@types+node@24.12.0__esbuild@0.27.7__esm-env@1.2.2__svelte@5.55.2__typescript@5.9.3__vitest@4.1.0___@types+node@24.12.0___jsdom@27.4.0___vite@7.3.1____@types+node@24.12.0_@fuzdev+fuz_util@0.55.0__@fuzdev+blake3_wasm@0.1.1__@types+estree@1.0.8__@types+node@24.12.0__esm-env
@1.2.2__svelte@5.55.2__zod@4.3.6_@fuzdev+gro@0.197.3__@fuzdev+blake3_wasm@0.1.1__@fuzdev+fuz_util@0.55.0___@fuzdev+blake3_wasm@0.1.1___@types+estree@1.0.8___@types+node@24.12.0___esm-env@1.2.2___svelte@5.55.2___zod@4.3.6__@sveltejs+kit@2.55.0___@sveltejs+vite-plugin-svelte@6.2.4____svelte@5.55.2____vite@7.3.1_____@types+node@24.12.0___svelte@5.55.2___typescript@5.9.3___vite@7.3.1____@types+node@24.12.0__esbuild@0.27.7__svelte@5.55.2__typescript@5.9.3__vitest@4.1.0___@types+node@24.12.0___jsdom@27.4.0___vite@7.3.1____@types+node@24.12.0__@types+estree@1.0.8__@types+node@24.12.0_@jridgewell+trace-mapping@0.3.31_@sveltejs+kit@2.55.0__@sveltejs+vite-plugin-svelte@6.2.4___svelte@5.55.2___vite@7.3.1____@types+node@24.12.0__svelte@5.55.2__typescript@5.9.3__vite@7.3.1___@types+node@24.12.0_@types+estree@1.0.8_esm-env@1.2.2_svelte@5.55.2_svelte2tsx@0.7.52__svelte@5.55.2__typescript@5.9.3_vite@7.3.1__@types+node@24.12.0_zod@4.3.6_@fuzdev+blake3_wasm@0.1.1_@sveltejs+acorn-typescript@1.0.9__acorn@8.16.0_@types+node@24.12.0_@webref+css@8.4.1__css-tree@3.2.1_esbuild@0.27.7_typescript@5.9.3_vitest@4.1.0__@types+node@24.12.0__jsdom@27.4.0__vite@7.3.1___@types+node@24.12.0_zimmerframe@1.1.4": { + "integrity": "sha512-OYF6k1GR2v2wy5BbnYZ6GRGCloS3zQ5y+Nvn1trhN38YdX5HPgBDRWUOUdI9EbHCiMb6t0Ey+dUuEqu97OQFlg==", + "dependencies": [ + "@fuzdev/fuz_code", + "@fuzdev/fuz_css", + "@fuzdev/fuz_util@0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6", + "@fuzdev/gro", + "@jridgewell/trace-mapping", + "@sveltejs/kit", + "@types/estree", + "esm-env", + "svelte", + "svelte2tsx", + "vite", + "zod" + ], + "optionalPeers": [ + "@fuzdev/fuz_code", + "@fuzdev/fuz_util@0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6", + "@fuzdev/gro", + "@jridgewell/trace-mapping", + "@types/estree", + "esm-env", + "svelte2tsx", + "vite", + "zod" + ] + }, + 
"@fuzdev/fuz_util@0.53.4_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6": { + "integrity": "sha512-pkb0vsEviCnVv3Oc9ZPYUkkRxtg6CInFvm4S3AcXTdJv6T8efzwkQh7pqPAEVxVT3ZM1Tse2Ej7MEbxK477j2Q==", + "dependencies": [ + "@fuzdev/blake3_wasm", + "@types/estree", + "@types/node", + "esm-env", + "svelte", + "zod" + ], + "optionalPeers": [ + "@fuzdev/blake3_wasm", + "@types/estree", + "@types/node", + "esm-env", + "svelte", + "zod" + ] + }, + "@fuzdev/fuz_util@0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6": { + "integrity": "sha512-nHjwB6RIExT4+n+1OWhy+mlq0KGlkdY/NtABYncHqo6AuD+Pq8+7PDIhhGEWcBWB49NySxroujdaGfyS8xrsBw==", + "dependencies": [ + "@fuzdev/blake3_wasm", + "@types/estree", + "@types/node", + "esm-env", + "svelte", + "zod" + ], + "optionalPeers": [ + "@fuzdev/blake3_wasm", + "@types/estree", + "@types/node", + "esm-env", + "svelte", + "zod" + ] + }, + "@fuzdev/gro@0.197.3_@fuzdev+blake3_wasm@0.1.1_@fuzdev+fuz_util@0.55.0__@fuzdev+blake3_wasm@0.1.1__@types+estree@1.0.8__@types+node@24.12.0__esm-env@1.2.2__svelte@5.55.2__zod@4.3.6_@sveltejs+kit@2.55.0__@sveltejs+vite-plugin-svelte@6.2.4___svelte@5.55.2___vite@7.3.1____@types+node@24.12.0__svelte@5.55.2__typescript@5.9.3__vite@7.3.1___@types+node@24.12.0_esbuild@0.27.7_svelte@5.55.2_typescript@5.9.3_vitest@4.1.0__@types+node@24.12.0__jsdom@27.4.0__vite@7.3.1___@types+node@24.12.0_@types+estree@1.0.8_@types+node@24.12.0": { + "integrity": "sha512-LSwnzCL968EQ/abuUWlgJBSKQVAmCXVrQWdSx6ogGhe3xWvSVPJIeH8fi16azSjqrSUOD/oOUcvdQczmvt/H6g==", + "dependencies": [ + "@fuzdev/blake3_wasm", + "@fuzdev/fuz_util@0.55.0_@fuzdev+blake3_wasm@0.1.1_@types+estree@1.0.8_@types+node@24.12.0_esm-env@1.2.2_svelte@5.55.2_zod@4.3.6", + "@sveltejs/kit", + "chokidar@5.0.0", + "dotenv", + "esbuild", + "esm-env", + "oxc-parser", + "prettier", + "prettier-plugin-svelte", + "svelte", + "ts-blank-space", + "tslib", + "typescript", 
+ "zod" + ], + "optionalDependencies": [ + "vitest" + ], + "optionalPeers": [ + "@sveltejs/kit", + "vitest" + ], + "bin": true + }, + "@google/generative-ai@0.24.1": { + "integrity": "sha512-MqO+MLfM6kjxcKoy0p1wRzG3b4ZZXtPI+z2IE26UogS2Cm/XHO+7gGRBh6gcJsOiIVoH93UwKvW4HdgiOZCy9Q==" + }, + "@humanfs/core@0.19.1": { + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==" + }, + "@humanfs/node@0.16.7": { + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", + "dependencies": [ + "@humanfs/core", + "@humanwhocodes/retry" + ] + }, + "@humanwhocodes/module-importer@1.0.1": { + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==" + }, + "@humanwhocodes/retry@0.4.3": { + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==" + }, + "@jridgewell/gen-mapping@0.3.13": { + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dependencies": [ + "@jridgewell/sourcemap-codec", + "@jridgewell/trace-mapping" + ] + }, + "@jridgewell/remapping@2.3.5": { + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dependencies": [ + "@jridgewell/gen-mapping", + "@jridgewell/trace-mapping" + ] + }, + "@jridgewell/resolve-uri@3.1.2": { + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==" + }, + "@jridgewell/sourcemap-codec@1.5.5": { + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==" + }, + "@jridgewell/trace-mapping@0.3.31": { + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dependencies": [ + "@jridgewell/resolve-uri", + "@jridgewell/sourcemap-codec" + ] + }, 
+ "@napi-rs/wasm-runtime@0.2.12": { + "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dependencies": [ + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util" + ] + }, + "@napi-rs/wasm-runtime@1.1.1": { + "integrity": "sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==", + "dependencies": [ + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util" + ] + }, + "@node-rs/argon2-android-arm-eabi@2.0.2": { + "integrity": "sha512-DV/H8p/jt40lrao5z5g6nM9dPNPGEHL+aK6Iy/og+dbL503Uj0AHLqj1Hk9aVUSCNnsDdUEKp4TVMi0YakDYKw==", + "os": ["android"], + "cpu": ["arm"] + }, + "@node-rs/argon2-android-arm64@2.0.2": { + "integrity": "sha512-1LKwskau+8O1ktKx7TbK7jx1oMOMt4YEXZOdSNIar1TQKxm6isZ0cRXgHLibPHEcNHgYRsJWDE9zvDGBB17QDg==", + "os": ["android"], + "cpu": ["arm64"] + }, + "@node-rs/argon2-darwin-arm64@2.0.2": { + "integrity": "sha512-3TTNL/7wbcpNju5YcqUrCgXnXUSbD7ogeAKatzBVHsbpjZQbNb1NDxDjqqrWoTt6XL3z9mJUMGwbAk7zQltHtA==", + "os": ["darwin"], + "cpu": ["arm64"] + }, + "@node-rs/argon2-darwin-x64@2.0.2": { + "integrity": "sha512-vNPfkLj5Ij5111UTiYuwgxMqE7DRbOS2y58O2DIySzSHbcnu+nipmRKg+P0doRq6eKIJStyBK8dQi5Ic8pFyDw==", + "os": ["darwin"], + "cpu": ["x64"] + }, + "@node-rs/argon2-freebsd-x64@2.0.2": { + "integrity": "sha512-M8vQZk01qojQfCqQU0/O1j1a4zPPrz93zc9fSINY7Q/6RhQRBCYwDw7ltDCZXg5JRGlSaeS8cUXWyhPGar3cGg==", + "os": ["freebsd"], + "cpu": ["x64"] + }, + "@node-rs/argon2-linux-arm-gnueabihf@2.0.2": { + "integrity": "sha512-7EmmEPHLzcu0G2GDh30L6G48CH38roFC2dqlQJmtRCxs6no3tTE/pvgBGatTp/o2n2oyOJcfmgndVFcUpwMnww==", + "os": ["linux"], + "cpu": ["arm"] + }, + "@node-rs/argon2-linux-arm64-gnu@2.0.2": { + "integrity": "sha512-6lsYh3Ftbk+HAIZ7wNuRF4SZDtxtFTfK+HYFAQQyW7Ig3LHqasqwfUKRXVSV5tJ+xTnxjqgKzvZSUJCAyIfHew==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@node-rs/argon2-linux-arm64-musl@2.0.2": { + "integrity": 
"sha512-p3YqVMNT/4DNR67tIHTYGbedYmXxW9QlFmF39SkXyEbGQwpgSf6pH457/fyXBIYznTU/smnG9EH+C1uzT5j4hA==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@node-rs/argon2-linux-x64-gnu@2.0.2": { + "integrity": "sha512-ZM3jrHuJ0dKOhvA80gKJqBpBRmTJTFSo2+xVZR+phQcbAKRlDMSZMFDiKbSTnctkfwNFtjgDdh5g1vaEV04AvA==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@node-rs/argon2-linux-x64-musl@2.0.2": { + "integrity": "sha512-of5uPqk7oCRF/44a89YlWTEfjsftPywyTULwuFDKyD8QtVZoonrJR6ZWvfFE/6jBT68S0okAkAzzMEdBVWdxWw==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@node-rs/argon2-wasm32-wasi@2.0.2": { + "integrity": "sha512-U3PzLYKSQYzTERstgtHLd4ZTkOF9co57zTXT77r0cVUsleGZOrd6ut7rHzeWwoJSiHOVxxa0OhG1JVQeB7lLoQ==", + "dependencies": [ + "@napi-rs/wasm-runtime@0.2.12" + ], + "cpu": ["wasm32"] + }, + "@node-rs/argon2-win32-arm64-msvc@2.0.2": { + "integrity": "sha512-Eisd7/NM0m23ijrGr6xI2iMocdOuyl6gO27gfMfya4C5BODbUSP7ljKJ7LrA0teqZMdYHesRDzx36Js++/vhiQ==", + "os": ["win32"], + "cpu": ["arm64"] + }, + "@node-rs/argon2-win32-ia32-msvc@2.0.2": { + "integrity": "sha512-GsE2ezwAYwh72f9gIjbGTZOf4HxEksb5M2eCaj+Y5rGYVwAdt7C12Q2e9H5LRYxWcFvLH4m4jiSZpQQ4upnPAQ==", + "os": ["win32"], + "cpu": ["ia32"] + }, + "@node-rs/argon2-win32-x64-msvc@2.0.2": { + "integrity": "sha512-cJxWXanH4Ew9CfuZ4IAEiafpOBCe97bzoKowHCGk5lG/7kR4WF/eknnBlHW9m8q7t10mKq75kruPLtbSDqgRTw==", + "os": ["win32"], + "cpu": ["x64"] + }, + "@node-rs/argon2@2.0.2": { + "integrity": "sha512-t64wIsPEtNd4aUPuTAyeL2ubxATCBGmeluaKXEMAFk/8w6AJIVVkeLKMBpgLW6LU2t5cQxT+env/c6jxbtTQBg==", + "optionalDependencies": [ + "@node-rs/argon2-android-arm-eabi", + "@node-rs/argon2-android-arm64", + "@node-rs/argon2-darwin-arm64", + "@node-rs/argon2-darwin-x64", + "@node-rs/argon2-freebsd-x64", + "@node-rs/argon2-linux-arm-gnueabihf", + "@node-rs/argon2-linux-arm64-gnu", + "@node-rs/argon2-linux-arm64-musl", + "@node-rs/argon2-linux-x64-gnu", + "@node-rs/argon2-linux-x64-musl", + "@node-rs/argon2-wasm32-wasi", + "@node-rs/argon2-win32-arm64-msvc", + 
"@node-rs/argon2-win32-ia32-msvc", + "@node-rs/argon2-win32-x64-msvc" + ] + }, + "@oxc-parser/binding-android-arm64@0.99.0": { + "integrity": "sha512-V4jhmKXgQQdRnm73F+r3ZY4pUEsijQeSraFeaCGng7abSNJGs76X6l82wHnmjLGFAeY00LWtjcELs7ZmbJ9+lA==", + "os": ["android"], + "cpu": ["arm64"] + }, + "@oxc-parser/binding-darwin-arm64@0.99.0": { + "integrity": "sha512-Rp41nf9zD5FyLZciS9l1GfK8PhYqrD5kEGxyTOA2esTLeAy37rZxetG2E3xteEolAkeb2WDkVrlxPtibeAncMg==", + "os": ["darwin"], + "cpu": ["arm64"] + }, + "@oxc-parser/binding-darwin-x64@0.99.0": { + "integrity": "sha512-WVonp40fPPxo5Gs0POTI57iEFv485TvNKOHMwZRhigwZRhZY2accEAkYIhei9eswF4HN5B44Wybkz7Gd1Qr/5Q==", + "os": ["darwin"], + "cpu": ["x64"] + }, + "@oxc-parser/binding-freebsd-x64@0.99.0": { + "integrity": "sha512-H30bjOOttPmG54gAqu6+HzbLEzuNOYO2jZYrIq4At+NtLJwvNhXz28Hf5iEAFZIH/4hMpLkM4VN7uc+5UlNW3Q==", + "os": ["freebsd"], + "cpu": ["x64"] + }, + "@oxc-parser/binding-linux-arm-gnueabihf@0.99.0": { + "integrity": "sha512-0Z/Th0SYqzSRDPs6tk5lQdW0i73UCupnim3dgq2oW0//UdLonV/5wIZCArfKGC7w9y4h8TxgXpgtIyD1kKzzlQ==", + "os": ["linux"], + "cpu": ["arm"] + }, + "@oxc-parser/binding-linux-arm-musleabihf@0.99.0": { + "integrity": "sha512-xo0wqNd5bpbzQVNpAIFbHk1xa+SaS/FGBABCd942SRTnrpxl6GeDj/s1BFaGcTl8MlwlKVMwOcyKrw/2Kdfquw==", + "os": ["linux"], + "cpu": ["arm"] + }, + "@oxc-parser/binding-linux-arm64-gnu@0.99.0": { + "integrity": "sha512-u26I6LKoLTPTd4Fcpr0aoAtjnGf5/ulMllo+QUiBhupgbVCAlaj4RyXH/mvcjcsl2bVBv9E/gYJZz2JjxQWXBA==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@oxc-parser/binding-linux-arm64-musl@0.99.0": { + "integrity": "sha512-qhftDo2D37SqCEl3ZTa367NqWSZNb1Ddp34CTmShLKFrnKdNiUn55RdokLnHtf1AL5ssaQlYDwBECX7XiBWOhw==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@oxc-parser/binding-linux-riscv64-gnu@0.99.0": { + "integrity": "sha512-zxn/xkf519f12FKkpL5XwJipsylfSSnm36h6c1zBDTz4fbIDMGyIhHfWfwM7uUmHo9Aqw1pLxFpY39Etv398+Q==", + "os": ["linux"], + "cpu": ["riscv64"] + }, + "@oxc-parser/binding-linux-s390x-gnu@0.99.0": { + 
"integrity": "sha512-Y1eSDKDS5E4IVC7Oxw+NbYAKRmJPMJTIjW+9xOWwteDHkFqpocKe0USxog+Q1uhzalD9M0p9eXWEWdGQCMDBMQ==", + "os": ["linux"], + "cpu": ["s390x"] + }, + "@oxc-parser/binding-linux-x64-gnu@0.99.0": { + "integrity": "sha512-YVJMfk5cFWB8i2/nIrbk6n15bFkMHqWnMIWkVx7r2KwpTxHyFMfu2IpeVKo1ITDSmt5nBrGdLHD36QRlu2nDLg==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@oxc-parser/binding-linux-x64-musl@0.99.0": { + "integrity": "sha512-2+SDPrie5f90A1b9EirtVggOgsqtsYU5raZwkDYKyS1uvJzjqHCDhG/f4TwQxHmIc5YkczdQfwvN91lwmjsKYQ==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@oxc-parser/binding-wasm32-wasi@0.99.0": { + "integrity": "sha512-DKA4j0QerUWSMADziLM5sAyM7V53Fj95CV9SjP77bPfEfT7MnvFKnneaRMqPK1cpzjAGiQF52OBUIKyk0dwOQA==", + "dependencies": [ + "@napi-rs/wasm-runtime@1.1.1" + ], + "cpu": ["wasm32"] + }, + "@oxc-parser/binding-win32-arm64-msvc@0.99.0": { + "integrity": "sha512-EaB3AvsxqdNUhh9FOoAxRZ2L4PCRwDlDb//QXItwyOJrX7XS+uGK9B1KEUV4FZ/7rDhHsWieLt5e07wl2Ti5AQ==", + "os": ["win32"], + "cpu": ["arm64"] + }, + "@oxc-parser/binding-win32-x64-msvc@0.99.0": { + "integrity": "sha512-sJN1Q8h7ggFOyDn0zsHaXbP/MklAVUvhrbq0LA46Qum686P3SZQHjbATqJn9yaVEvaSKXCshgl0vQ1gWkGgpcQ==", + "os": ["win32"], + "cpu": ["x64"] + }, + "@oxc-project/types@0.99.0": { + "integrity": "sha512-LLDEhXB7g1m5J+woRSgfKsFPS3LhR9xRhTeIoEBm5WrkwMxn6eZ0Ld0c0K5eHB57ChZX6I3uSmmLjZ8pcjlRcw==" + }, + "@polka/url@1.0.0-next.29": { + "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==" + }, + "@rollup/rollup-android-arm-eabi@4.59.0": { + "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==", + "os": ["android"], + "cpu": ["arm"] + }, + "@rollup/rollup-android-arm64@4.59.0": { + "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==", + "os": ["android"], + "cpu": ["arm64"] + }, + "@rollup/rollup-darwin-arm64@4.59.0": { + "integrity": 
"sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==", + "os": ["darwin"], + "cpu": ["arm64"] + }, + "@rollup/rollup-darwin-x64@4.59.0": { + "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==", + "os": ["darwin"], + "cpu": ["x64"] + }, + "@rollup/rollup-freebsd-arm64@4.59.0": { + "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==", + "os": ["freebsd"], + "cpu": ["arm64"] + }, + "@rollup/rollup-freebsd-x64@4.59.0": { + "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==", + "os": ["freebsd"], + "cpu": ["x64"] + }, + "@rollup/rollup-linux-arm-gnueabihf@4.59.0": { + "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==", + "os": ["linux"], + "cpu": ["arm"] + }, + "@rollup/rollup-linux-arm-musleabihf@4.59.0": { + "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==", + "os": ["linux"], + "cpu": ["arm"] + }, + "@rollup/rollup-linux-arm64-gnu@4.59.0": { + "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@rollup/rollup-linux-arm64-musl@4.59.0": { + "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@rollup/rollup-linux-loong64-gnu@4.59.0": { + "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==", + "os": ["linux"], + "cpu": ["loong64"] + }, + "@rollup/rollup-linux-loong64-musl@4.59.0": { + "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==", + "os": ["linux"], + "cpu": ["loong64"] + }, + 
"@rollup/rollup-linux-ppc64-gnu@4.59.0": { + "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==", + "os": ["linux"], + "cpu": ["ppc64"] + }, + "@rollup/rollup-linux-ppc64-musl@4.59.0": { + "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==", + "os": ["linux"], + "cpu": ["ppc64"] + }, + "@rollup/rollup-linux-riscv64-gnu@4.59.0": { + "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==", + "os": ["linux"], + "cpu": ["riscv64"] + }, + "@rollup/rollup-linux-riscv64-musl@4.59.0": { + "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==", + "os": ["linux"], + "cpu": ["riscv64"] + }, + "@rollup/rollup-linux-s390x-gnu@4.59.0": { + "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==", + "os": ["linux"], + "cpu": ["s390x"] + }, + "@rollup/rollup-linux-x64-gnu@4.59.0": { + "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@rollup/rollup-linux-x64-musl@4.59.0": { + "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@rollup/rollup-openbsd-x64@4.59.0": { + "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==", + "os": ["openbsd"], + "cpu": ["x64"] + }, + "@rollup/rollup-openharmony-arm64@4.59.0": { + "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==", + "os": ["openharmony"], + "cpu": ["arm64"] + }, + "@rollup/rollup-win32-arm64-msvc@4.59.0": { + "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==", + "os": 
["win32"], + "cpu": ["arm64"] + }, + "@rollup/rollup-win32-ia32-msvc@4.59.0": { + "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==", + "os": ["win32"], + "cpu": ["ia32"] + }, + "@rollup/rollup-win32-x64-gnu@4.59.0": { + "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==", + "os": ["win32"], + "cpu": ["x64"] + }, + "@rollup/rollup-win32-x64-msvc@4.59.0": { + "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==", + "os": ["win32"], + "cpu": ["x64"] + }, + "@ryanatkn/eslint-config@0.10.1_eslint@9.39.4_eslint-plugin-svelte@3.15.2__eslint@9.39.4__svelte@5.55.2_svelte@5.55.2_typescript@5.9.3_typescript-eslint@8.57.1__eslint@9.39.4__typescript@5.9.3": { + "integrity": "sha512-fHQ5PyFriflVj/fiF9m4SoUnipyK/Of522HL3+YA5TD2lKdJueA5c4wxucxkuFanuZ1FvsCBjGN/wMHO94HNHA==", + "dependencies": [ + "@eslint/js", + "eslint", + "eslint-plugin-svelte", + "globals@16.5.0", + "svelte", + "typescript", + "typescript-eslint" + ] + }, + "@standard-schema/spec@1.1.0": { + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==" + }, + "@sveltejs/acorn-typescript@1.0.9_acorn@8.16.0": { + "integrity": "sha512-lVJX6qEgs/4DOcRTpo56tmKzVPtoWAaVbL4hfO7t7NVwl9AAXzQR6cihesW1BmNMPl+bK6dreu2sOKBP2Q9CIA==", + "dependencies": [ + "acorn" + ] + }, + "@sveltejs/adapter-static@3.0.10_@sveltejs+kit@2.55.0__@sveltejs+vite-plugin-svelte@6.2.4___svelte@5.55.2___vite@7.3.1____@types+node@24.12.0__svelte@5.55.2__typescript@5.9.3__vite@7.3.1___@types+node@24.12.0_svelte@5.55.2": { + "integrity": "sha512-7D9lYFWJmB7zxZyTE/qxjksvMqzMuYrrsyh1f4AlZqeZeACPRySjbC3aFiY55wb1tWUaKOQG9PVbm74JcN2Iew==", + "dependencies": [ + "@sveltejs/kit" + ] + }, + 
"@sveltejs/kit@2.55.0_@sveltejs+vite-plugin-svelte@6.2.4__svelte@5.55.2__vite@7.3.1___@types+node@24.12.0_svelte@5.55.2_typescript@5.9.3_vite@7.3.1__@types+node@24.12.0": { + "integrity": "sha512-MdFRjevVxmAknf2NbaUkDF16jSIzXMWd4Nfah0Qp8TtQVoSp3bV4jKt8mX7z7qTUTWvgSaxtR0EG5WJf53gcuA==", + "dependencies": [ + "@standard-schema/spec", + "@sveltejs/acorn-typescript", + "@sveltejs/vite-plugin-svelte", + "@types/cookie", + "acorn", + "cookie", + "devalue", + "esm-env", + "kleur", + "magic-string", + "mrmime", + "set-cookie-parser", + "sirv", + "svelte", + "typescript", + "vite" + ], + "optionalPeers": [ + "typescript" + ], + "bin": true + }, + "@sveltejs/vite-plugin-svelte-inspector@5.0.2_@sveltejs+vite-plugin-svelte@6.2.4__svelte@5.55.2__vite@7.3.1___@types+node@24.12.0_svelte@5.55.2_vite@7.3.1__@types+node@24.12.0": { + "integrity": "sha512-TZzRTcEtZffICSAoZGkPSl6Etsj2torOVrx6Uw0KpXxrec9Gg6jFWQ60Q3+LmNGfZSxHRCZL7vXVZIWmuV50Ig==", + "dependencies": [ + "@sveltejs/vite-plugin-svelte", + "obug", + "svelte", + "vite" + ] + }, + "@sveltejs/vite-plugin-svelte@6.2.4_svelte@5.55.2_vite@7.3.1__@types+node@24.12.0": { + "integrity": "sha512-ou/d51QSdTyN26D7h6dSpusAKaZkAiGM55/AKYi+9AGZw7q85hElbjK3kEyzXHhLSnRISHOYzVge6x0jRZ7DXA==", + "dependencies": [ + "@sveltejs/vite-plugin-svelte-inspector", + "deepmerge", + "magic-string", + "obug", + "svelte", + "vite", + "vitefu" + ] + }, + "@tybys/wasm-util@0.10.1": { + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dependencies": [ + "tslib" + ] + }, + "@types/chai@5.2.3": { + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dependencies": [ + "@types/deep-eql", + "assertion-error" + ] + }, + "@types/cookie@0.6.0": { + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==" + }, + "@types/deep-eql@4.0.2": { + "integrity": 
"sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==" + }, + "@types/deno@2.5.0": { + "integrity": "sha512-g8JS38vmc0S87jKsFzre+0ZyMOUDHPVokEJymSCRlL57h6f/FdKPWBXgdFh3Z8Ees9sz11qt9VWELU9Y9ZkiVw==" + }, + "@types/estree@1.0.8": { + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==" + }, + "@types/json-schema@7.0.15": { + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==" + }, + "@types/node@24.12.0": { + "integrity": "sha512-GYDxsZi3ChgmckRT9HPU0WEhKLP08ev/Yfcq2AstjrDASOYCSXeyjDsHg4v5t4jOj7cyDX3vmprafKlWIG9MXQ==", + "dependencies": [ + "undici-types" + ] + }, + "@types/trusted-types@2.0.7": { + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==" + }, + "@typescript-eslint/eslint-plugin@8.57.1_@typescript-eslint+parser@8.57.1__eslint@9.39.4__typescript@5.9.3_eslint@9.39.4_typescript@5.9.3": { + "integrity": "sha512-Gn3aqnvNl4NGc6x3/Bqk1AOn0thyTU9bqDRhiRnUWezgvr2OnhYCWCgC8zXXRVqBsIL1pSDt7T9nJUe0oM0kDQ==", + "dependencies": [ + "@eslint-community/regexpp", + "@typescript-eslint/parser", + "@typescript-eslint/scope-manager", + "@typescript-eslint/type-utils", + "@typescript-eslint/utils", + "@typescript-eslint/visitor-keys", + "eslint", + "ignore@7.0.5", + "natural-compare", + "ts-api-utils", + "typescript" + ] + }, + "@typescript-eslint/parser@8.57.1_eslint@9.39.4_typescript@5.9.3": { + "integrity": "sha512-k4eNDan0EIMTT/dUKc/g+rsJ6wcHYhNPdY19VoX/EOtaAG8DLtKCykhrUnuHPYvinn5jhAPgD2Qw9hXBwrahsw==", + "dependencies": [ + "@typescript-eslint/scope-manager", + "@typescript-eslint/types", + "@typescript-eslint/typescript-estree", + "@typescript-eslint/visitor-keys", + "debug", + "eslint", + "typescript" + ] + }, + "@typescript-eslint/project-service@8.57.1_typescript@5.9.3": { + "integrity": 
"sha512-vx1F37BRO1OftsYlmG9xay1TqnjNVlqALymwWVuYTdo18XuKxtBpCj1QlzNIEHlvlB27osvXFWptYiEWsVdYsg==", + "dependencies": [ + "@typescript-eslint/tsconfig-utils", + "@typescript-eslint/types", + "debug", + "typescript" + ] + }, + "@typescript-eslint/scope-manager@8.57.1": { + "integrity": "sha512-hs/QcpCwlwT2L5S+3fT6gp0PabyGk4Q0Rv2doJXA0435/OpnSR3VRgvrp8Xdoc3UAYSg9cyUjTeFXZEPg/3OKg==", + "dependencies": [ + "@typescript-eslint/types", + "@typescript-eslint/visitor-keys" + ] + }, + "@typescript-eslint/tsconfig-utils@8.57.1_typescript@5.9.3": { + "integrity": "sha512-0lgOZB8cl19fHO4eI46YUx2EceQqhgkPSuCGLlGi79L2jwYY1cxeYc1Nae8Aw1xjgW3PKVDLlr3YJ6Bxx8HkWg==", + "dependencies": [ + "typescript" + ] + }, + "@typescript-eslint/type-utils@8.57.1_eslint@9.39.4_typescript@5.9.3": { + "integrity": "sha512-+Bwwm0ScukFdyoJsh2u6pp4S9ktegF98pYUU0hkphOOqdMB+1sNQhIz8y5E9+4pOioZijrkfNO/HUJVAFFfPKA==", + "dependencies": [ + "@typescript-eslint/types", + "@typescript-eslint/typescript-estree", + "@typescript-eslint/utils", + "debug", + "eslint", + "ts-api-utils", + "typescript" + ] + }, + "@typescript-eslint/types@8.57.1": { + "integrity": "sha512-S29BOBPJSFUiblEl6RzPPjJt6w25A6XsBqRVDt53tA/tlL8q7ceQNZHTjPeONt/3S7KRI4quk+yP9jK2WjBiPQ==" + }, + "@typescript-eslint/typescript-estree@8.57.1_typescript@5.9.3": { + "integrity": "sha512-ybe2hS9G6pXpqGtPli9Gx9quNV0TWLOmh58ADlmZe9DguLq0tiAKVjirSbtM1szG6+QH6rVXyU6GTLQbWnMY+g==", + "dependencies": [ + "@typescript-eslint/project-service", + "@typescript-eslint/tsconfig-utils", + "@typescript-eslint/types", + "@typescript-eslint/visitor-keys", + "debug", + "minimatch@10.2.4", + "semver", + "tinyglobby", + "ts-api-utils", + "typescript" + ] + }, + "@typescript-eslint/utils@8.57.1_eslint@9.39.4_typescript@5.9.3": { + "integrity": "sha512-XUNSJ/lEVFttPMMoDVA2r2bwrl8/oPx8cURtczkSEswY5T3AeLmCy+EKWQNdL4u0MmAHOjcWrqJp2cdvgjn8dQ==", + "dependencies": [ + "@eslint-community/eslint-utils", + "@typescript-eslint/scope-manager", + "@typescript-eslint/types", + 
"@typescript-eslint/typescript-estree", + "eslint", + "typescript" + ] + }, + "@typescript-eslint/visitor-keys@8.57.1": { + "integrity": "sha512-YWnmJkXbofiz9KbnbbwuA2rpGkFPLbAIetcCNO6mJ8gdhdZ/v7WDXsoGFAJuM6ikUFKTlSQnjWnVO4ux+UzS6A==", + "dependencies": [ + "@typescript-eslint/types", + "eslint-visitor-keys@5.0.1" + ] + }, + "@vitest/expect@4.1.0": { + "integrity": "sha512-EIxG7k4wlWweuCLG9Y5InKFwpMEOyrMb6ZJ1ihYu02LVj/bzUwn2VMU+13PinsjRW75XnITeFrQBMH5+dLvCDA==", + "dependencies": [ + "@standard-schema/spec", + "@types/chai", + "@vitest/spy", + "@vitest/utils", + "chai", + "tinyrainbow" + ] + }, + "@vitest/mocker@4.1.0_vite@7.3.1__@types+node@24.12.0_@types+node@24.12.0": { + "integrity": "sha512-evxREh+Hork43+Y4IOhTo+h5lGmVRyjqI739Rz4RlUPqwrkFFDF6EMvOOYjTx4E8Tl6gyCLRL8Mu7Ry12a13Tw==", + "dependencies": [ + "@vitest/spy", + "estree-walker", + "magic-string", + "vite" + ], + "optionalPeers": [ + "vite" + ] + }, + "@vitest/pretty-format@4.1.0": { + "integrity": "sha512-3RZLZlh88Ib0J7NQTRATfc/3ZPOnSUn2uDBUoGNn5T36+bALixmzphN26OUD3LRXWkJu4H0s5vvUeqBiw+kS0A==", + "dependencies": [ + "tinyrainbow" + ] + }, + "@vitest/runner@4.1.0": { + "integrity": "sha512-Duvx2OzQ7d6OjchL+trw+aSrb9idh7pnNfxrklo14p3zmNL4qPCDeIJAK+eBKYjkIwG96Bc6vYuxhqDXQOWpoQ==", + "dependencies": [ + "@vitest/utils", + "pathe" + ] + }, + "@vitest/snapshot@4.1.0": { + "integrity": "sha512-0Vy9euT1kgsnj1CHttwi9i9o+4rRLEaPRSOJ5gyv579GJkNpgJK+B4HSv/rAWixx2wdAFci1X4CEPjiu2bXIMg==", + "dependencies": [ + "@vitest/pretty-format", + "@vitest/utils", + "magic-string", + "pathe" + ] + }, + "@vitest/spy@4.1.0": { + "integrity": "sha512-pz77k+PgNpyMDv2FV6qmk5ZVau6c3R8HC8v342T2xlFxQKTrSeYw9waIJG8KgV9fFwAtTu4ceRzMivPTH6wSxw==" + }, + "@vitest/utils@4.1.0": { + "integrity": "sha512-XfPXT6a8TZY3dcGY8EdwsBulFCIw+BeeX0RZn2x/BtiY/75YGh8FeWGG8QISN/WhaqSrE2OrlDgtF8q5uhOTmw==", + "dependencies": [ + "@vitest/pretty-format", + "convert-source-map", + "tinyrainbow" + ] + }, + "@webref/css@8.4.1_css-tree@3.2.1": { + "integrity": 
"sha512-8DTncc0dhWJ4lVbi9rhLVyMNm+YEYrsFLRbdjgMxPupjNHcAdXiT1s4ZWJXzN4ckUvYQKTjLJKtZWc6tsR4FIQ==", + "dependencies": [ + "css-tree" + ] + }, + "@xterm/xterm@6.0.0": { + "integrity": "sha512-TQwDdQGtwwDt+2cgKDLn0IRaSxYu1tSUjgKarSDkUM0ZNiSRXFpjxEsvc/Zgc5kq5omJ+V0a8/kIM2WD3sMOYg==" + }, + "acorn-jsx@5.3.2_acorn@8.16.0": { + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dependencies": [ + "acorn" + ] + }, + "acorn@8.16.0": { + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", + "bin": true + }, + "agent-base@7.1.4": { + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==" + }, + "ajv@6.14.0": { + "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==", + "dependencies": [ + "fast-deep-equal", + "fast-json-stable-stringify", + "json-schema-traverse", + "uri-js" + ] + }, + "ansi-styles@4.3.0": { + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": [ + "color-convert" + ] + }, + "argparse@2.0.1": { + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "aria-query@5.3.1": { + "integrity": "sha512-Z/ZeOgVl7bcSYZ/u/rh0fOpvEpq//LZmdbkXyc7syVzjPAhfOa9ebsdTSjEBDU4vs5nC98Kfduj1uFo0qyET3g==" + }, + "assertion-error@2.0.1": { + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==" + }, + "axobject-query@4.1.0": { + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==" + }, + "balanced-match@1.0.2": { + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "balanced-match@4.0.4": { + "integrity": 
"sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==" + }, + "bidi-js@1.0.3": { + "integrity": "sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==", + "dependencies": [ + "require-from-string" + ] + }, + "brace-expansion@1.1.12": { + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dependencies": [ + "balanced-match@1.0.2", + "concat-map" + ] + }, + "brace-expansion@5.0.4": { + "integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==", + "dependencies": [ + "balanced-match@4.0.4" + ] + }, + "callsites@3.1.0": { + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" + }, + "chai@6.2.2": { + "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==" + }, + "chalk@4.1.2": { + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": [ + "ansi-styles", + "supports-color" + ] + }, + "chokidar@4.0.3": { + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dependencies": [ + "readdirp@4.1.2" + ] + }, + "chokidar@5.0.0": { + "integrity": "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==", + "dependencies": [ + "readdirp@5.0.0" + ] + }, + "clsx@2.1.1": { + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==" + }, + "color-convert@2.0.1": { + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": [ + "color-name" + ] + }, + "color-name@1.1.4": { + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "concat-map@0.0.1": { + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "convert-source-map@2.0.0": { + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" + }, + "cookie@0.6.0": { + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==" + }, + "cross-spawn@7.0.6": { + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dependencies": [ + "path-key", + "shebang-command", + "which" + ] + }, + "css-tree@3.2.1": { + "integrity": "sha512-X7sjQzceUhu1u7Y/ylrRZFU2FS6LRiFVp6rKLPg23y3x3c3DOKAwuXGDp+PAGjh6CSnCjYeAul8pcT8bAl+lSA==", + "dependencies": [ + "mdn-data", + "source-map-js" + ] + }, + "cssesc@3.0.0": { + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "bin": true + }, + "cssstyle@5.3.7": { + "integrity": "sha512-7D2EPVltRrsTkhpQmksIu+LxeWAIEk6wRDMJ1qljlv+CKHJM+cJLlfhWIzNA44eAsHXSNe3+vO6DW1yCYx8SuQ==", + "dependencies": [ + "@asamuzakjp/css-color", + "@csstools/css-syntax-patches-for-csstree", + "css-tree", + "lru-cache" + ] + }, + "data-urls@6.0.1": { + "integrity": "sha512-euIQENZg6x8mj3fO6o9+fOW8MimUI4PpD/fZBhJfeioZVy9TUpM4UY7KjQNVZFlqwJ0UdzRDzkycB997HEq1BQ==", + "dependencies": [ + "whatwg-mimetype@5.0.0", + "whatwg-url" + ] + }, + "date-fns@4.1.0": { + "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==" + }, + "debug@4.4.3": { + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dependencies": [ + "ms" + ] + }, + "decimal.js@10.6.0": { + "integrity": 
"sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==" + }, + "dedent-js@1.0.1": { + "integrity": "sha512-OUepMozQULMLUmhxS95Vudo0jb0UchLimi3+pQ2plj61Fcy8axbP9hbiD4Sz6DPqn6XG3kfmziVfQ1rSys5AJQ==" + }, + "deep-is@0.1.4": { + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" + }, + "deepmerge@4.3.1": { + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==" + }, + "devalue@5.6.4": { + "integrity": "sha512-Gp6rDldRsFh/7XuouDbxMH3Mx8GMCcgzIb1pDTvNyn8pZGQ22u+Wa+lGV9dQCltFQ7uVw0MhRyb8XDskNFOReA==" + }, + "dotenv@17.3.1": { + "integrity": "sha512-IO8C/dzEb6O3F9/twg6ZLXz164a2fhTnEWb95H23Dm4OuN+92NmEAlTrupP9VW6Jm3sO26tQlqyvyi4CsnY9GA==" + }, + "entities@6.0.1": { + "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==" + }, + "es-module-lexer@2.0.0": { + "integrity": "sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==" + }, + "esbuild@0.27.7": { + "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "optionalDependencies": [ + "@esbuild/aix-ppc64", + "@esbuild/android-arm", + "@esbuild/android-arm64", + "@esbuild/android-x64", + "@esbuild/darwin-arm64", + "@esbuild/darwin-x64", + "@esbuild/freebsd-arm64", + "@esbuild/freebsd-x64", + "@esbuild/linux-arm", + "@esbuild/linux-arm64", + "@esbuild/linux-ia32", + "@esbuild/linux-loong64", + "@esbuild/linux-mips64el", + "@esbuild/linux-ppc64", + "@esbuild/linux-riscv64", + "@esbuild/linux-s390x", + "@esbuild/linux-x64", + "@esbuild/netbsd-arm64", + "@esbuild/netbsd-x64", + "@esbuild/openbsd-arm64", + "@esbuild/openbsd-x64", + "@esbuild/openharmony-arm64", + "@esbuild/sunos-x64", + "@esbuild/win32-arm64", + "@esbuild/win32-ia32", + "@esbuild/win32-x64" + ], + "scripts": true, + "bin": true + }, + 
"escape-string-regexp@4.0.0": { + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==" + }, + "eslint-plugin-svelte@3.15.2_eslint@9.39.4_svelte@5.55.2": { + "integrity": "sha512-k4Nsjs3bHujeEnnckoTM4mFYR1e8Mb9l2rTwNdmYiamA+Tjzn8X+2F+fuSP2w4VbXYhn2bmySyACQYdmUDW2Cg==", + "dependencies": [ + "@eslint-community/eslint-utils", + "@jridgewell/sourcemap-codec", + "eslint", + "esutils", + "globals@16.5.0", + "known-css-properties", + "postcss", + "postcss-load-config", + "postcss-safe-parser", + "semver", + "svelte", + "svelte-eslint-parser" + ], + "optionalPeers": [ + "svelte" + ] + }, + "eslint-scope@8.4.0": { + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dependencies": [ + "esrecurse", + "estraverse" + ] + }, + "eslint-visitor-keys@3.4.3": { + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==" + }, + "eslint-visitor-keys@4.2.1": { + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==" + }, + "eslint-visitor-keys@5.0.1": { + "integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==" + }, + "eslint@9.39.4": { + "integrity": "sha512-XoMjdBOwe/esVgEvLmNsD3IRHkm7fbKIUGvrleloJXUZgDHig2IPWNniv+GwjyJXzuNqVjlr5+4yVUZjycJwfQ==", + "dependencies": [ + "@eslint-community/eslint-utils", + "@eslint-community/regexpp", + "@eslint/config-array", + "@eslint/config-helpers", + "@eslint/core", + "@eslint/eslintrc", + "@eslint/js", + "@eslint/plugin-kit", + "@humanfs/node", + "@humanwhocodes/module-importer", + "@humanwhocodes/retry", + "@types/estree", + "ajv", + "chalk", + "cross-spawn", + "debug", + "escape-string-regexp", + "eslint-scope", + "eslint-visitor-keys@4.2.1", + "espree", + "esquery", + "esutils", + "fast-deep-equal", + "file-entry-cache", + "find-up", + "glob-parent", + 
"ignore@5.3.2", + "imurmurhash", + "is-glob", + "json-stable-stringify-without-jsonify", + "lodash.merge", + "minimatch@3.1.5", + "natural-compare", + "optionator" + ], + "bin": true + }, + "esm-env@1.2.2": { + "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==" + }, + "espree@10.4.0": { + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dependencies": [ + "acorn", + "acorn-jsx", + "eslint-visitor-keys@4.2.1" + ] + }, + "esquery@1.7.0": { + "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==", + "dependencies": [ + "estraverse" + ] + }, + "esrap@2.2.4": { + "integrity": "sha512-suICpxAmZ9A8bzJjEl/+rLJiDKC0X4gYWUxT6URAWBLvlXmtbZd5ySMu/N2ZGEtMCAmflUDPSehrP9BQcsGcSg==", + "dependencies": [ + "@jridgewell/sourcemap-codec", + "@typescript-eslint/types" + ] + }, + "esrecurse@4.3.0": { + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dependencies": [ + "estraverse" + ] + }, + "estraverse@5.3.0": { + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" + }, + "estree-walker@3.0.3": { + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": [ + "@types/estree" + ] + }, + "esutils@2.0.3": { + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" + }, + "expect-type@1.3.0": { + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==" + }, + "fast-deep-equal@3.1.3": { + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "fast-json-stable-stringify@2.1.0": { + "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "fast-levenshtein@2.0.6": { + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" + }, + "fdir@6.5.0_picomatch@4.0.3": { + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dependencies": [ + "picomatch" + ], + "optionalPeers": [ + "picomatch" + ] + }, + "file-entry-cache@8.0.0": { + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dependencies": [ + "flat-cache" + ] + }, + "find-up@5.0.0": { + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dependencies": [ + "locate-path", + "path-exists" + ] + }, + "flat-cache@4.0.1": { + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dependencies": [ + "flatted", + "keyv" + ] + }, + "flatted@3.4.1": { + "integrity": "sha512-IxfVbRFVlV8V/yRaGzk0UVIcsKKHMSfYw66T/u4nTwlWteQePsxe//LjudR1AMX4tZW3WFCh3Zqa/sjlqpbURQ==" + }, + "fsevents@2.3.3": { + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "os": ["darwin"], + "scripts": true + }, + "glob-parent@6.0.2": { + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dependencies": [ + "is-glob" + ] + }, + "globals@14.0.0": { + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==" + }, + "globals@16.5.0": { + "integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==" + }, + "has-flag@4.0.0": { + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" + }, + "hono@4.12.8": { + 
"integrity": "sha512-VJCEvtrezO1IAR+kqEYnxUOoStaQPGrCmX3j4wDTNOcD1uRPFpGlwQUIW8niPuvHXaTUxeOUl5MMDGrl+tmO9A==" + }, + "html-encoding-sniffer@6.0.0": { + "integrity": "sha512-CV9TW3Y3f8/wT0BRFc1/KAVQ3TUHiXmaAb6VW9vtiMFf7SLoMd1PdAc4W3KFOFETBJUb90KatHqlsZMWV+R9Gg==", + "dependencies": [ + "@exodus/bytes" + ] + }, + "http-proxy-agent@7.0.2": { + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dependencies": [ + "agent-base", + "debug" + ] + }, + "https-proxy-agent@7.0.6": { + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "dependencies": [ + "agent-base", + "debug" + ] + }, + "ignore@5.3.2": { + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==" + }, + "ignore@7.0.5": { + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==" + }, + "import-fresh@3.3.1": { + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dependencies": [ + "parent-module", + "resolve-from" + ] + }, + "imurmurhash@0.1.4": { + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==" + }, + "is-extglob@2.1.1": { + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==" + }, + "is-glob@4.0.3": { + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dependencies": [ + "is-extglob" + ] + }, + "is-potential-custom-element-name@1.0.1": { + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==" + }, + "is-reference@3.0.3": { + "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", + "dependencies": [ + "@types/estree" + 
] + }, + "isexe@2.0.0": { + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "js-yaml@4.1.1": { + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "dependencies": [ + "argparse" + ], + "bin": true + }, + "jsdom@27.4.0": { + "integrity": "sha512-mjzqwWRD9Y1J1KUi7W97Gja1bwOOM5Ug0EZ6UDK3xS7j7mndrkwozHtSblfomlzyB4NepioNt+B2sOSzczVgtQ==", + "dependencies": [ + "@acemir/cssom", + "@asamuzakjp/dom-selector", + "@exodus/bytes", + "cssstyle", + "data-urls", + "decimal.js", + "html-encoding-sniffer", + "http-proxy-agent", + "https-proxy-agent", + "is-potential-custom-element-name", + "parse5", + "saxes", + "symbol-tree", + "tough-cookie", + "w3c-xmlserializer", + "webidl-conversions", + "whatwg-mimetype@4.0.0", + "whatwg-url", + "ws", + "xml-name-validator" + ] + }, + "json-buffer@3.0.1": { + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==" + }, + "json-schema-to-ts@3.1.1": { + "integrity": "sha512-+DWg8jCJG2TEnpy7kOm/7/AxaYoaRbjVB4LFZLySZlWn8exGs3A4OLJR966cVvU26N7X9TWxl+Jsw7dzAqKT6g==", + "dependencies": [ + "@babel/runtime", + "ts-algebra" + ] + }, + "json-schema-traverse@0.4.1": { + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "json-stable-stringify-without-jsonify@1.0.1": { + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==" + }, + "keyv@4.5.4": { + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dependencies": [ + "json-buffer" + ] + }, + "kleur@4.1.5": { + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==" + }, + "known-css-properties@0.37.0": { + "integrity": 
"sha512-JCDrsP4Z1Sb9JwG0aJ8Eo2r7k4Ou5MwmThS/6lcIe1ICyb7UBJKGRIUUdqc2ASdE/42lgz6zFUnzAIhtXnBVrQ==" + }, + "levn@0.4.1": { + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dependencies": [ + "prelude-ls", + "type-check" + ] + }, + "lilconfig@2.1.0": { + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==" + }, + "locate-character@3.0.0": { + "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==" + }, + "locate-path@6.0.0": { + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dependencies": [ + "p-locate" + ] + }, + "lodash.merge@4.6.2": { + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, + "lru-cache@11.2.7": { + "integrity": "sha512-aY/R+aEsRelme17KGQa/1ZSIpLpNYYrhcrepKTZgE+W3WM16YMCaPwOHLHsmopZHELU0Ojin1lPVxKR0MihncA==" + }, + "magic-string@0.30.21": { + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dependencies": [ + "@jridgewell/sourcemap-codec" + ] + }, + "mdn-data@2.27.1": { + "integrity": "sha512-9Yubnt3e8A0OKwxYSXyhLymGW4sCufcLG6VdiDdUGVkPhpqLxlvP5vl1983gQjJl3tqbrM731mjaZaP68AgosQ==" + }, + "minimatch@10.2.4": { + "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", + "dependencies": [ + "brace-expansion@5.0.4" + ] + }, + "minimatch@3.1.5": { + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "dependencies": [ + "brace-expansion@1.1.12" + ] + }, + "mri@1.2.0": { + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==" + }, + "mrmime@2.0.1": { + "integrity": 
"sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==" + }, + "ms@2.1.3": { + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "nanoid@3.3.11": { + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "bin": true + }, + "natural-compare@1.4.0": { + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==" + }, + "obug@2.1.1": { + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==" + }, + "ollama@0.6.3": { + "integrity": "sha512-KEWEhIqE5wtfzEIZbDCLH51VFZ6Z3ZSa6sIOg/E/tBV8S51flyqBOXi+bRxlOYKDf8i327zG9eSTb8IJxvm3Zg==", + "dependencies": [ + "whatwg-fetch" + ] + }, + "openai@6.29.0_zod@4.3.6": { + "integrity": "sha512-YxoArl2BItucdO89/sN6edksV0x47WUTgkgVfCgX7EuEMhbirENsgYe5oO4LTjBL9PtdKtk2WqND1gSLcTd2yw==", + "dependencies": [ + "zod" + ], + "optionalPeers": [ + "zod" + ], + "bin": true + }, + "optionator@0.9.4": { + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dependencies": [ + "deep-is", + "fast-levenshtein", + "levn", + "prelude-ls", + "type-check", + "word-wrap" + ] + }, + "oxc-parser@0.99.0": { + "integrity": "sha512-MpS1lbd2vR0NZn1v0drpgu7RUFu3x9Rd0kxExObZc2+F+DIrV0BOMval/RO3BYGwssIOerII6iS8EbbpCCZQpQ==", + "dependencies": [ + "@oxc-project/types" + ], + "optionalDependencies": [ + "@oxc-parser/binding-android-arm64", + "@oxc-parser/binding-darwin-arm64", + "@oxc-parser/binding-darwin-x64", + "@oxc-parser/binding-freebsd-x64", + "@oxc-parser/binding-linux-arm-gnueabihf", + "@oxc-parser/binding-linux-arm-musleabihf", + "@oxc-parser/binding-linux-arm64-gnu", + "@oxc-parser/binding-linux-arm64-musl", + "@oxc-parser/binding-linux-riscv64-gnu", + "@oxc-parser/binding-linux-s390x-gnu", + 
"@oxc-parser/binding-linux-x64-gnu", + "@oxc-parser/binding-linux-x64-musl", + "@oxc-parser/binding-wasm32-wasi", + "@oxc-parser/binding-win32-arm64-msvc", + "@oxc-parser/binding-win32-x64-msvc" + ] + }, + "p-limit@3.1.0": { + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dependencies": [ + "yocto-queue" + ] + }, + "p-locate@5.0.0": { + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dependencies": [ + "p-limit" + ] + }, + "parent-module@1.0.1": { + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dependencies": [ + "callsites" + ] + }, + "parse5@8.0.0": { + "integrity": "sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==", + "dependencies": [ + "entities" + ] + }, + "path-exists@4.0.0": { + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" + }, + "path-key@3.1.1": { + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + }, + "pathe@2.0.3": { + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, + "picocolors@1.1.1": { + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==" + }, + "picomatch@4.0.3": { + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==" + }, + "postcss-load-config@3.1.4_postcss@8.5.8": { + "integrity": "sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==", + "dependencies": [ + "lilconfig", + "postcss", + "yaml" + ], + "optionalPeers": [ + "postcss" + ] + }, + "postcss-safe-parser@7.0.1_postcss@8.5.8": { + "integrity": 
"sha512-0AioNCJZ2DPYz5ABT6bddIqlhgwhpHZ/l65YAYo0BCIn0xiDpsnTHz0gnoTGk0OXZW0JRs+cDwL8u/teRdz+8A==", + "dependencies": [ + "postcss" + ] + }, + "postcss-scss@4.0.9_postcss@8.5.8": { + "integrity": "sha512-AjKOeiwAitL/MXxQW2DliT28EKukvvbEWx3LBmJIRN8KfBGZbRTxNYW0kSqi1COiTZ57nZ9NW06S6ux//N1c9A==", + "dependencies": [ + "postcss" + ] + }, + "postcss-selector-parser@7.1.1": { + "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", + "dependencies": [ + "cssesc", + "util-deprecate" + ] + }, + "postcss@8.5.8": { + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", + "dependencies": [ + "nanoid", + "picocolors", + "source-map-js" + ] + }, + "prelude-ls@1.2.1": { + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==" + }, + "prettier-plugin-svelte@3.5.1_prettier@3.8.1_svelte@5.55.2": { + "integrity": "sha512-65+fr5+cgIKWKiqM1Doum4uX6bY8iFCdztvvp2RcF+AJoieaw9kJOFMNcJo/bkmKYsxFaM9OsVZK/gWauG/5mg==", + "dependencies": [ + "prettier", + "svelte" + ] + }, + "prettier@3.8.1": { + "integrity": "sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==", + "bin": true + }, + "punycode@2.3.1": { + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==" + }, + "readdirp@4.1.2": { + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==" + }, + "readdirp@5.0.0": { + "integrity": "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==" + }, + "require-from-string@2.0.2": { + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" + }, + "resolve-from@4.0.0": { + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" + 
}, + "rollup@4.59.0": { + "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==", + "dependencies": [ + "@types/estree" + ], + "optionalDependencies": [ + "@rollup/rollup-android-arm-eabi", + "@rollup/rollup-android-arm64", + "@rollup/rollup-darwin-arm64", + "@rollup/rollup-darwin-x64", + "@rollup/rollup-freebsd-arm64", + "@rollup/rollup-freebsd-x64", + "@rollup/rollup-linux-arm-gnueabihf", + "@rollup/rollup-linux-arm-musleabihf", + "@rollup/rollup-linux-arm64-gnu", + "@rollup/rollup-linux-arm64-musl", + "@rollup/rollup-linux-loong64-gnu", + "@rollup/rollup-linux-loong64-musl", + "@rollup/rollup-linux-ppc64-gnu", + "@rollup/rollup-linux-ppc64-musl", + "@rollup/rollup-linux-riscv64-gnu", + "@rollup/rollup-linux-riscv64-musl", + "@rollup/rollup-linux-s390x-gnu", + "@rollup/rollup-linux-x64-gnu", + "@rollup/rollup-linux-x64-musl", + "@rollup/rollup-openbsd-x64", + "@rollup/rollup-openharmony-arm64", + "@rollup/rollup-win32-arm64-msvc", + "@rollup/rollup-win32-ia32-msvc", + "@rollup/rollup-win32-x64-gnu", + "@rollup/rollup-win32-x64-msvc", + "fsevents" + ], + "bin": true + }, + "sade@1.8.1": { + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "dependencies": [ + "mri" + ] + }, + "saxes@6.0.0": { + "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==", + "dependencies": [ + "xmlchars" + ] + }, + "scule@1.3.0": { + "integrity": "sha512-6FtHJEvt+pVMIB9IBY+IcCJ6Z5f1iQnytgyfKMhDKgmzYG+TeH/wx1y3l27rshSbLiSanrR9ffZDrEsmjlQF2g==" + }, + "semver@7.7.4": { + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "bin": true + }, + "set-cookie-parser@3.0.1": { + "integrity": "sha512-n7Z7dXZhJbwuAHhNzkTti6Aw9QDDjZtm3JTpTGATIdNzdQz5GuFs22w90BcvF4INfnrL5xrX3oGsuqO5Dx3A1Q==" + }, + "shebang-command@2.0.0": { + "integrity": 
"sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": [ + "shebang-regex" + ] + }, + "shebang-regex@3.0.0": { + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + }, + "siginfo@2.0.0": { + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==" + }, + "sirv@3.0.2": { + "integrity": "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==", + "dependencies": [ + "@polka/url", + "mrmime", + "totalist" + ] + }, + "source-map-js@1.2.1": { + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==" + }, + "stackback@0.0.2": { + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==" + }, + "std-env@4.0.0": { + "integrity": "sha512-zUMPtQ/HBY3/50VbpkupYHbRroTRZJPRLvreamgErJVys0ceuzMkD44J/QjqhHjOzK42GQ3QZIeFG1OYfOtKqQ==" + }, + "strip-json-comments@3.1.1": { + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==" + }, + "supports-color@7.2.0": { + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": [ + "has-flag" + ] + }, + "svelte-check@4.4.6_svelte@5.55.2_typescript@5.9.3": { + "integrity": "sha512-kP1zG81EWaFe9ZyTv4ZXv44Csi6Pkdpb7S3oj6m+K2ec/IcDg/a8LsFsnVLqm2nxtkSwsd5xPj/qFkTBgXHXjg==", + "dependencies": [ + "@jridgewell/trace-mapping", + "chokidar@4.0.3", + "fdir", + "picocolors", + "sade", + "svelte", + "typescript" + ], + "bin": true + }, + "svelte-eslint-parser@1.6.0_svelte@5.55.2": { + "integrity": "sha512-qoB1ehychT6OxEtQAqc/guSqLS20SlA53Uijl7x375s8nlUT0lb9ol/gzraEEatQwsyPTJo87s2CmKL9Xab+Uw==", + "dependencies": [ + "eslint-scope", + "eslint-visitor-keys@4.2.1", + "espree", + "postcss", + "postcss-scss", + 
"postcss-selector-parser", + "semver", + "svelte" + ], + "optionalPeers": [ + "svelte" + ] + }, + "svelte2tsx@0.7.52_svelte@5.55.2_typescript@5.9.3": { + "integrity": "sha512-svdT1FTrCLpvlU62evO5YdJt/kQ7nxgQxII/9BpQUvKr+GJRVdAXNVw8UWOt0fhoe5uWKyU0WsUTMRVAtRbMQg==", + "dependencies": [ + "dedent-js", + "scule", + "svelte", + "typescript" + ] + }, + "svelte@5.55.2": { + "integrity": "sha512-z41M/hi0ZPTzrwVKLvB/R1/Oo08gL1uIib8HZ+FncqxxtY9MLb01emg2fqk+WLZ/lNrrtNDFh7BZLDxAHvMgLw==", + "dependencies": [ + "@jridgewell/remapping", + "@jridgewell/sourcemap-codec", + "@sveltejs/acorn-typescript", + "@types/estree", + "@types/trusted-types", + "acorn", + "aria-query", + "axobject-query", + "clsx", + "devalue", + "esm-env", + "esrap", + "is-reference", + "locate-character", + "magic-string", + "zimmerframe" + ] + }, + "symbol-tree@3.2.4": { + "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==" + }, + "tinybench@2.9.0": { + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==" + }, + "tinyexec@1.0.4": { + "integrity": "sha512-u9r3uZC0bdpGOXtlxUIdwf9pkmvhqJdrVCH9fapQtgy/OeTTMZ1nqH7agtvEfmGui6e1XxjcdrlxvxJvc3sMqw==" + }, + "tinyglobby@0.2.15": { + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dependencies": [ + "fdir", + "picomatch" + ] + }, + "tinyrainbow@3.1.0": { + "integrity": "sha512-Bf+ILmBgretUrdJxzXM0SgXLZ3XfiaUuOj/IKQHuTXip+05Xn+uyEYdVg0kYDipTBcLrCVyUzAPz7QmArb0mmw==" + }, + "tldts-core@7.0.26": { + "integrity": "sha512-5WJ2SqFsv4G2Dwi7ZFVRnz6b2H1od39QME1lc2y5Ew3eWiZMAeqOAfWpRP9jHvhUl881406QtZTODvjttJs+ew==" + }, + "tldts@7.0.26": { + "integrity": "sha512-WiGwQjr0qYdNNG8KpMKlSvpxz652lqa3Rd+/hSaDcY4Uo6SKWZq2LAF+hsAhUewTtYhXlorBKgNF3Kk8hnjGoQ==", + "dependencies": [ + "tldts-core" + ], + "bin": true + }, + "totalist@3.0.1": { + "integrity": 
"sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==" + }, + "tough-cookie@6.0.1": { + "integrity": "sha512-LktZQb3IeoUWB9lqR5EWTHgW/VTITCXg4D21M+lvybRVdylLrRMnqaIONLVb5mav8vM19m44HIcGq4qASeu2Qw==", + "dependencies": [ + "tldts" + ] + }, + "tr46@6.0.0": { + "integrity": "sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw==", + "dependencies": [ + "punycode" + ] + }, + "ts-algebra@2.0.0": { + "integrity": "sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw==" + }, + "ts-api-utils@2.4.0_typescript@5.9.3": { + "integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==", + "dependencies": [ + "typescript" + ] + }, + "ts-blank-space@0.6.2": { + "integrity": "sha512-hZjcHdHrveEKI67v8OzI90a1bizgoDkY7ekE4fH89qJhZgxvmjfBOv98aibCU7OpKbvV3R9p/qd3DrzZqT1cFQ==", + "dependencies": [ + "typescript" + ] + }, + "tslib@2.8.1": { + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" + }, + "type-check@0.4.0": { + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dependencies": [ + "prelude-ls" + ] + }, + "typescript-eslint@8.57.1_eslint@9.39.4_typescript@5.9.3": { + "integrity": "sha512-fLvZWf+cAGw3tqMCYzGIU6yR8K+Y9NT2z23RwOjlNFF2HwSB3KhdEFI5lSBv8tNmFkkBShSjsCjzx1vahZfISA==", + "dependencies": [ + "@typescript-eslint/eslint-plugin", + "@typescript-eslint/parser", + "@typescript-eslint/typescript-estree", + "@typescript-eslint/utils", + "eslint", + "typescript" + ] + }, + "typescript@5.9.3": { + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "bin": true + }, + "undici-types@7.16.0": { + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==" + }, + "uri-js@4.4.1": { 
+ "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dependencies": [ + "punycode" + ] + }, + "util-deprecate@1.0.2": { + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "vite@7.3.1_@types+node@24.12.0": { + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "dependencies": [ + "@types/node", + "esbuild", + "fdir", + "picomatch", + "postcss", + "rollup", + "tinyglobby" + ], + "optionalDependencies": [ + "fsevents" + ], + "optionalPeers": [ + "@types/node" + ], + "bin": true + }, + "vitefu@1.1.2_vite@7.3.1__@types+node@24.12.0_@types+node@24.12.0": { + "integrity": "sha512-zpKATdUbzbsycPFBN71nS2uzBUQiVnFoOrr2rvqv34S1lcAgMKKkjWleLGeiJlZ8lwCXvtWaRn7R3ZC16SYRuw==", + "dependencies": [ + "vite" + ], + "optionalPeers": [ + "vite" + ] + }, + "vitest@4.1.0_@types+node@24.12.0_jsdom@27.4.0_vite@7.3.1__@types+node@24.12.0": { + "integrity": "sha512-YbDrMF9jM2Lqc++2530UourxZHmkKLxrs4+mYhEwqWS97WJ7wOYEkcr+QfRgJ3PW9wz3odRijLZjHEaRLTNbqw==", + "dependencies": [ + "@types/node", + "@vitest/expect", + "@vitest/mocker", + "@vitest/pretty-format", + "@vitest/runner", + "@vitest/snapshot", + "@vitest/spy", + "@vitest/utils", + "es-module-lexer", + "expect-type", + "jsdom", + "magic-string", + "obug", + "pathe", + "picomatch", + "std-env", + "tinybench", + "tinyexec", + "tinyglobby", + "tinyrainbow", + "vite", + "why-is-node-running" + ], + "optionalPeers": [ + "@types/node", + "jsdom" + ], + "bin": true + }, + "w3c-xmlserializer@5.0.0": { + "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==", + "dependencies": [ + "xml-name-validator" + ] + }, + "webidl-conversions@8.0.1": { + "integrity": "sha512-BMhLD/Sw+GbJC21C/UgyaZX41nPt8bUTg+jWyDeg7e7YN4xOM05YPSIXceACnXVtqyEw/LMClUQMtMZ+PGGpqQ==" + }, + "whatwg-fetch@3.6.20": { + "integrity": 
"sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==" + }, + "whatwg-mimetype@4.0.0": { + "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==" + }, + "whatwg-mimetype@5.0.0": { + "integrity": "sha512-sXcNcHOC51uPGF0P/D4NVtrkjSU2fNsm9iog4ZvZJsL3rjoDAzXZhkm2MWt1y+PUdggKAYVoMAIYcs78wJ51Cw==" + }, + "whatwg-url@15.1.0": { + "integrity": "sha512-2ytDk0kiEj/yu90JOAp44PVPUkO9+jVhyf+SybKlRHSDlvOOZhdPIrr7xTH64l4WixO2cP+wQIcgujkGBPPz6g==", + "dependencies": [ + "tr46", + "webidl-conversions" + ] + }, + "which@2.0.2": { + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": [ + "isexe" + ], + "bin": true + }, + "why-is-node-running@2.3.0": { + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dependencies": [ + "siginfo", + "stackback" + ], + "bin": true + }, + "word-wrap@1.2.5": { + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==" + }, + "ws@8.19.0": { + "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==" + }, + "xml-name-validator@5.0.0": { + "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==" + }, + "xmlchars@2.2.0": { + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==" + }, + "yaml@1.10.2": { + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" + }, + "yocto-queue@0.1.0": { + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==" + }, + "zimmerframe@1.1.4": { + "integrity": "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==" + }, + 
"zod@4.3.6": { + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==" + } + }, + "workspace": { + "dependencies": [ + "npm:@anthropic-ai/sdk@~0.71.2", + "npm:@electric-sql/pglite@0.3", + "npm:@fuzdev/blake3_wasm@~0.1.1", + "npm:@fuzdev/fuz_app@0.12", + "npm:@fuzdev/fuz_util@0.55", + "npm:@fuzdev/gro@~0.197.3", + "npm:@google/generative-ai@~0.24.1", + "npm:date-fns@4", + "npm:esm-env@1", + "npm:hono@4", + "npm:ollama@0.6", + "npm:openai@^6.10.0", + "npm:svelte@5", + "npm:zod@4" + ], + "packageJson": { + "dependencies": [ + "npm:@anthropic-ai/sdk@~0.71.2", + "npm:@changesets/changelog-git@~0.2.1", + "npm:@electric-sql/pglite@~0.3.16", + "npm:@fuzdev/blake3_wasm@~0.1.1", + "npm:@fuzdev/fuz_app@0.12", + "npm:@fuzdev/fuz_code@~0.45.1", + "npm:@fuzdev/fuz_css@0.58", + "npm:@fuzdev/fuz_ui@~0.191.4", + "npm:@fuzdev/fuz_util@0.56", + "npm:@fuzdev/gro@~0.197.3", + "npm:@google/generative-ai@~0.24.1", + "npm:@jridgewell/trace-mapping@~0.3.31", + "npm:@node-rs/argon2@^2.0.2", + "npm:@ryanatkn/eslint-config@~0.10.1", + "npm:@sveltejs/acorn-typescript@^1.0.9", + "npm:@sveltejs/adapter-static@^3.0.10", + "npm:@sveltejs/kit@^2.57.1", + "npm:@sveltejs/vite-plugin-svelte@^6.2.4", + "npm:@types/deno@^2.5.0", + "npm:@types/estree@^1.0.8", + "npm:@types/node@^24.10.1", + "npm:@webref/css@^8.2.0", + "npm:@xterm/xterm@6", + "npm:date-fns@^4.1.0", + "npm:eslint-plugin-svelte@^3.13.1", + "npm:eslint@^9.39.1", + "npm:esm-env@^1.2.2", + "npm:hono@^4.12.7", + "npm:jsdom@^27.2.0", + "npm:magic-string@~0.30.21", + "npm:ollama@~0.6.3", + "npm:openai@^6.10.0", + "npm:pg@^8.20.0", + "npm:prettier-plugin-svelte@^3.4.1", + "npm:prettier@^3.7.4", + "npm:svelte-check@^4.4.5", + "npm:svelte2tsx@~0.7.52", + "npm:svelte@^5.55.3", + "npm:tslib@^2.8.1", + "npm:typescript-eslint@^8.48.1", + "npm:typescript@^5.9.3", + "npm:vite@^7.3.1", + "npm:vitest@^4.0.15", + "npm:zimmerframe@^1.1.4", + "npm:zod@^4.3.6" + ] + } + } +} diff --git 
a/docs/architecture.md b/docs/architecture.md index 9a3b059a6..73ff2c278 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -12,52 +12,52 @@ Every action is a plain object with Zod schemas. Defined in `src/lib/action_spec ```typescript export const completion_create_action_spec = { - method: 'completion_create', - kind: 'request_response', - initiator: 'frontend', - auth: 'public', - side_effects: true, - input: z.strictObject({ - completion_request: CompletionRequest, - _meta: z.looseObject({progressToken: Uuid.optional()}).optional(), - }), - output: z.strictObject({ - completion_response: CompletionResponse, - _meta: z.looseObject({progressToken: Uuid.optional()}).optional(), - }), - async: true, + method: 'completion_create', + kind: 'request_response', + initiator: 'frontend', + auth: 'public', + side_effects: true, + input: z.strictObject({ + completion_request: CompletionRequest, + _meta: z.looseObject({progressToken: Uuid.optional()}).optional(), + }), + output: z.strictObject({ + completion_response: CompletionResponse, + _meta: z.looseObject({progressToken: Uuid.optional()}).optional(), + }), + async: true, } satisfies ActionSpecUnion; ``` ### Action Kinds -| Kind | Phases | Transport | Use | -|------|--------|-----------|-----| -| `request_response` | `send_request` → `receive_request` → `send_response` → `receive_response` | HTTP or WebSocket | Standard RPC | -| `remote_notification` | `send` → `receive` | WebSocket only | Streaming progress (backend → frontend) | -| `local_call` | `execute` | None | Frontend-only UI actions | +| Kind | Phases | Transport | Use | +| --------------------- | ------------------------------------------------------------------------- | ----------------- | --------------------------------------- | +| `request_response` | `send_request` → `receive_request` → `send_response` → `receive_response` | HTTP or WebSocket | Standard RPC | +| `remote_notification` | `send` → `receive` | WebSocket only | Streaming progress 
(backend → frontend) | +| `local_call` | `execute` | None | Frontend-only UI actions | ### Action Spec Fields -| Field | Type | Values | -|-------|------|--------| -| `method` | `string` | Action name (e.g. `'completion_create'`) | -| `kind` | `ActionKind` | `'request_response'` \| `'remote_notification'` \| `'local_call'` | -| `initiator` | `ActionInitiator` | `'frontend'` \| `'backend'` \| `'both'` | -| `auth` | `ActionAuth \| null` | `'public'` \| `'authorize'` \| `null` | -| `side_effects` | `boolean \| null` | Whether action mutates state | -| `input` | `z.ZodType` | Zod schema for request params | -| `output` | `z.ZodType` | Zod schema for response | -| `async` | `boolean` | Whether handler is async | +| Field | Type | Values | +| -------------- | -------------------- | ----------------------------------------------------------------- | +| `method` | `string` | Action name (e.g. `'completion_create'`) | +| `kind` | `ActionKind` | `'request_response'` \| `'remote_notification'` \| `'local_call'` | +| `initiator` | `ActionInitiator` | `'frontend'` \| `'backend'` \| `'both'` | +| `auth` | `ActionAuth \| null` | `'public'` \| `'authenticate'` \| `null` | +| `side_effects` | `boolean \| null` | Whether action mutates state | +| `input` | `z.ZodType` | Zod schema for request params | +| `output` | `z.ZodType` | Zod schema for response | +| `async` | `boolean` | Whether handler is async | ### Core Components -| Component | File | Purpose | -|-----------|------|---------| -| `ActionSpec` | `action_spec.ts` | Action metadata schema | -| `ActionEvent` | `action_event.ts` | Lifecycle state machine (initial → parsed → handling → handled/failed) | -| `ActionPeer` | `action_peer.ts` | Send/receive on both sides | -| `ActionRegistry` | `action_registry.ts` | Type-safe action lookup | +| Component | File | Purpose | +| ---------------- | -------------------- | ---------------------------------------------------------------------- | +| `ActionSpec` | `action_spec.ts` | Action 
metadata schema | +| `ActionEvent` | `action_event.ts` | Lifecycle state machine (initial → parsed → handling → handled/failed) | +| `ActionPeer` | `action_peer.ts` | Send/receive on both sides | +| `ActionRegistry` | `action_registry.ts` | Type-safe action lookup | ### Action Event Lifecycle @@ -77,37 +77,42 @@ Frontend and backend register handlers per action per phase: ```typescript // Frontend (frontend_action_handlers.ts) export const frontend_action_handlers: FrontendActionHandlers = { - completion_create: { - send_request: ({data: {input}}) => { - console.log('sending prompt:', input.completion_request.prompt); - }, - receive_response: ({app, data: {input, output}}) => { - const progress_token = input._meta?.progressToken; - if (progress_token) { - const turn = app.cell_registry.all.get(progress_token); - if (turn instanceof Turn) { - turn.content = to_completion_response_text(output.completion_response) || ''; - turn.response = output.completion_response; - } - } - }, - receive_error: ({data: {error}}) => { - console.error('completion failed:', error); - }, - }, + completion_create: { + send_request: ({data: {input}}) => { + console.log('sending prompt:', input.completion_request.prompt); + }, + receive_response: ({app, data: {input, output}}) => { + const progress_token = input._meta?.progressToken; + if (progress_token) { + const turn = app.cell_registry.all.get(progress_token); + if (turn instanceof Turn) { + turn.content = to_completion_response_text(output.completion_response) || ''; + turn.response = output.completion_response; + } + } + }, + receive_error: ({data: {error}}) => { + console.error('completion failed:', error); + }, + }, }; -// Backend (server/backend_action_handlers.ts) -export const backend_action_handlers: BackendActionHandlers = { - completion_create: { - receive_request: async ({backend, data: {input}}) => { - const {prompt, provider_name, model, completion_messages} = input.completion_request; - const progress_token = 
input._meta?.progressToken; - const provider = backend.lookup_provider(provider_name); - const handler = provider.get_handler(!!progress_token); - return await handler({model, prompt, completion_messages, completion_options, progress_token}); - }, - }, +// Backend (server/zzz_action_handlers.ts) +// Unified handler — called by both HTTP RPC and WebSocket paths +export const zzz_action_handlers: Record = { + completion_create: async (input, ctx) => { + const {prompt, provider_name, model, completion_messages} = input.completion_request; + const progress_token = input._meta?.progressToken; + const provider = ctx.backend.lookup_provider(provider_name); + const handler = provider.get_handler(!!progress_token); + return await handler({ + model, + prompt, + completion_messages, + completion_options, + progress_token, + }); + }, }; ``` @@ -117,10 +122,10 @@ Actions are transport-agnostic via the `Transport` interface (`transports.ts`): ```typescript interface Transport { - transport_name: TransportName; - send(message: JsonrpcRequest): Promise; - send(message: JsonrpcNotification): Promise; - is_ready: () => boolean; + transport_name: TransportName; + send(message: JsonrpcRequest): Promise; + send(message: JsonrpcNotification): Promise; + is_ready: () => boolean; } ``` @@ -139,28 +144,28 @@ MCP-compatible subset, no batching: ### All 20 Actions -| Method | Kind | Initiator | Purpose | -|--------|------|-----------|---------| -| `ping` | `request_response` | `both` | Health check | -| `session_load` | `request_response` | `frontend` | Load initial session data | -| `filer_change` | `remote_notification` | `backend` | File system change notification | -| `diskfile_update` | `request_response` | `frontend` | Write file content | -| `diskfile_delete` | `request_response` | `frontend` | Delete a file | -| `directory_create` | `request_response` | `frontend` | Create a directory | -| `completion_create` | `request_response` | `frontend` | Start AI completion | -| 
`completion_progress` | `remote_notification` | `backend` | Stream completion chunks | -| `ollama_progress` | `remote_notification` | `backend` | Model operation progress | -| `toggle_main_menu` | `local_call` | `frontend` | Toggle main menu UI | -| `ollama_list` | `request_response` | `frontend` | List local models | -| `ollama_ps` | `request_response` | `frontend` | List running models | -| `ollama_show` | `request_response` | `frontend` | Show model details | -| `ollama_pull` | `request_response` | `frontend` | Pull model | -| `ollama_delete` | `request_response` | `frontend` | Delete model | -| `ollama_copy` | `request_response` | `frontend` | Copy model | -| `ollama_create` | `request_response` | `frontend` | Create model | -| `ollama_unload` | `request_response` | `frontend` | Unload model from memory | -| `provider_load_status` | `request_response` | `frontend` | Check provider availability | -| `provider_update_api_key` | `request_response` | `frontend` | Update provider API key | +| Method | Kind | Initiator | Purpose | +| ------------------------- | --------------------- | ---------- | ------------------------------- | +| `ping` | `request_response` | `both` | Health check | +| `session_load` | `request_response` | `frontend` | Load initial session data | +| `filer_change` | `remote_notification` | `backend` | File system change notification | +| `diskfile_update` | `request_response` | `frontend` | Write file content | +| `diskfile_delete` | `request_response` | `frontend` | Delete a file | +| `directory_create` | `request_response` | `frontend` | Create a directory | +| `completion_create` | `request_response` | `frontend` | Start AI completion | +| `completion_progress` | `remote_notification` | `backend` | Stream completion chunks | +| `ollama_progress` | `remote_notification` | `backend` | Model operation progress | +| `toggle_main_menu` | `local_call` | `frontend` | Toggle main menu UI | +| `ollama_list` | `request_response` | `frontend` | List 
local models | +| `ollama_ps` | `request_response` | `frontend` | List running models | +| `ollama_show` | `request_response` | `frontend` | Show model details | +| `ollama_pull` | `request_response` | `frontend` | Pull model | +| `ollama_delete` | `request_response` | `frontend` | Delete model | +| `ollama_copy` | `request_response` | `frontend` | Copy model | +| `ollama_create` | `request_response` | `frontend` | Create model | +| `ollama_unload` | `request_response` | `frontend` | Unload model from memory | +| `provider_load_status` | `request_response` | `frontend` | Check provider availability | +| `provider_update_api_key` | `request_response` | `frontend` | Update provider API key | ## Cell System @@ -174,10 +179,10 @@ From `cell.svelte.ts`: export abstract class Cell implements CellJson { readonly cid = ++global_cell_count; // monotonic client-side ordering - // Base properties from CellJson - id: Uuid = $state()!; - created: Datetime = $state()!; - updated: Datetime = $state()!; + // Base properties from CellJson — $state.raw() by default + id: Uuid = $state.raw()!; + created: Datetime = $state.raw()!; + updated: Datetime = $state.raw()!; readonly schema!: TSchema; readonly schema_keys: Array> = $derived(...); @@ -201,8 +206,8 @@ export abstract class Cell implements Cel ```typescript interface CellOptions { - app: Frontend; // Root app state reference - json?: z.input; // Initial JSON data (parsed by schema) + app: Frontend; // Root app state reference + json?: z.input; // Initial JSON data (parsed by schema) } ``` @@ -213,36 +218,36 @@ Real example from `chat.svelte.ts`: ```typescript // 1. 
Schema with CellJson base — every field has .default() export const ChatJson = CellJson.extend({ - name: z.string().default(''), - thread_ids: z.array(Uuid).default(() => []), - main_input: z.string().default(''), - view_mode: z.enum(['simple', 'multi']).default('simple'), - selected_thread_id: Uuid.nullable().default(null), + name: z.string().default(''), + thread_ids: z.array(Uuid).default(() => []), + main_input: z.string().default(''), + view_mode: z.enum(['simple', 'multi']).default('simple'), + selected_thread_id: Uuid.nullable().default(null), }).meta({cell_class_name: 'Chat'}); -// 2. Class: $state for schema fields, $derived for computed +// 2. Class: $state.raw by default, $state only for in-place-mutated arrays export class Chat extends Cell { - name: string = $state()!; - thread_ids: Array = $state()!; - main_input: string = $state()!; - view_mode: ChatViewMode = $state()!; - selected_thread_id: Uuid | null = $state()!; - - readonly threads: Array = $derived.by(() => { - const result: Array = []; - for (const id of this.thread_ids) { - const thread = this.app.threads.items.by_id.get(id); - if (thread) result.push(thread); - } - return result; - }); - - readonly enabled_threads = $derived(this.threads.filter((t) => t.enabled)); - - constructor(options: ChatOptions) { - super(ChatJson, options); - this.init(); // Must call at end - } + name: string = $state.raw()!; + thread_ids: Array = $state()!; // $state because push/splice used + main_input: string = $state.raw()!; + view_mode: ChatViewMode = $state.raw()!; + selected_thread_id: Uuid | null = $state.raw()!; + + readonly threads: Array = $derived.by(() => { + const result: Array = []; + for (const id of this.thread_ids) { + const thread = this.app.threads.items.by_id.get(id); + if (thread) result.push(thread); + } + return result; + }); + + readonly enabled_threads = $derived(this.threads.filter((t) => t.enabled)); + + constructor(options: ChatOptions) { + super(ChatJson, options); + this.init(); // 
Must call at end + } } ``` @@ -277,12 +282,17 @@ All cell classes are registered in `cell_classes.ts`. Frontend iterates and regi ```typescript // cell_classes.ts — add new classes here export const cell_classes = { - Parts, Chat, Chats, Thread, Threads, Turn, /* ... 26 total */ + Parts, + Chat, + Chats, + Thread, + Threads, + Turn /* ... 26 total */, } satisfies Record>; // frontend.svelte.ts — auto-registers all classes for (const constructor of Object.values(cell_classes)) { - this.cell_registry.register(constructor); + this.cell_registry.register(constructor); } // Lookup by ID at runtime @@ -303,9 +313,9 @@ Prompt → parts: Array (reusable content templates) ### Parts -| Type | Class | Content source | -|------|-------|----------------| -| Text | `TextPart` | `content: string` stored directly | +| Type | Class | Content source | +| -------- | -------------- | ------------------------------------------------------ | +| Text | `TextPart` | `content: string` stored directly | | Diskfile | `DiskfilePart` | `path: DiskfilePath` → reads from disk or editor state | ### Turns @@ -314,17 +324,20 @@ Conversation messages with role: ```typescript class Turn extends Cell { - part_ids: Array = $state()!; - role: CompletionRole = $state()!; // 'user' | 'assistant' | 'system' - request: CompletionRequest | undefined = $state.raw(); - response: CompletionResponse | undefined = $state.raw(); - - readonly content: string = $derived( - this.parts.map(p => p.content).filter(Boolean).join('\n\n') - ); - readonly pending: boolean = $derived( - this.role === 'assistant' && this.is_content_empty && !this.response - ); + part_ids: Array = $state()!; // $state because push/splice used + role: CompletionRole = $state.raw()!; // 'user' | 'assistant' | 'system' + request: CompletionRequest | undefined = $state.raw(); + response: CompletionResponse | undefined = $state.raw(); + + readonly content: string = $derived( + this.parts + .map((p) => p.content) + .filter(Boolean) + .join('\n\n'), + 
); + readonly pending: boolean = $derived( + this.role === 'assistant' && this.is_content_empty && !this.response, + ); } ``` @@ -334,9 +347,9 @@ Linear conversation with one model. Sends messages via the action system: ```typescript class Thread extends Cell { - model_name: string = $state()!; + model_name: string = $state.raw()!; readonly turns: IndexedCollection = new IndexedCollection(); - enabled: boolean = $state()!; + enabled: boolean = $state.raw()!; async send_message(content: string): Promise { const user_turn = this.add_user_turn(content); @@ -367,20 +380,18 @@ User types message in Chat UI → app.api.completion_create(request) → ActionEvent send_request phase → Transport.send(JSON-RPC request) - → Backend.peer.receive(message) - → ActionEvent receive_request phase - → backend_action_handlers.completion_create.receive_request() - → backend.lookup_provider(provider_name) - → provider.get_handler(!!progress_token) - → handler({model, prompt, ...}) - → For each chunk: backend.api.completion_progress({token, chunk}) - → Return {completion_response} - → ActionEvent send_response phase - → JSON-RPC response via Transport - → Frontend receive_response phase - → turn.content = response_text - → turn.response = completion_response - → Svelte reactivity updates UI + → WS dispatch: spec lookup → Zod validate → handler call + → zzz_action_handlers.completion_create(input, ctx) + → ctx.backend.lookup_provider(provider_name) + → provider.get_handler(!!progress_token) + → handler({model, prompt, ...}) + → For each chunk: backend.api.completion_progress({token, chunk}) + → Return {completion_response} + → JSON-RPC response via WebSocket + → Frontend receive_response phase + → turn.content = response_text + → turn.response = completion_response + → Svelte reactivity updates UI ``` ### Streaming Progress @@ -404,31 +415,31 @@ Queryable reactive collections with multiple index types. 
From `indexed_collecti ```typescript class IndexedCollection { - readonly by_id: SvelteMap = new SvelteMap(); - readonly values: Array = $derived(Array.from(this.by_id.values())); - readonly size: number = $derived(this.by_id.size); + readonly by_id: SvelteMap = new SvelteMap(); + readonly values: Array = $derived(Array.from(this.by_id.values())); + readonly size: number = $derived(this.by_id.size); } ``` ### Index Types -| Type | Cardinality | Example | -|------|-------------|---------| -| `single` | One key → one item | `by('name', 'gpt-5')` | -| `multi` | One key → many items | `where('provider_name', 'ollama')` | +| Type | Cardinality | Example | +| --------- | --------------------- | ---------------------------------- | +| `single` | One key → one item | `by('name', 'gpt-5')` | +| `multi` | One key → many items | `where('provider_name', 'ollama')` | | `derived` | Computed sorted array | `derived_index('ordered_by_name')` | -| `dynamic` | Runtime-computed | Custom queries | +| `dynamic` | Runtime-computed | Custom queries | ### Index Definition ```typescript interface IndexDefinition { - key: string; - type?: 'single' | 'multi' | 'derived' | 'dynamic'; - extractor?: (item: T) => any; - compute: (collection: IndexedCollection) => TResult; - onadd?: (result: TResult, item: T, collection: IndexedCollection) => TResult; - onremove?: (result: TResult, item: T, collection: IndexedCollection) => TResult; + key: string; + type?: 'single' | 'multi' | 'derived' | 'dynamic'; + extractor?: (item: T) => any; + compute: (collection: IndexedCollection) => TResult; + onadd?: (result: TResult, item: T, collection: IndexedCollection) => TResult; + onremove?: (result: TResult, item: T, collection: IndexedCollection) => TResult; } ``` @@ -437,27 +448,27 @@ interface IndexDefinition { ```typescript // Create with indexes const items = new IndexedCollection({ - indexes: [ - create_single_index({key: 'name', extractor: m => m.name}), - create_multi_index({key: 'provider_name', 
extractor: m => m.provider_name}), - create_derived_index({key: 'ordered_by_name', sort: (a, b) => a.name.localeCompare(b.name)}), - ], + indexes: [ + create_single_index({key: 'name', extractor: (m) => m.name}), + create_multi_index({key: 'provider_name', extractor: (m) => m.provider_name}), + create_derived_index({key: 'ordered_by_name', sort: (a, b) => a.name.localeCompare(b.name)}), + ], }); // Query -items.by('name', 'gpt-5'); // single → Model | undefined -items.where('provider_name', 'ollama'); // multi → Array -items.derived_index('ordered_by_name'); // derived → Array +items.by('name', 'gpt-5'); // single → Model | undefined +items.where('provider_name', 'ollama'); // multi → Array +items.derived_index('ordered_by_name'); // derived → Array ``` ## Filesystem Two separate concerns: -| Concern | Env Var | Purpose | -|---------|---------|---------| -| App directory | `PUBLIC_ZZZ_DIR` | Zzz's own data (`.zzz/state/`, `.zzz/cache/`, `.zzz/run/`) | -| Scoped dirs | `PUBLIC_ZZZ_SCOPED_DIRS` | User file access (comma-separated paths) | +| Concern | Env Var | Purpose | +| ------------- | ------------------------ | ---------------------------------------------------------- | +| App directory | `PUBLIC_ZZZ_DIR` | Zzz's own data (`.zzz/state/`, `.zzz/cache/`, `.zzz/run/`) | +| Scoped dirs | `PUBLIC_ZZZ_SCOPED_DIRS` | User file access (comma-separated paths) | ### ScopedFs @@ -467,6 +478,6 @@ All filesystem operations go through `ScopedFs` (`server/scoped_fs.ts`). Securit Each scoped directory gets a `Filer` watcher. File changes are broadcast to clients via `filer_change` notifications over WebSocket. -### Server Info +### Daemon Info -`run/server.json` tracks the running server (PID, port, version). Written atomically on startup, removed on clean shutdown (SIGINT/SIGTERM). Stale detection via `process.kill(pid, 0)`. +`run/daemon.json` tracks the running server (PID, port, version). 
Written atomically on startup via `@fuzdev/fuz_app/cli/daemon.js`, removed on clean shutdown (SIGINT/SIGTERM). Stale detection via `kill -0`. diff --git a/docs/development.md b/docs/development.md index d5493e104..40273a7b7 100644 --- a/docs/development.md +++ b/docs/development.md @@ -6,12 +6,30 @@ Development workflow, extension points, and common patterns. ```bash git clone https://github.com/fuzdev/zzz.git && cd zzz -cp src/lib/server/.env.development.example .env.development +deno task dev:setup npm install ``` Optionally add API keys to `.env.development` for remote providers (Anthropic, OpenAI, Google). Ollama requires no key. +### PTY support (optional) + +Terminal integration uses a Rust shared library (`fuz_pty`) for real PTY +support via Deno FFI. Without it, terminals fall back to `Deno.Command` pipes +(commands run but no echo, no prompt, no interactivity). + +```bash +cd ~/dev/private_fuz && cargo build -p fuz_pty --release +``` + +This produces `target/release/libfuz_pty.so`, which zzz loads at runtime via +`Deno.dlopen()`. The dev server needs `--allow-ffi` (already set in +`gro.config.ts`). The compiled binary also has `--allow-ffi`. + +For bundled/compiled binaries, place `libfuz_pty.so` next to the `zzz` +executable. The library lookup checks exe-relative path first, then falls back +to the dev path (`~/dev/private_fuz/target/release/`). + ## Commands | Command | Purpose | @@ -37,7 +55,6 @@ Never run `gro dev` — the user manages the dev server. | `src/lib/action_metatypes.gen.ts` | Action method types | | `src/lib/action_collections.gen.ts` | Action spec collections | | `src/lib/frontend_action_types.gen.ts` | Frontend handler types | -| `src/lib/server/backend_action_types.gen.ts` | Backend handler types | | `src/routes/library.gen.ts` | Route metadata | Run `gro gen` after changing `action_specs.ts`. 
@@ -65,6 +82,7 @@ Components use `PascalCase` with domain prefixes: | `Ollama` | Ollama-specific | `OllamaManager`, `OllamaPullModel` | | `Part` | Content parts | `PartView`, `PartEditorForText` | | `Prompt` | Prompts | `PromptList`, `PromptPickerDialog` | +| `Terminal` | Terminals | `TerminalRunner`, `TerminalView`, `TerminalContextmenu` | | `Thread` | Threads | `ThreadList`, `ThreadContextmenu` | | `Turn` | Turns | `TurnView`, `TurnListitem` | @@ -85,8 +103,8 @@ export const MyThingJson = CellJson.extend({ ```typescript export class MyThing extends Cell { - name: string = $state()!; - value: number = $state()!; + name: string = $state.raw()!; + value: number = $state.raw()!; readonly doubled = $derived(this.value * 2); @@ -145,17 +163,17 @@ my_action: { }, ``` -4. Add backend handler (`src/lib/server/backend_action_handlers.ts`): +4. Add handler (`src/lib/server/zzz_action_handlers.ts`): ```typescript -my_action: { - receive_request: async ({backend, data: {input}}) => { - const {message} = input; - return {result: `Processed: ${message}`}; - }, +my_action: async (input, ctx) => { + const {message} = input; + return {result: `Processed: ${message}`}; }, ``` +Both HTTP RPC and WebSocket paths automatically pick up the new handler. + ### Adding a New Route Create `src/routes/my_route/+page.svelte`: @@ -339,15 +357,15 @@ All imports use `.js` extensions (ESM convention). ### Svelte 5 Runes in State Classes ```typescript -// Schema fields — $state()! initialized by Cell.init() -name: string = $state()!; +// Schema fields — $state.raw()! by default, initialized by Cell.init() +name: string = $state.raw()!; + +// $state()! only for arrays/objects mutated in place (push, splice, etc.) 
+thread_ids: Array = $state()!; // Derived values readonly doubled = $derived(this.count * 2); readonly complex = $derived.by(() => expensiveCalculation(this.count)); - -// Raw state (no deep reactivity) — for large objects -response: CompletionResponse = $state.raw(); ``` No `$effect` in Cell classes — effects belong in Svelte components only. diff --git a/docs/providers.md b/docs/providers.md index f97d27efb..016909a4a 100644 --- a/docs/providers.md +++ b/docs/providers.md @@ -241,7 +241,7 @@ Local providers (Ollama): `available` = `true` when service responds. 2. Implement `create_client()`, `handle_streaming_completion()`, `handle_non_streaming_completion()` 3. Register in `src/lib/server/server.ts`: `backend.add_provider(new BackendProviderNewProvider(provider_options))` 4. Add response helper in `src/lib/response_helpers.ts` -5. Add env var to `.env.development.example`: `SECRET_NEWPROVIDER_API_KEY=` +5. Add env var to `.env.development.example` and `.env.production.example`: `SECRET_NEWPROVIDER_API_KEY=` 6. Add default models to `src/lib/config_defaults.ts` (`models_default`) See [src/lib/server/CLAUDE.md](../src/lib/server/CLAUDE.md) for detailed backend architecture. 
diff --git a/gro.config.ts b/gro.config.ts new file mode 100644 index 000000000..1056a8796 --- /dev/null +++ b/gro.config.ts @@ -0,0 +1,39 @@ +import type {CreateGroConfig} from '@fuzdev/gro'; +import {gro_plugin_deno_compile} from '@fuzdev/gro/gro_plugin_deno_compile.js'; +import {gro_plugin_deno_server} from '@fuzdev/gro/gro_plugin_deno_server.js'; + +// eslint-disable-next-line @typescript-eslint/require-await +const config: CreateGroConfig = async (base_config) => { + const base_plugins = base_config.plugins; + base_config.plugins = async (ctx) => { + const plugins = (await base_plugins(ctx)).filter((p) => p.name !== 'gro_plugin_server'); + plugins.push( + gro_plugin_deno_server({ + entry: 'src/lib/server/server.ts', + port: 8999, + permissions: [ + '--allow-net', + '--allow-read', + '--allow-write', + '--allow-env', + '--allow-run', + '--allow-ffi', + '--allow-sys', + ], + flags: ['--no-check', '--sloppy-imports'], + }), + ); + return [ + ...plugins, + gro_plugin_deno_compile({ + entry: 'src/lib/zzz/main.ts', + output_name: 'zzz', + flags: ['--no-check', '--sloppy-imports'], + }), + ]; + }; + + return base_config; +}; + +export default config; diff --git a/package-lock.json b/package-lock.json index 6bc070077..9d5ab87b8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,28 +10,32 @@ "license": "MIT", "dependencies": { "@anthropic-ai/sdk": "^0.71.2", - "@fuzdev/gro": "^0.197.1", + "@fuzdev/blake3_wasm": "^0.1.1", + "@fuzdev/gro": "^0.197.3", "@google/generative-ai": "^0.24.1", - "@hono/node-server": "^1.19.6", - "@hono/node-ws": "^1.2.0", + "@xterm/xterm": "^6.0.0", "date-fns": "^4.1.0", "esm-env": "^1.2.2", - "hono": "^4.10.7", + "hono": "^4.12.7", "openai": "^6.10.0", "zod": "^4.3.6" }, "devDependencies": { "@changesets/changelog-git": "^0.2.1", + "@electric-sql/pglite": "^0.3.16", + "@fuzdev/fuz_app": "^0.12.0", "@fuzdev/fuz_code": "^0.45.1", - "@fuzdev/fuz_css": "^0.57.0", - "@fuzdev/fuz_ui": "^0.191.1", - "@fuzdev/fuz_util": "^0.55.0", + 
"@fuzdev/fuz_css": "^0.58.0", + "@fuzdev/fuz_ui": "^0.191.4", + "@fuzdev/fuz_util": "^0.56.0", "@jridgewell/trace-mapping": "^0.3.31", - "@ryanatkn/eslint-config": "^0.9.0", - "@sveltejs/adapter-node": "^5.4.0", + "@node-rs/argon2": "^2.0.2", + "@ryanatkn/eslint-config": "^0.10.1", + "@sveltejs/acorn-typescript": "^1.0.9", "@sveltejs/adapter-static": "^3.0.10", - "@sveltejs/kit": "^2.55.0", + "@sveltejs/kit": "^2.57.1", "@sveltejs/vite-plugin-svelte": "^6.2.4", + "@types/deno": "^2.5.0", "@types/estree": "^1.0.8", "@types/node": "^24.10.1", "@webref/css": "^8.2.0", @@ -40,14 +44,16 @@ "jsdom": "^27.2.0", "magic-string": "^0.30.21", "ollama": "^0.6.3", + "pg": "^8.20.0", "prettier": "^3.7.4", "prettier-plugin-svelte": "^3.4.1", - "svelte": "^5.54.0", + "svelte": "^5.55.3", "svelte-check": "^4.4.5", "svelte2tsx": "^0.7.52", "tslib": "^2.8.1", "typescript": "^5.9.3", "typescript-eslint": "^8.48.1", + "vite": "^7.3.1", "vitest": "^4.0.15", "zimmerframe": "^1.1.4" }, @@ -62,10 +68,77 @@ "svelte": "^5" } }, + "../fuz_app": { + "name": "@fuzdev/fuz_app", + "version": "0.1.0", + "extraneous": true, + "license": "MIT", + "devDependencies": { + "@electric-sql/pglite": "^0.3.16", + "@fuzdev/blake3_wasm": "^0.1.0", + "@fuzdev/fuz_code": "^0.45.1", + "@fuzdev/fuz_css": "^0.57.0", + "@fuzdev/fuz_ui": "^0.191.1", + "@fuzdev/fuz_util": "^0.55.0", + "@fuzdev/gro": "^0.197.1", + "@jridgewell/trace-mapping": "^0.3.31", + "@node-rs/argon2": "^2.0.2", + "@ryanatkn/eslint-config": "^0.10.1", + "@sveltejs/acorn-typescript": "^1.0.9", + "@sveltejs/adapter-static": "^3.0.10", + "@sveltejs/kit": "^2.55.0", + "@sveltejs/package": "^2.5.7", + "@sveltejs/vite-plugin-svelte": "^6.2.4", + "@types/estree": "^1.0.8", + "@types/pg": "^8.18.0", + "@webref/css": "^8.2.0", + "eslint": "^9.39.4", + "eslint-plugin-svelte": "^3.15.1", + "esm-env": "^1.2.2", + "hono": "^4.12.7", + "jsdom": "^28.1.0", + "magic-string": "^0.30.21", + "pg": "^8.20.0", + "prettier": "^3.7.4", + "prettier-plugin-svelte": 
"^3.5.1", + "svelte": "^5.54.1", + "svelte-check": "^4.4.5", + "svelte2tsx": "^0.7.52", + "tslib": "^2.8.1", + "typescript": "^5.9.3", + "typescript-eslint": "^8.57.0", + "vite": "^7.3.1", + "vitest": "^4.0.18", + "zimmerframe": "^1.1.4", + "zod": "^4.3.6" + }, + "engines": { + "node": ">=22.15" + }, + "peerDependencies": { + "@electric-sql/pglite": ">=0.3", + "@fuzdev/blake3_wasm": ">=0.1.0", + "@fuzdev/fuz_util": ">=0.53.4", + "@node-rs/argon2": ">=2", + "@sveltejs/kit": "^2", + "hono": ">=4", + "pg": ">=8", + "svelte": "^5", + "zod": ">=4" + }, + "peerDependenciesMeta": { + "@electric-sql/pglite": { + "optional": true + }, + "pg": { + "optional": true + } + } + }, "node_modules/@acemir/cssom": { - "version": "0.9.24", - "resolved": "https://registry.npmjs.org/@acemir/cssom/-/cssom-0.9.24.tgz", - "integrity": "sha512-5YjgMmAiT2rjJZU7XK1SNI7iqTy92DpaYVgG6x63FxkJ11UpYfLndHJATtinWJClAXiOlW9XWaUyAQf8pMrQPg==", + "version": "0.9.31", + "resolved": "https://registry.npmjs.org/@acemir/cssom/-/cssom-0.9.31.tgz", + "integrity": "sha512-ZnR3GSaH+/vJ0YlHau21FjfLYjMpYVIzTD8M8vIEQvIGxeOXyXdzCI140rrCY862p/C/BbzWsjc1dgnM9mkoTA==", "devOptional": true, "license": "MIT" }, @@ -90,23 +163,23 @@ } }, "node_modules/@asamuzakjp/css-color": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.1.0.tgz", - "integrity": "sha512-9xiBAtLn4aNsa4mDnpovJvBn72tNEIACyvlqaNJ+ADemR+yeMJWnBudOi2qGDviJa7SwcDOU/TRh5dnET7qk0w==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.1.2.tgz", + "integrity": "sha512-NfBUvBaYgKIuq6E/RBLY1m0IohzNHAYyaJGuTK79Z23uNwmz2jl1mPsC5ZxCCxylinKhT1Amn5oNTlx1wN8cQg==", "devOptional": true, "license": "MIT", "dependencies": { - "@csstools/css-calc": "^2.1.4", - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "lru-cache": "^11.2.2" + "@csstools/css-calc": "^3.0.0", + 
"@csstools/css-color-parser": "^4.0.1", + "@csstools/css-parser-algorithms": "^4.0.0", + "@csstools/css-tokenizer": "^4.0.0", + "lru-cache": "^11.2.5" } }, "node_modules/@asamuzakjp/dom-selector": { - "version": "6.7.4", - "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.7.4.tgz", - "integrity": "sha512-buQDjkm+wDPXd6c13534URWZqbz0RP5PAhXZ+LIoa5LgwInT9HVJvGIJivg75vi8I13CxDGdTnz+aY5YUJlIAA==", + "version": "6.8.1", + "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.8.1.tgz", + "integrity": "sha512-MvRz1nCqW0fsy8Qz4dnLIvhOlMzqDVBabZx6lH+YywFDdjXhMY37SmpV1XFX3JzG5GWHn63j6HX6QPr3lZXHvQ==", "devOptional": true, "license": "MIT", "dependencies": { @@ -114,7 +187,7 @@ "bidi-js": "^1.0.3", "css-tree": "^3.1.0", "is-potential-custom-element-name": "^1.0.1", - "lru-cache": "^11.2.2" + "lru-cache": "^11.2.6" } }, "node_modules/@asamuzakjp/nwsapi": { @@ -125,9 +198,9 @@ "license": "MIT" }, "node_modules/@babel/runtime": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", - "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.6.tgz", + "integrity": "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==", "license": "MIT", "engines": { "node": ">=6.9.0" @@ -151,9 +224,9 @@ "license": "MIT" }, "node_modules/@csstools/color-helpers": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz", - "integrity": "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-6.0.2.tgz", + "integrity": 
"sha512-LMGQLS9EuADloEFkcTBR3BwV/CGHV7zyDxVRtVDTwdI2Ca4it0CCVTT9wCkxSgokjE5Ho41hEPgb8OEUwoXr6Q==", "devOptional": true, "funding": [ { @@ -167,13 +240,13 @@ ], "license": "MIT-0", "engines": { - "node": ">=18" + "node": ">=20.19.0" } }, "node_modules/@csstools/css-calc": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz", - "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-3.1.1.tgz", + "integrity": "sha512-HJ26Z/vmsZQqs/o3a6bgKslXGFAungXGbinULZO3eMsOyNJHeBBZfup5FiZInOghgoM4Hwnmw+OgbJCNg1wwUQ==", "devOptional": true, "funding": [ { @@ -187,17 +260,17 @@ ], "license": "MIT", "engines": { - "node": ">=18" + "node": ">=20.19.0" }, "peerDependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" + "@csstools/css-parser-algorithms": "^4.0.0", + "@csstools/css-tokenizer": "^4.0.0" } }, "node_modules/@csstools/css-color-parser": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz", - "integrity": "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-4.0.2.tgz", + "integrity": "sha512-0GEfbBLmTFf0dJlpsNU7zwxRIH0/BGEMuXLTCvFYxuL1tNhqzTbtnFICyJLTNK4a+RechKP75e7w42ClXSnJQw==", "devOptional": true, "funding": [ { @@ -211,21 +284,21 @@ ], "license": "MIT", "dependencies": { - "@csstools/color-helpers": "^5.1.0", - "@csstools/css-calc": "^2.1.4" + "@csstools/color-helpers": "^6.0.2", + "@csstools/css-calc": "^3.1.1" }, "engines": { - "node": ">=18" + "node": ">=20.19.0" }, "peerDependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" + 
"@csstools/css-parser-algorithms": "^4.0.0", + "@csstools/css-tokenizer": "^4.0.0" } }, "node_modules/@csstools/css-parser-algorithms": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz", - "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-4.0.0.tgz", + "integrity": "sha512-+B87qS7fIG3L5h3qwJ/IFbjoVoOe/bpOdh9hAjXbvx0o8ImEmUsGXN0inFOnk2ChCFgqkkGFQ+TpM5rbhkKe4w==", "devOptional": true, "funding": [ { @@ -239,16 +312,16 @@ ], "license": "MIT", "engines": { - "node": ">=18" + "node": ">=20.19.0" }, "peerDependencies": { - "@csstools/css-tokenizer": "^3.0.4" + "@csstools/css-tokenizer": "^4.0.0" } }, "node_modules/@csstools/css-syntax-patches-for-csstree": { - "version": "1.0.17", - "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.17.tgz", - "integrity": "sha512-LCC++2h8pLUSPY+EsZmrrJ1EOUu+5iClpEiDhhdw3zRJpPbABML/N5lmRuBHjxtKm9VnRcsUzioyD0sekFMF0A==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.1.1.tgz", + "integrity": "sha512-BvqN0AMWNAnLk9G8jnUT77D+mUbY/H2b3uDTvg2isJkHaOufUE2R3AOwxWo7VBQKT1lOdwdvorddo2B/lk64+w==", "devOptional": true, "funding": [ { @@ -261,14 +334,19 @@ } ], "license": "MIT-0", - "engines": { - "node": ">=18" + "peerDependencies": { + "css-tree": "^3.2.1" + }, + "peerDependenciesMeta": { + "css-tree": { + "optional": true + } } }, "node_modules/@csstools/css-tokenizer": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz", - "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==", + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-4.0.0.tgz", + "integrity": "sha512-QxULHAm7cNu72w97JUNCBFODFaXpbDg+dP8b/oWFAZ2MTRppA3U00Y2L1HqaS4J6yBqxwa/Y3nMBaxVKbB/NsA==", "devOptional": true, "funding": [ { @@ -282,24 +360,31 @@ ], "license": "MIT", "engines": { - "node": ">=18" + "node": ">=20.19.0" } }, + "node_modules/@electric-sql/pglite": { + "version": "0.3.16", + "resolved": "https://registry.npmjs.org/@electric-sql/pglite/-/pglite-0.3.16.tgz", + "integrity": "sha512-mZkZfOd9OqTMHsK+1cje8OSzfAQcpD7JmILXTl5ahdempjUDdmg4euf1biDex5/LfQIDJ3gvCu6qDgdnDxfJmA==", + "dev": true, + "license": "Apache-2.0" + }, "node_modules/@emnapi/core": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.8.1.tgz", - "integrity": "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.0.tgz", + "integrity": "sha512-0DQ98G9ZQZOxfUcQn1waV2yS8aWdZ6kJMbYCJB3oUBecjWYO1fqJ+a1DRfPF3O5JEkwqwP1A9QEN/9mYm2Yd0w==", "license": "MIT", "optional": true, "dependencies": { - "@emnapi/wasi-threads": "1.1.0", + "@emnapi/wasi-threads": "1.2.0", "tslib": "^2.4.0" } }, "node_modules/@emnapi/runtime": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.8.1.tgz", - "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.9.0.tgz", + "integrity": "sha512-QN75eB0IH2ywSpRpNddCRfQIhmJYBCJ1x5Lb3IscKAL8bMnVAKnRg8dCoXbHzVLLH7P38N2Z3mtulB7W0J0FKw==", "license": "MIT", "optional": true, "dependencies": { @@ -307,9 +392,9 @@ } }, "node_modules/@emnapi/wasi-threads": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz", - "integrity": 
"sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.0.tgz", + "integrity": "sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg==", "license": "MIT", "optional": true, "dependencies": { @@ -317,9 +402,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", - "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.4.tgz", + "integrity": "sha512-cQPwL2mp2nSmHHJlCyoXgHGhbEPMrEEU5xhkcy3Hs/O7nGZqEpZ2sUtLaL9MORLtDfRvVl2/3PAuEkYZH0Ty8Q==", "cpu": [ "ppc64" ], @@ -328,15 +413,14 @@ "os": [ "aix" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/android-arm": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", - "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.4.tgz", + "integrity": "sha512-X9bUgvxiC8CHAGKYufLIHGXPJWnr0OCdR0anD2e21vdvgCI8lIfqFbnoeOz7lBjdrAGUhqLZLcQo6MLhTO2DKQ==", "cpu": [ "arm" ], @@ -345,15 +429,14 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/android-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", - "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.4.tgz", + 
"integrity": "sha512-gdLscB7v75wRfu7QSm/zg6Rx29VLdy9eTr2t44sfTW7CxwAtQghZ4ZnqHk3/ogz7xao0QAgrkradbBzcqFPasw==", "cpu": [ "arm64" ], @@ -362,15 +445,14 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/android-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", - "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.4.tgz", + "integrity": "sha512-PzPFnBNVF292sfpfhiyiXCGSn9HZg5BcAz+ivBuSsl6Rk4ga1oEXAamhOXRFyMcjwr2DVtm40G65N3GLeH1Lvw==", "cpu": [ "x64" ], @@ -379,15 +461,14 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", - "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.4.tgz", + "integrity": "sha512-b7xaGIwdJlht8ZFCvMkpDN6uiSmnxxK56N2GDTMYPr2/gzvfdQN8rTfBsvVKmIVY/X7EM+/hJKEIbbHs9oA4tQ==", "cpu": [ "arm64" ], @@ -396,15 +477,14 @@ "os": [ "darwin" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", - "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.4.tgz", + "integrity": "sha512-sR+OiKLwd15nmCdqpXMnuJ9W2kpy0KigzqScqHI3Hqwr7IXxBp3Yva+yJwoqh7rE8V77tdoheRYataNKL4QrPw==", "cpu": [ "x64" ], @@ -413,15 +493,14 @@ "os": [ "darwin" ], - "peer": true, 
"engines": { "node": ">=18" } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", - "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.4.tgz", + "integrity": "sha512-jnfpKe+p79tCnm4GVav68A7tUFeKQwQyLgESwEAUzyxk/TJr4QdGog9sqWNcUbr/bZt/O/HXouspuQDd9JxFSw==", "cpu": [ "arm64" ], @@ -430,15 +509,14 @@ "os": [ "freebsd" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", - "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.4.tgz", + "integrity": "sha512-2kb4ceA/CpfUrIcTUl1wrP/9ad9Atrp5J94Lq69w7UwOMolPIGrfLSvAKJp0RTvkPPyn6CIWrNy13kyLikZRZQ==", "cpu": [ "x64" ], @@ -447,15 +525,14 @@ "os": [ "freebsd" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/linux-arm": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", - "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.4.tgz", + "integrity": "sha512-aBYgcIxX/wd5n2ys0yESGeYMGF+pv6g0DhZr3G1ZG4jMfruU9Tl1i2Z+Wnj9/KjGz1lTLCcorqE2viePZqj4Eg==", "cpu": [ "arm" ], @@ -464,15 +541,14 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", - 
"integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.4.tgz", + "integrity": "sha512-7nQOttdzVGth1iz57kxg9uCz57dxQLHWxopL6mYuYthohPKEK0vU0C3O21CcBK6KDlkYVcnDXY099HcCDXd9dA==", "cpu": [ "arm64" ], @@ -481,15 +557,14 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", - "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.4.tgz", + "integrity": "sha512-oPtixtAIzgvzYcKBQM/qZ3R+9TEUd1aNJQu0HhGyqtx6oS7qTpvjheIWBbes4+qu1bNlo2V4cbkISr8q6gRBFA==", "cpu": [ "ia32" ], @@ -498,15 +573,14 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", - "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.4.tgz", + "integrity": "sha512-8mL/vh8qeCoRcFH2nM8wm5uJP+ZcVYGGayMavi8GmRJjuI3g1v6Z7Ni0JJKAJW+m0EtUuARb6Lmp4hMjzCBWzA==", "cpu": [ "loong64" ], @@ -515,15 +589,14 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", - "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", + "version": "0.27.4", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.4.tgz", + "integrity": "sha512-1RdrWFFiiLIW7LQq9Q2NES+HiD4NyT8Itj9AUeCl0IVCA459WnPhREKgwrpaIfTOe+/2rdntisegiPWn/r/aAw==", "cpu": [ "mips64el" ], @@ -532,15 +605,14 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", - "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.4.tgz", + "integrity": "sha512-tLCwNG47l3sd9lpfyx9LAGEGItCUeRCWeAx6x2Jmbav65nAwoPXfewtAdtbtit/pJFLUWOhpv0FpS6GQAmPrHA==", "cpu": [ "ppc64" ], @@ -549,15 +621,14 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", - "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.4.tgz", + "integrity": "sha512-BnASypppbUWyqjd1KIpU4AUBiIhVr6YlHx/cnPgqEkNoVOhHg+YiSVxM1RLfiy4t9cAulbRGTNCKOcqHrEQLIw==", "cpu": [ "riscv64" ], @@ -566,15 +637,14 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", - "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.4.tgz", + "integrity": 
"sha512-+eUqgb/Z7vxVLezG8bVB9SfBie89gMueS+I0xYh2tJdw3vqA/0ImZJ2ROeWwVJN59ihBeZ7Tu92dF/5dy5FttA==", "cpu": [ "s390x" ], @@ -583,15 +653,14 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/linux-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", - "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.4.tgz", + "integrity": "sha512-S5qOXrKV8BQEzJPVxAwnryi2+Iq5pB40gTEIT69BQONqR7JH1EPIcQ/Uiv9mCnn05jff9umq/5nqzxlqTOg9NA==", "cpu": [ "x64" ], @@ -600,15 +669,14 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", - "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.4.tgz", + "integrity": "sha512-xHT8X4sb0GS8qTqiwzHqpY00C95DPAq7nAwX35Ie/s+LO9830hrMd3oX0ZMKLvy7vsonee73x0lmcdOVXFzd6Q==", "cpu": [ "arm64" ], @@ -617,15 +685,14 @@ "os": [ "netbsd" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", - "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.4.tgz", + "integrity": "sha512-RugOvOdXfdyi5Tyv40kgQnI0byv66BFgAqjdgtAKqHoZTbTF2QqfQrFwa7cHEORJf6X2ht+l9ABLMP0dnKYsgg==", "cpu": [ "x64" ], @@ -634,15 +701,14 @@ "os": [ "netbsd" ], - "peer": true, "engines": { "node": 
">=18" } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", - "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.4.tgz", + "integrity": "sha512-2MyL3IAaTX+1/qP0O1SwskwcwCoOI4kV2IBX1xYnDDqthmq5ArrW94qSIKCAuRraMgPOmG0RDTA74mzYNQA9ow==", "cpu": [ "arm64" ], @@ -651,15 +717,14 @@ "os": [ "openbsd" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", - "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.4.tgz", + "integrity": "sha512-u8fg/jQ5aQDfsnIV6+KwLOf1CmJnfu1ShpwqdwC0uA7ZPwFws55Ngc12vBdeUdnuWoQYx/SOQLGDcdlfXhYmXQ==", "cpu": [ "x64" ], @@ -668,15 +733,14 @@ "os": [ "openbsd" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", - "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.4.tgz", + "integrity": "sha512-JkTZrl6VbyO8lDQO3yv26nNr2RM2yZzNrNHEsj9bm6dOwwu9OYN28CjzZkH57bh4w0I2F7IodpQvUAEd1mbWXg==", "cpu": [ "arm64" ], @@ -685,15 +749,14 @@ "os": [ "openharmony" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.27.3", - "resolved": 
"https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", - "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.4.tgz", + "integrity": "sha512-/gOzgaewZJfeJTlsWhvUEmUG4tWEY2Spp5M20INYRg2ZKl9QPO3QEEgPeRtLjEWSW8FilRNacPOg8R1uaYkA6g==", "cpu": [ "x64" ], @@ -702,15 +765,14 @@ "os": [ "sunos" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", - "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.4.tgz", + "integrity": "sha512-Z9SExBg2y32smoDQdf1HRwHRt6vAHLXcxD2uGgO/v2jK7Y718Ix4ndsbNMU/+1Qiem9OiOdaqitioZwxivhXYg==", "cpu": [ "arm64" ], @@ -719,15 +781,14 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", - "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.4.tgz", + "integrity": "sha512-DAyGLS0Jz5G5iixEbMHi5KdiApqHBWMGzTtMiJ72ZOLhbu/bzxgAe8Ue8CTS3n3HbIUHQz/L51yMdGMeoxXNJw==", "cpu": [ "ia32" ], @@ -736,15 +797,14 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@esbuild/win32-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", - "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", + "version": "0.27.4", 
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.4.tgz", + "integrity": "sha512-+knoa0BDoeXgkNvvV1vvbZX4+hizelrkwmGJBdT17t8FNPwG2lKemmuMZlmaNQ3ws3DKKCxpb4zRZEIp3UxFCg==", "cpu": [ "x64" ], @@ -753,15 +813,14 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=18" } }, "node_modules/@eslint-community/eslint-utils": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", - "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", + "version": "4.9.1", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz", + "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==", "dev": true, "license": "MIT", "dependencies": { @@ -791,9 +850,9 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", - "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", "dev": true, "license": "MIT", "engines": { @@ -801,15 +860,15 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.21.1", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", - "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", + "version": "0.21.2", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.2.tgz", + "integrity": "sha512-nJl2KGTlrf9GjLimgIru+V/mzgSK0ABCDQRvxw5BjURL7WfH5uoWmizbH7QB6MmnMBd8cIC9uceWnezL1VZWWw==", "dev": true, "license": 
"Apache-2.0", "dependencies": { "@eslint/object-schema": "^2.1.7", "debug": "^4.3.1", - "minimatch": "^3.1.2" + "minimatch": "^3.1.5" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -842,20 +901,20 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", - "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.5.tgz", + "integrity": "sha512-4IlJx0X0qftVsN5E+/vGujTRIFtwuLbNsVUe7TO6zYPDR1O6nFwvwhIKEKSrl6dZchmYBITazxKoUYOjdtjlRg==", "dev": true, "license": "MIT", "dependencies": { - "ajv": "^6.12.4", + "ajv": "^6.14.0", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", + "js-yaml": "^4.1.1", + "minimatch": "^3.1.5", "strip-json-comments": "^3.1.1" }, "engines": { @@ -879,9 +938,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.39.1", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.1.tgz", - "integrity": "sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==", + "version": "9.39.4", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.4.tgz", + "integrity": "sha512-nE7DEIchvtiFTwBw4Lfbu59PG+kCofhjsKaCWzxTpt4lfRjRMqG6uMBzKXuEcyXhOHoUp9riAm7/aWYGhXZ9cw==", "dev": true, "license": "MIT", "engines": { @@ -915,12 +974,29 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@exodus/bytes": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/@exodus/bytes/-/bytes-1.15.0.tgz", + "integrity": "sha512-UY0nlA+feH81UGSHv92sLEPLCeZFjXOuHhrIo0HQydScuQc8s0A7kL/UdgwgDq8g8ilksmuoF35YVTNphV2aBQ==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" + }, + "peerDependencies": { + 
"@noble/hashes": "^1.8.0 || ^2.0.0" + }, + "peerDependenciesMeta": { + "@noble/hashes": { + "optional": true + } + } + }, "node_modules/@fuzdev/blake3_wasm": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@fuzdev/blake3_wasm/-/blake3_wasm-0.1.0.tgz", - "integrity": "sha512-EU5uUcSX55Li3IXi1NiBDoVlxCN8ip9wqAhVZlMBEUa+cFQtLL6Z8GpYjlWy0KosLmxy2Z9WQv49PAkiAzFppg==", + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@fuzdev/blake3_wasm/-/blake3_wasm-0.1.1.tgz", + "integrity": "sha512-JikFOouJEVLKJvsEQ7+fRdo3GElL4nmu2sV8rg+xu2bv+BAMk+GvoO3TOSPYX9fdHeXJ7U4N0IdIP/mNh7WNfw==", "license": "MIT", - "peer": true, "engines": { "node": ">=20" }, @@ -928,6 +1004,35 @@ "url": "https://www.ryanatkn.com/funding" } }, + "node_modules/@fuzdev/fuz_app": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/@fuzdev/fuz_app/-/fuz_app-0.12.0.tgz", + "integrity": "sha512-6bDLg5fobf+N/m3hc4+UNKTgbilUtNV3PtVQyrZn7V1HrUes/pZ+TJeOUIuSdeLisJ3GHh+AzpltArcY3WSSQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=22.15" + }, + "peerDependencies": { + "@electric-sql/pglite": ">=0.3", + "@fuzdev/blake3_wasm": ">=0.1.0", + "@fuzdev/fuz_util": ">=0.53.4", + "@node-rs/argon2": ">=2", + "@sveltejs/kit": "^2", + "hono": ">=4", + "pg": ">=8", + "svelte": "^5", + "zod": ">=4" + }, + "peerDependenciesMeta": { + "@electric-sql/pglite": { + "optional": true + }, + "pg": { + "optional": true + } + } + }, "node_modules/@fuzdev/fuz_code": { "version": "0.45.1", "resolved": "https://registry.npmjs.org/@fuzdev/fuz_code/-/fuz_code-0.45.1.tgz", @@ -967,9 +1072,9 @@ } }, "node_modules/@fuzdev/fuz_css": { - "version": "0.57.0", - "resolved": "https://registry.npmjs.org/@fuzdev/fuz_css/-/fuz_css-0.57.0.tgz", - "integrity": "sha512-2UGLAG4tfvLEOqTWTdV1j5raitAJ5YzyiM2luXaJfddSCfs0lZGlVP4M8i2DtIfcCBJDJfanjZuu2WSNm8MYjQ==", + "version": "0.58.0", + "resolved": "https://registry.npmjs.org/@fuzdev/fuz_css/-/fuz_css-0.58.0.tgz", + "integrity": 
"sha512-+66ZulIMyZj6xdh61kpTznZaSucpMtUggJOQjEmzdLYAY74GUFsda6801J6YgQhlqJ3QX7wzP8z7+lw8NMcMUQ==", "dev": true, "license": "MIT", "engines": { @@ -1016,9 +1121,9 @@ } }, "node_modules/@fuzdev/fuz_ui": { - "version": "0.191.1", - "resolved": "https://registry.npmjs.org/@fuzdev/fuz_ui/-/fuz_ui-0.191.1.tgz", - "integrity": "sha512-AXrlIcx+ijB98+z6RejsVIyQXeHfr7VfYsXOjS6GNALU6kFbrU1QArUbjv57TciqONEIh5bVywX3tf4fMOL0zQ==", + "version": "0.191.4", + "resolved": "https://registry.npmjs.org/@fuzdev/fuz_ui/-/fuz_ui-0.191.4.tgz", + "integrity": "sha512-OYF6k1GR2v2wy5BbnYZ6GRGCloS3zQ5y+Nvn1trhN38YdX5HPgBDRWUOUdI9EbHCiMb6t0Ey+dUuEqu97OQFlg==", "dev": true, "license": "MIT", "engines": { @@ -1072,9 +1177,9 @@ } }, "node_modules/@fuzdev/fuz_util": { - "version": "0.55.0", - "resolved": "https://registry.npmjs.org/@fuzdev/fuz_util/-/fuz_util-0.55.0.tgz", - "integrity": "sha512-nHjwB6RIExT4+n+1OWhy+mlq0KGlkdY/NtABYncHqo6AuD+Pq8+7PDIhhGEWcBWB49NySxroujdaGfyS8xrsBw==", + "version": "0.56.0", + "resolved": "https://registry.npmjs.org/@fuzdev/fuz_util/-/fuz_util-0.56.0.tgz", + "integrity": "sha512-+5YQQRF/bheWQ2t9BSgwkjqx8pHdpT7KdJLWD10d+9n9HQTMkzK+qmN86pyD6dWGUByjSPe7rZZirZFKCKqz5w==", "license": "MIT", "engines": { "node": ">=22.15" @@ -1112,9 +1217,9 @@ } }, "node_modules/@fuzdev/gro": { - "version": "0.197.1", - "resolved": "https://registry.npmjs.org/@fuzdev/gro/-/gro-0.197.1.tgz", - "integrity": "sha512-FAiMQ4Pngbc9+CsF1WFjwD3Q+L+AVMyRXswnjl0/AczTCcYt+kBPQYEsZ4K9E6QcYT9WPuRs+WI1CX9YVGofyw==", + "version": "0.197.3", + "resolved": "https://registry.npmjs.org/@fuzdev/gro/-/gro-0.197.3.tgz", + "integrity": "sha512-LSwnzCL968EQ/abuUWlgJBSKQVAmCXVrQWdSx6ogGhe3xWvSVPJIeH8fi16azSjqrSUOD/oOUcvdQczmvt/H6g==", "license": "MIT", "dependencies": { "chokidar": "^5.0.0", @@ -1157,34 +1262,6 @@ } } }, - "node_modules/@fuzdev/gro/node_modules/chokidar": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-5.0.0.tgz", - "integrity": 
"sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==", - "license": "MIT", - "dependencies": { - "readdirp": "^5.0.0" - }, - "engines": { - "node": ">= 20.19.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@fuzdev/gro/node_modules/readdirp": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-5.0.0.tgz", - "integrity": "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==", - "license": "MIT", - "engines": { - "node": ">= 20.19.0" - }, - "funding": { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - }, "node_modules/@google/generative-ai": { "version": "0.24.1", "resolved": "https://registry.npmjs.org/@google/generative-ai/-/generative-ai-0.24.1.tgz", @@ -1194,34 +1271,6 @@ "node": ">=18.0.0" } }, - "node_modules/@hono/node-server": { - "version": "1.19.6", - "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.6.tgz", - "integrity": "sha512-Shz/KjlIeAhfiuE93NDKVdZ7HdBVLQAfdbaXEaoAVO3ic9ibRSLGIQGkcBbFyuLr+7/1D5ZCINM8B+6IvXeMtw==", - "license": "MIT", - "engines": { - "node": ">=18.14.1" - }, - "peerDependencies": { - "hono": "^4" - } - }, - "node_modules/@hono/node-ws": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@hono/node-ws/-/node-ws-1.2.0.tgz", - "integrity": "sha512-OBPQ8OSHBw29mj00wT/xGYtB6HY54j0fNSdVZ7gZM3TUeq0So11GXaWtFf1xWxQNfumKIsj0wRuLKWfVsO5GgQ==", - "license": "MIT", - "dependencies": { - "ws": "^8.17.0" - }, - "engines": { - "node": ">=18.14.1" - }, - "peerDependencies": { - "@hono/node-server": "^1.11.1", - "hono": "^4.6.0" - } - }, "node_modules/@humanfs/core": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", @@ -1335,399 +1384,287 @@ "url": "https://github.com/sponsors/Brooooooklyn" } }, - "node_modules/@oxc-parser/binding-android-arm64": { - "version": "0.99.0", - 
"resolved": "https://registry.npmjs.org/@oxc-parser/binding-android-arm64/-/binding-android-arm64-0.99.0.tgz", - "integrity": "sha512-V4jhmKXgQQdRnm73F+r3ZY4pUEsijQeSraFeaCGng7abSNJGs76X6l82wHnmjLGFAeY00LWtjcELs7ZmbJ9+lA==", + "node_modules/@node-rs/argon2": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2/-/argon2-2.0.2.tgz", + "integrity": "sha512-t64wIsPEtNd4aUPuTAyeL2ubxATCBGmeluaKXEMAFk/8w6AJIVVkeLKMBpgLW6LU2t5cQxT+env/c6jxbtTQBg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10" + }, + "optionalDependencies": { + "@node-rs/argon2-android-arm-eabi": "2.0.2", + "@node-rs/argon2-android-arm64": "2.0.2", + "@node-rs/argon2-darwin-arm64": "2.0.2", + "@node-rs/argon2-darwin-x64": "2.0.2", + "@node-rs/argon2-freebsd-x64": "2.0.2", + "@node-rs/argon2-linux-arm-gnueabihf": "2.0.2", + "@node-rs/argon2-linux-arm64-gnu": "2.0.2", + "@node-rs/argon2-linux-arm64-musl": "2.0.2", + "@node-rs/argon2-linux-x64-gnu": "2.0.2", + "@node-rs/argon2-linux-x64-musl": "2.0.2", + "@node-rs/argon2-wasm32-wasi": "2.0.2", + "@node-rs/argon2-win32-arm64-msvc": "2.0.2", + "@node-rs/argon2-win32-ia32-msvc": "2.0.2", + "@node-rs/argon2-win32-x64-msvc": "2.0.2" + } + }, + "node_modules/@node-rs/argon2-android-arm-eabi": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-android-arm-eabi/-/argon2-android-arm-eabi-2.0.2.tgz", + "integrity": "sha512-DV/H8p/jt40lrao5z5g6nM9dPNPGEHL+aK6Iy/og+dbL503Uj0AHLqj1Hk9aVUSCNnsDdUEKp4TVMi0YakDYKw==", "cpu": [ - "arm64" + "arm" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "android" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-darwin-arm64": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-darwin-arm64/-/binding-darwin-arm64-0.99.0.tgz", - "integrity": "sha512-Rp41nf9zD5FyLZciS9l1GfK8PhYqrD5kEGxyTOA2esTLeAy37rZxetG2E3xteEolAkeb2WDkVrlxPtibeAncMg==", + 
"node_modules/@node-rs/argon2-android-arm64": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-android-arm64/-/argon2-android-arm64-2.0.2.tgz", + "integrity": "sha512-1LKwskau+8O1ktKx7TbK7jx1oMOMt4YEXZOdSNIar1TQKxm6isZ0cRXgHLibPHEcNHgYRsJWDE9zvDGBB17QDg==", "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ - "darwin" + "android" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-darwin-x64": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-darwin-x64/-/binding-darwin-x64-0.99.0.tgz", - "integrity": "sha512-WVonp40fPPxo5Gs0POTI57iEFv485TvNKOHMwZRhigwZRhZY2accEAkYIhei9eswF4HN5B44Wybkz7Gd1Qr/5Q==", + "node_modules/@node-rs/argon2-darwin-arm64": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-darwin-arm64/-/argon2-darwin-arm64-2.0.2.tgz", + "integrity": "sha512-3TTNL/7wbcpNju5YcqUrCgXnXUSbD7ogeAKatzBVHsbpjZQbNb1NDxDjqqrWoTt6XL3z9mJUMGwbAk7zQltHtA==", "cpu": [ - "x64" + "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "darwin" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-freebsd-x64": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-freebsd-x64/-/binding-freebsd-x64-0.99.0.tgz", - "integrity": "sha512-H30bjOOttPmG54gAqu6+HzbLEzuNOYO2jZYrIq4At+NtLJwvNhXz28Hf5iEAFZIH/4hMpLkM4VN7uc+5UlNW3Q==", + "node_modules/@node-rs/argon2-darwin-x64": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-darwin-x64/-/argon2-darwin-x64-2.0.2.tgz", + "integrity": "sha512-vNPfkLj5Ij5111UTiYuwgxMqE7DRbOS2y58O2DIySzSHbcnu+nipmRKg+P0doRq6eKIJStyBK8dQi5Ic8pFyDw==", "cpu": [ "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ - "freebsd" + "darwin" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - 
"node_modules/@oxc-parser/binding-linux-arm-gnueabihf": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.99.0.tgz", - "integrity": "sha512-0Z/Th0SYqzSRDPs6tk5lQdW0i73UCupnim3dgq2oW0//UdLonV/5wIZCArfKGC7w9y4h8TxgXpgtIyD1kKzzlQ==", + "node_modules/@node-rs/argon2-freebsd-x64": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-freebsd-x64/-/argon2-freebsd-x64-2.0.2.tgz", + "integrity": "sha512-M8vQZk01qojQfCqQU0/O1j1a4zPPrz93zc9fSINY7Q/6RhQRBCYwDw7ltDCZXg5JRGlSaeS8cUXWyhPGar3cGg==", "cpu": [ - "arm" + "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ - "linux" + "freebsd" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-linux-arm-musleabihf": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.99.0.tgz", - "integrity": "sha512-xo0wqNd5bpbzQVNpAIFbHk1xa+SaS/FGBABCd942SRTnrpxl6GeDj/s1BFaGcTl8MlwlKVMwOcyKrw/2Kdfquw==", + "node_modules/@node-rs/argon2-linux-arm-gnueabihf": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-linux-arm-gnueabihf/-/argon2-linux-arm-gnueabihf-2.0.2.tgz", + "integrity": "sha512-7EmmEPHLzcu0G2GDh30L6G48CH38roFC2dqlQJmtRCxs6no3tTE/pvgBGatTp/o2n2oyOJcfmgndVFcUpwMnww==", "cpu": [ "arm" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-linux-arm64-gnu": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.99.0.tgz", - "integrity": "sha512-u26I6LKoLTPTd4Fcpr0aoAtjnGf5/ulMllo+QUiBhupgbVCAlaj4RyXH/mvcjcsl2bVBv9E/gYJZz2JjxQWXBA==", + "node_modules/@node-rs/argon2-linux-arm64-gnu": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/@node-rs/argon2-linux-arm64-gnu/-/argon2-linux-arm64-gnu-2.0.2.tgz", + "integrity": "sha512-6lsYh3Ftbk+HAIZ7wNuRF4SZDtxtFTfK+HYFAQQyW7Ig3LHqasqwfUKRXVSV5tJ+xTnxjqgKzvZSUJCAyIfHew==", "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-linux-arm64-musl": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.99.0.tgz", - "integrity": "sha512-qhftDo2D37SqCEl3ZTa367NqWSZNb1Ddp34CTmShLKFrnKdNiUn55RdokLnHtf1AL5ssaQlYDwBECX7XiBWOhw==", + "node_modules/@node-rs/argon2-linux-arm64-musl": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-linux-arm64-musl/-/argon2-linux-arm64-musl-2.0.2.tgz", + "integrity": "sha512-p3YqVMNT/4DNR67tIHTYGbedYmXxW9QlFmF39SkXyEbGQwpgSf6pH457/fyXBIYznTU/smnG9EH+C1uzT5j4hA==", "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-linux-riscv64-gnu": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.99.0.tgz", - "integrity": "sha512-zxn/xkf519f12FKkpL5XwJipsylfSSnm36h6c1zBDTz4fbIDMGyIhHfWfwM7uUmHo9Aqw1pLxFpY39Etv398+Q==", + "node_modules/@node-rs/argon2-linux-x64-gnu": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-linux-x64-gnu/-/argon2-linux-x64-gnu-2.0.2.tgz", + "integrity": "sha512-ZM3jrHuJ0dKOhvA80gKJqBpBRmTJTFSo2+xVZR+phQcbAKRlDMSZMFDiKbSTnctkfwNFtjgDdh5g1vaEV04AvA==", "cpu": [ - "riscv64" + "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-linux-s390x-gnu": { - "version": 
"0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.99.0.tgz", - "integrity": "sha512-Y1eSDKDS5E4IVC7Oxw+NbYAKRmJPMJTIjW+9xOWwteDHkFqpocKe0USxog+Q1uhzalD9M0p9eXWEWdGQCMDBMQ==", + "node_modules/@node-rs/argon2-linux-x64-musl": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-linux-x64-musl/-/argon2-linux-x64-musl-2.0.2.tgz", + "integrity": "sha512-of5uPqk7oCRF/44a89YlWTEfjsftPywyTULwuFDKyD8QtVZoonrJR6ZWvfFE/6jBT68S0okAkAzzMEdBVWdxWw==", "cpu": [ - "s390x" + "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "linux" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-linux-x64-gnu": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.99.0.tgz", - "integrity": "sha512-YVJMfk5cFWB8i2/nIrbk6n15bFkMHqWnMIWkVx7r2KwpTxHyFMfu2IpeVKo1ITDSmt5nBrGdLHD36QRlu2nDLg==", + "node_modules/@node-rs/argon2-wasm32-wasi": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-wasm32-wasi/-/argon2-wasm32-wasi-2.0.2.tgz", + "integrity": "sha512-U3PzLYKSQYzTERstgtHLd4ZTkOF9co57zTXT77r0cVUsleGZOrd6ut7rHzeWwoJSiHOVxxa0OhG1JVQeB7lLoQ==", "cpu": [ - "x64" + "wasm32" ], + "dev": true, "license": "MIT", "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^20.19.0 || >=22.12.0" - } - }, - "node_modules/@oxc-parser/binding-linux-x64-musl": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-x64-musl/-/binding-linux-x64-musl-0.99.0.tgz", - "integrity": "sha512-2+SDPrie5f90A1b9EirtVggOgsqtsYU5raZwkDYKyS1uvJzjqHCDhG/f4TwQxHmIc5YkczdQfwvN91lwmjsKYQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], + "dependencies": { + "@napi-rs/wasm-runtime": "^0.2.5" + }, "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">=14.0.0" } }, - 
"node_modules/@oxc-parser/binding-wasm32-wasi": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-wasm32-wasi/-/binding-wasm32-wasi-0.99.0.tgz", - "integrity": "sha512-DKA4j0QerUWSMADziLM5sAyM7V53Fj95CV9SjP77bPfEfT7MnvFKnneaRMqPK1cpzjAGiQF52OBUIKyk0dwOQA==", - "cpu": [ - "wasm32" - ], + "node_modules/@node-rs/argon2-wasm32-wasi/node_modules/@napi-rs/wasm-runtime": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", + "integrity": "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==", + "dev": true, "license": "MIT", "optional": true, "dependencies": { - "@napi-rs/wasm-runtime": "^1.0.7" - }, - "engines": { - "node": ">=14.0.0" + "@emnapi/core": "^1.4.3", + "@emnapi/runtime": "^1.4.3", + "@tybys/wasm-util": "^0.10.0" } }, - "node_modules/@oxc-parser/binding-win32-arm64-msvc": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.99.0.tgz", - "integrity": "sha512-EaB3AvsxqdNUhh9FOoAxRZ2L4PCRwDlDb//QXItwyOJrX7XS+uGK9B1KEUV4FZ/7rDhHsWieLt5e07wl2Ti5AQ==", + "node_modules/@node-rs/argon2-win32-arm64-msvc": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-win32-arm64-msvc/-/argon2-win32-arm64-msvc-2.0.2.tgz", + "integrity": "sha512-Eisd7/NM0m23ijrGr6xI2iMocdOuyl6gO27gfMfya4C5BODbUSP7ljKJ7LrA0teqZMdYHesRDzx36Js++/vhiQ==", "cpu": [ "arm64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "win32" ], "engines": { - "node": "^20.19.0 || >=22.12.0" + "node": ">= 10" } }, - "node_modules/@oxc-parser/binding-win32-x64-msvc": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.99.0.tgz", - "integrity": "sha512-sJN1Q8h7ggFOyDn0zsHaXbP/MklAVUvhrbq0LA46Qum686P3SZQHjbATqJn9yaVEvaSKXCshgl0vQ1gWkGgpcQ==", + 
"node_modules/@node-rs/argon2-win32-ia32-msvc": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-win32-ia32-msvc/-/argon2-win32-ia32-msvc-2.0.2.tgz", + "integrity": "sha512-GsE2ezwAYwh72f9gIjbGTZOf4HxEksb5M2eCaj+Y5rGYVwAdt7C12Q2e9H5LRYxWcFvLH4m4jiSZpQQ4upnPAQ==", "cpu": [ - "x64" + "ia32" ], + "dev": true, "license": "MIT", "optional": true, "os": [ "win32" ], "engines": { - "node": "^20.19.0 || >=22.12.0" - } - }, - "node_modules/@oxc-project/types": { - "version": "0.99.0", - "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.99.0.tgz", - "integrity": "sha512-LLDEhXB7g1m5J+woRSgfKsFPS3LhR9xRhTeIoEBm5WrkwMxn6eZ0Ld0c0K5eHB57ChZX6I3uSmmLjZ8pcjlRcw==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/Boshen" - } - }, - "node_modules/@polka/url": { - "version": "1.0.0-next.29", - "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", - "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", - "devOptional": true, - "license": "MIT" - }, - "node_modules/@rollup/plugin-commonjs": { - "version": "28.0.6", - "resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-28.0.6.tgz", - "integrity": "sha512-XSQB1K7FUU5QP+3lOQmVCE3I0FcbbNvmNT4VJSj93iUjayaARrTQeoRdiYQoftAJBLrR9t2agwAd3ekaTgHNlw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.0.1", - "commondir": "^1.0.1", - "estree-walker": "^2.0.2", - "fdir": "^6.2.0", - "is-reference": "1.2.1", - "magic-string": "^0.30.3", - "picomatch": "^4.0.2" - }, - "engines": { - "node": ">=16.0.0 || 14 >= 14.17" - }, - "peerDependencies": { - "rollup": "^2.68.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-commonjs/node_modules/estree-walker": { - "version": "2.0.2", - "resolved": 
"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", - "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@rollup/plugin-commonjs/node_modules/is-reference": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz", - "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "*" - } - }, - "node_modules/@rollup/plugin-json": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", - "integrity": "sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.1.0" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/plugin-node-resolve": { - "version": "16.0.3", - "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.3.tgz", - "integrity": "sha512-lUYM3UBGuM93CnMPG1YocWu7X802BrNF3jW2zny5gQyLQgRFJhV1Sq0Zi74+dh/6NBx1DxFC4b4GXg9wUCG5Qg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/pluginutils": "^5.0.1", - "@types/resolve": "1.20.2", - "deepmerge": "^4.2.2", - "is-module": "^1.0.0", - "resolve": "^1.22.1" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^2.78.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/pluginutils": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", - "integrity": 
"sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "estree-walker": "^2.0.2", - "picomatch": "^4.0.2" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } + "node": ">= 10" } }, - "node_modules/@rollup/pluginutils/node_modules/estree-walker": { + "node_modules/@node-rs/argon2-win32-x64-msvc": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", - "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "dev": true, - "license": "MIT" - }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.50.1.tgz", - "integrity": "sha512-HJXwzoZN4eYTdD8bVV22DN8gsPCAj3V20NHKOs8ezfXanGpmVPR7kalUHd+Y31IJp9stdB87VKPFbsGY3H/2ag==", + "resolved": "https://registry.npmjs.org/@node-rs/argon2-win32-x64-msvc/-/argon2-win32-x64-msvc-2.0.2.tgz", + "integrity": "sha512-cJxWXanH4Ew9CfuZ4IAEiafpOBCe97bzoKowHCGk5lG/7kR4WF/eknnBlHW9m8q7t10mKq75kruPLtbSDqgRTw==", "cpu": [ - "arm" + "x64" ], + "dev": true, "license": "MIT", "optional": true, "os": [ - "android" - ] + "win32" + ], + "engines": { + "node": ">= 10" + } }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.50.1.tgz", - "integrity": "sha512-PZlsJVcjHfcH53mOImyt3bc97Ep3FJDXRpk9sMdGX0qgLmY0EIWxCag6EigerGhLVuL8lDVYNnSo8qnTElO4xw==", + "node_modules/@oxc-parser/binding-android-arm64": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-android-arm64/-/binding-android-arm64-0.99.0.tgz", + 
"integrity": "sha512-V4jhmKXgQQdRnm73F+r3ZY4pUEsijQeSraFeaCGng7abSNJGs76X6l82wHnmjLGFAeY00LWtjcELs7ZmbJ9+lA==", "cpu": [ "arm64" ], @@ -1735,12 +1672,15 @@ "optional": true, "os": [ "android" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.50.1.tgz", - "integrity": "sha512-xc6i2AuWh++oGi4ylOFPmzJOEeAa2lJeGUGb4MudOtgfyyjr4UPNK+eEWTPLvmPJIY/pgw6ssFIox23SyrkkJw==", + "node_modules/@oxc-parser/binding-darwin-arm64": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-darwin-arm64/-/binding-darwin-arm64-0.99.0.tgz", + "integrity": "sha512-Rp41nf9zD5FyLZciS9l1GfK8PhYqrD5kEGxyTOA2esTLeAy37rZxetG2E3xteEolAkeb2WDkVrlxPtibeAncMg==", "cpu": [ "arm64" ], @@ -1748,12 +1688,15 @@ "optional": true, "os": [ "darwin" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.50.1.tgz", - "integrity": "sha512-2ofU89lEpDYhdLAbRdeyz/kX3Y2lpYc6ShRnDjY35bZhd2ipuDMDi6ZTQ9NIag94K28nFMofdnKeHR7BT0CATw==", + "node_modules/@oxc-parser/binding-darwin-x64": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-darwin-x64/-/binding-darwin-x64-0.99.0.tgz", + "integrity": "sha512-WVonp40fPPxo5Gs0POTI57iEFv485TvNKOHMwZRhigwZRhZY2accEAkYIhei9eswF4HN5B44Wybkz7Gd1Qr/5Q==", "cpu": [ "x64" ], @@ -1761,25 +1704,15 @@ "optional": true, "os": [ "darwin" - ] - }, - "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.50.1.tgz", - "integrity": "sha512-wOsE6H2u6PxsHY/BeFHA4VGQN3KUJFZp7QJBmDYI983fgxq5Th8FDkVuERb2l9vDMs1D5XhOrhBrnqcEY6l8ZA==", - "cpu": [ - "arm64" ], - "license": "MIT", - 
"optional": true, - "os": [ - "freebsd" - ] + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.50.1.tgz", - "integrity": "sha512-A/xeqaHTlKbQggxCqispFAcNjycpUEHP52mwMQZUNqDUJFFYtPHCXS1VAG29uMlDzIVr+i00tSFWFLivMcoIBQ==", + "node_modules/@oxc-parser/binding-freebsd-x64": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-freebsd-x64/-/binding-freebsd-x64-0.99.0.tgz", + "integrity": "sha512-H30bjOOttPmG54gAqu6+HzbLEzuNOYO2jZYrIq4At+NtLJwvNhXz28Hf5iEAFZIH/4hMpLkM4VN7uc+5UlNW3Q==", "cpu": [ "x64" ], @@ -1787,12 +1720,15 @@ "optional": true, "os": [ "freebsd" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.50.1.tgz", - "integrity": "sha512-54v4okehwl5TaSIkpp97rAHGp7t3ghinRd/vyC1iXqXMfjYUTm7TfYmCzXDoHUPTTf36L8pr0E7YsD3CfB3ZDg==", + "node_modules/@oxc-parser/binding-linux-arm-gnueabihf": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.99.0.tgz", + "integrity": "sha512-0Z/Th0SYqzSRDPs6tk5lQdW0i73UCupnim3dgq2oW0//UdLonV/5wIZCArfKGC7w9y4h8TxgXpgtIyD1kKzzlQ==", "cpu": [ "arm" ], @@ -1800,12 +1736,15 @@ "optional": true, "os": [ "linux" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.50.1.tgz", - "integrity": "sha512-p/LaFyajPN/0PUHjv8TNyxLiA7RwmDoVY3flXHPSzqrGcIp/c2FjwPPP5++u87DGHtw+5kSH5bCJz0mvXngYxw==", + "node_modules/@oxc-parser/binding-linux-arm-musleabihf": { + "version": "0.99.0", 
+ "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.99.0.tgz", + "integrity": "sha512-xo0wqNd5bpbzQVNpAIFbHk1xa+SaS/FGBABCd942SRTnrpxl6GeDj/s1BFaGcTl8MlwlKVMwOcyKrw/2Kdfquw==", "cpu": [ "arm" ], @@ -1813,12 +1752,15 @@ "optional": true, "os": [ "linux" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.50.1.tgz", - "integrity": "sha512-2AbMhFFkTo6Ptna1zO7kAXXDLi7H9fGTbVaIq2AAYO7yzcAsuTNWPHhb2aTA6GPiP+JXh85Y8CiS54iZoj4opw==", + "node_modules/@oxc-parser/binding-linux-arm64-gnu": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.99.0.tgz", + "integrity": "sha512-u26I6LKoLTPTd4Fcpr0aoAtjnGf5/ulMllo+QUiBhupgbVCAlaj4RyXH/mvcjcsl2bVBv9E/gYJZz2JjxQWXBA==", "cpu": [ "arm64" ], @@ -1826,12 +1768,15 @@ "optional": true, "os": [ "linux" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.50.1.tgz", - "integrity": "sha512-Cgef+5aZwuvesQNw9eX7g19FfKX5/pQRIyhoXLCiBOrWopjo7ycfB292TX9MDcDijiuIJlx1IzJz3IoCPfqs9w==", + "node_modules/@oxc-parser/binding-linux-arm64-musl": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.99.0.tgz", + "integrity": "sha512-qhftDo2D37SqCEl3ZTa367NqWSZNb1Ddp34CTmShLKFrnKdNiUn55RdokLnHtf1AL5ssaQlYDwBECX7XiBWOhw==", "cpu": [ "arm64" ], @@ -1839,38 +1784,325 @@ "optional": true, "os": [ "linux" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.50.1", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.50.1.tgz", - "integrity": "sha512-RPhTwWMzpYYrHrJAS7CmpdtHNKtt2Ueo+BlLBjfZEhYBhK00OsEqM08/7f+eohiF6poe0YRDDd8nAvwtE/Y62Q==", + "node_modules/@oxc-parser/binding-linux-riscv64-gnu": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.99.0.tgz", + "integrity": "sha512-zxn/xkf519f12FKkpL5XwJipsylfSSnm36h6c1zBDTz4fbIDMGyIhHfWfwM7uUmHo9Aqw1pLxFpY39Etv398+Q==", "cpu": [ - "loong64" + "riscv64" ], "license": "MIT", "optional": true, "os": [ "linux" - ] + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } }, - "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.50.1.tgz", - "integrity": "sha512-eSGMVQw9iekut62O7eBdbiccRguuDgiPMsw++BVUg+1K7WjZXHOg/YOT9SWMzPZA+w98G+Fa1VqJgHZOHHnY0Q==", + "node_modules/@oxc-parser/binding-linux-s390x-gnu": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.99.0.tgz", + "integrity": "sha512-Y1eSDKDS5E4IVC7Oxw+NbYAKRmJPMJTIjW+9xOWwteDHkFqpocKe0USxog+Q1uhzalD9M0p9eXWEWdGQCMDBMQ==", "cpu": [ - "ppc64" + "s390x" ], "license": "MIT", "optional": true, "os": [ "linux" - ] - }, + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-x64-gnu": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.99.0.tgz", + "integrity": "sha512-YVJMfk5cFWB8i2/nIrbk6n15bFkMHqWnMIWkVx7r2KwpTxHyFMfu2IpeVKo1ITDSmt5nBrGdLHD36QRlu2nDLg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-linux-x64-musl": { + "version": "0.99.0", + "resolved": 
"https://registry.npmjs.org/@oxc-parser/binding-linux-x64-musl/-/binding-linux-x64-musl-0.99.0.tgz", + "integrity": "sha512-2+SDPrie5f90A1b9EirtVggOgsqtsYU5raZwkDYKyS1uvJzjqHCDhG/f4TwQxHmIc5YkczdQfwvN91lwmjsKYQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-wasm32-wasi": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-wasm32-wasi/-/binding-wasm32-wasi-0.99.0.tgz", + "integrity": "sha512-DKA4j0QerUWSMADziLM5sAyM7V53Fj95CV9SjP77bPfEfT7MnvFKnneaRMqPK1cpzjAGiQF52OBUIKyk0dwOQA==", + "cpu": [ + "wasm32" + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.0.7" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@oxc-parser/binding-win32-arm64-msvc": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.99.0.tgz", + "integrity": "sha512-EaB3AvsxqdNUhh9FOoAxRZ2L4PCRwDlDb//QXItwyOJrX7XS+uGK9B1KEUV4FZ/7rDhHsWieLt5e07wl2Ti5AQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-parser/binding-win32-x64-msvc": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-parser/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.99.0.tgz", + "integrity": "sha512-sJN1Q8h7ggFOyDn0zsHaXbP/MklAVUvhrbq0LA46Qum686P3SZQHjbATqJn9yaVEvaSKXCshgl0vQ1gWkGgpcQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "node_modules/@oxc-project/types": { + "version": "0.99.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.99.0.tgz", + "integrity": 
"sha512-LLDEhXB7g1m5J+woRSgfKsFPS3LhR9xRhTeIoEBm5WrkwMxn6eZ0Ld0c0K5eHB57ChZX6I3uSmmLjZ8pcjlRcw==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "node_modules/@polka/url": { + "version": "1.0.0-next.29", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", + "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz", + "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz", + "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz", + "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz", + "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==", + "cpu": [ + "x64" + ], + "license": "MIT", + 
"optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz", + "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz", + "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz", + "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz", + "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz", + "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + 
"os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz", + "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz", + "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz", + "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz", + "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz", + "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + 
"os": [ + "linux" + ] + }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.50.1.tgz", - "integrity": "sha512-S208ojx8a4ciIPrLgazF6AgdcNJzQE4+S9rsmOmDJkusvctii+ZvEuIC4v/xFqzbuP8yDjn73oBlNDgF6YGSXQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz", + "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==", "cpu": [ "riscv64" ], @@ -1881,9 +2113,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.50.1.tgz", - "integrity": "sha512-3Ag8Ls1ggqkGUvSZWYcdgFwriy2lWo+0QlYgEFra/5JGtAd6C5Hw59oojx1DeqcA2Wds2ayRgvJ4qxVTzCHgzg==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz", + "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==", "cpu": [ "riscv64" ], @@ -1894,9 +2126,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.50.1.tgz", - "integrity": "sha512-t9YrKfaxCYe7l7ldFERE1BRg/4TATxIg+YieHQ966jwvo7ddHJxPj9cNFWLAzhkVsbBvNA4qTbPVNsZKBO4NSg==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz", + "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==", "cpu": [ "s390x" ], @@ -1907,9 +2139,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.50.1", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.50.1.tgz", - "integrity": "sha512-MCgtFB2+SVNuQmmjHf+wfI4CMxy3Tk8XjA5Z//A0AKD7QXUYFMQcns91K6dEHBvZPCnhJSyDWLApk40Iq/H3tA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz", + "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==", "cpu": [ "x64" ], @@ -1920,9 +2152,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.50.1.tgz", - "integrity": "sha512-nEvqG+0jeRmqaUMuwzlfMKwcIVffy/9KGbAGyoa26iu6eSngAYQ512bMXuqqPrlTyfqdlB9FVINs93j534UJrg==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz", + "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==", "cpu": [ "x64" ], @@ -1932,10 +2164,23 @@ "linux" ] }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz", + "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.50.1.tgz", - "integrity": "sha512-RDsLm+phmT3MJd9SNxA9MNuEAO/J2fhW8GXk62G/B4G7sLVumNFbRwDL6v5NrESb48k+QMqdGbHgEtfU0LCpbA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz", + "integrity": 
"sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==", "cpu": [ "arm64" ], @@ -1946,9 +2191,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.50.1.tgz", - "integrity": "sha512-hpZB/TImk2FlAFAIsoElM3tLzq57uxnGYwplg6WDyAxbYczSi8O2eQ+H2Lx74504rwKtZ3N2g4bCUkiamzS6TQ==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz", + "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==", "cpu": [ "arm64" ], @@ -1959,9 +2204,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.50.1.tgz", - "integrity": "sha512-SXjv8JlbzKM0fTJidX4eVsH+Wmnp0/WcD8gJxIZyR6Gay5Qcsmdbi9zVtnbkGPG8v2vMR1AD06lGWy5FLMcG7A==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz", + "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==", "cpu": [ "ia32" ], @@ -1971,10 +2216,23 @@ "win32" ] }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz", + "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.50.1.tgz", - "integrity": 
"sha512-StxAO/8ts62KZVRAm4JZYq9+NqNsV7RvimNK+YM7ry//zebEH6meuugqW/P5OFUCjyQgui+9fUxT6d5NShvMvA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz", + "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==", "cpu": [ "x64" ], @@ -1985,9 +2243,9 @@ ] }, "node_modules/@ryanatkn/eslint-config": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/@ryanatkn/eslint-config/-/eslint-config-0.9.0.tgz", - "integrity": "sha512-RF42tZfJo2CYE4E3clQRBm9bVHMpL5ErR3HfWaxbiuL1aGraehegsiXMsr1L4BiKpSP55ZO8vvCr1ibUaSRIrQ==", + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/@ryanatkn/eslint-config/-/eslint-config-0.10.1.tgz", + "integrity": "sha512-fHQ5PyFriflVj/fiF9m4SoUnipyK/Of522HL3+YA5TD2lKdJueA5c4wxucxkuFanuZ1FvsCBjGN/wMHO94HNHA==", "dev": true, "license": "Unlicense", "dependencies": { @@ -2006,37 +2264,21 @@ } }, "node_modules/@standard-schema/spec": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", - "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", "devOptional": true, "license": "MIT" }, "node_modules/@sveltejs/acorn-typescript": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.5.tgz", - "integrity": "sha512-IwQk4yfwLdibDlrXVE04jTZYlLnwsTT2PIOQQGNLWfjavGifnk1JD1LcZjZaBTRcxZu2FfPfNLOE04DSu9lqtQ==", + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.9.tgz", + "integrity": 
"sha512-lVJX6qEgs/4DOcRTpo56tmKzVPtoWAaVbL4hfO7t7NVwl9AAXzQR6cihesW1BmNMPl+bK6dreu2sOKBP2Q9CIA==", "license": "MIT", "peerDependencies": { "acorn": "^8.9.0" } }, - "node_modules/@sveltejs/adapter-node": { - "version": "5.4.0", - "resolved": "https://registry.npmjs.org/@sveltejs/adapter-node/-/adapter-node-5.4.0.tgz", - "integrity": "sha512-NMsrwGVPEn+J73zH83Uhss/hYYZN6zT3u31R3IHAn3MiKC3h8fjmIAhLfTSOeNHr5wPYfjjMg8E+1gyFgyrEcQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@rollup/plugin-commonjs": "^28.0.1", - "@rollup/plugin-json": "^6.1.0", - "@rollup/plugin-node-resolve": "^16.0.0", - "rollup": "^4.9.5" - }, - "peerDependencies": { - "@sveltejs/kit": "^2.4.0" - } - }, "node_modules/@sveltejs/adapter-static": { "version": "3.0.10", "resolved": "https://registry.npmjs.org/@sveltejs/adapter-static/-/adapter-static-3.0.10.tgz", @@ -2048,9 +2290,9 @@ } }, "node_modules/@sveltejs/kit": { - "version": "2.55.0", - "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.55.0.tgz", - "integrity": "sha512-MdFRjevVxmAknf2NbaUkDF16jSIzXMWd4Nfah0Qp8TtQVoSp3bV4jKt8mX7z7qTUTWvgSaxtR0EG5WJf53gcuA==", + "version": "2.57.1", + "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.57.1.tgz", + "integrity": "sha512-VRdSbB96cI1EnRh09CqmnQqP/YJvET5buj8S6k7CxaJqBJD4bw4fRKDjcarAj/eX9k2eHifQfDH8NtOh+ZxxPw==", "devOptional": true, "license": "MIT", "dependencies": { @@ -2077,7 +2319,7 @@ "@opentelemetry/api": "^1.0.0", "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0 || ^7.0.0", "svelte": "^4.0.0 || ^5.0.0-next.0", - "typescript": "^5.3.3", + "typescript": "^5.3.3 || ^6.0.0", "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0 || ^8.0.0" }, "peerDependenciesMeta": { @@ -2111,13 +2353,13 @@ } }, "node_modules/@sveltejs/vite-plugin-svelte-inspector": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-5.0.1.tgz", - "integrity": 
"sha512-ubWshlMk4bc8mkwWbg6vNvCeT7lGQojE3ijDh3QTR6Zr/R+GXxsGbyH4PExEPpiFmqPhYiVSVmHBjUcVc1JIrA==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-5.0.2.tgz", + "integrity": "sha512-TZzRTcEtZffICSAoZGkPSl6Etsj2torOVrx6Uw0KpXxrec9Gg6jFWQ60Q3+LmNGfZSxHRCZL7vXVZIWmuV50Ig==", "devOptional": true, "license": "MIT", "dependencies": { - "debug": "^4.4.1" + "obug": "^2.1.0" }, "engines": { "node": "^20.19 || ^22.12 || >=24" @@ -2163,6 +2405,13 @@ "devOptional": true, "license": "MIT" }, + "node_modules/@types/deno": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@types/deno/-/deno-2.5.0.tgz", + "integrity": "sha512-g8JS38vmc0S87jKsFzre+0ZyMOUDHPVokEJymSCRlL57h6f/FdKPWBXgdFh3Z8Ees9sz11qt9VWELU9Y9ZkiVw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", @@ -2177,22 +2426,15 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "24.10.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz", - "integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==", + "version": "24.12.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.12.0.tgz", + "integrity": "sha512-GYDxsZi3ChgmckRT9HPU0WEhKLP08ev/Yfcq2AstjrDASOYCSXeyjDsHg4v5t4jOj7cyDX3vmprafKlWIG9MXQ==", "devOptional": true, "license": "MIT", "dependencies": { "undici-types": "~7.16.0" } }, - "node_modules/@types/resolve": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", - "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/trusted-types": { "version": "2.0.7", "resolved": 
"https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", @@ -2200,21 +2442,20 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.48.1.tgz", - "integrity": "sha512-X63hI1bxl5ohelzr0LY5coufyl0LJNthld+abwxpCoo6Gq+hSqhKwci7MUWkXo67mzgUK6YFByhmaHmUcuBJmA==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.1.tgz", + "integrity": "sha512-Gn3aqnvNl4NGc6x3/Bqk1AOn0thyTU9bqDRhiRnUWezgvr2OnhYCWCgC8zXXRVqBsIL1pSDt7T9nJUe0oM0kDQ==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.48.1", - "@typescript-eslint/type-utils": "8.48.1", - "@typescript-eslint/utils": "8.48.1", - "@typescript-eslint/visitor-keys": "8.48.1", - "graphemer": "^1.4.0", - "ignore": "^7.0.0", + "@eslint-community/regexpp": "^4.12.2", + "@typescript-eslint/scope-manager": "8.57.1", + "@typescript-eslint/type-utils": "8.57.1", + "@typescript-eslint/utils": "8.57.1", + "@typescript-eslint/visitor-keys": "8.57.1", + "ignore": "^7.0.5", "natural-compare": "^1.4.0", - "ts-api-utils": "^2.1.0" + "ts-api-utils": "^2.4.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2224,8 +2465,8 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.48.1", - "eslint": "^8.57.0 || ^9.0.0", + "@typescript-eslint/parser": "^8.57.1", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, @@ -2240,17 +2481,17 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.48.1.tgz", - "integrity": "sha512-PC0PDZfJg8sP7cmKe6L3QIL8GZwU5aRvUFedqSIpw3B+QjRSUZeeITC2M5XKeMXEzL6wccN196iy3JLwKNvDVA==", + "version": "8.57.1", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.57.1.tgz", + "integrity": "sha512-k4eNDan0EIMTT/dUKc/g+rsJ6wcHYhNPdY19VoX/EOtaAG8DLtKCykhrUnuHPYvinn5jhAPgD2Qw9hXBwrahsw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.48.1", - "@typescript-eslint/types": "8.48.1", - "@typescript-eslint/typescript-estree": "8.48.1", - "@typescript-eslint/visitor-keys": "8.48.1", - "debug": "^4.3.4" + "@typescript-eslint/scope-manager": "8.57.1", + "@typescript-eslint/types": "8.57.1", + "@typescript-eslint/typescript-estree": "8.57.1", + "@typescript-eslint/visitor-keys": "8.57.1", + "debug": "^4.4.3" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2260,20 +2501,20 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.48.1.tgz", - "integrity": "sha512-HQWSicah4s9z2/HifRPQ6b6R7G+SBx64JlFQpgSSHWPKdvCZX57XCbszg/bapbRsOEv42q5tayTYcEFpACcX1w==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.57.1.tgz", + "integrity": "sha512-vx1F37BRO1OftsYlmG9xay1TqnjNVlqALymwWVuYTdo18XuKxtBpCj1QlzNIEHlvlB27osvXFWptYiEWsVdYsg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.48.1", - "@typescript-eslint/types": "^8.48.1", - "debug": "^4.3.4" + "@typescript-eslint/tsconfig-utils": "^8.57.1", + "@typescript-eslint/types": "^8.57.1", + "debug": "^4.4.3" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2287,14 +2528,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.48.1", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.48.1.tgz", - "integrity": "sha512-rj4vWQsytQbLxC5Bf4XwZ0/CKd362DkWMUkviT7DCS057SK64D5lH74sSGzhI6PDD2HCEq02xAP9cX68dYyg1w==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.57.1.tgz", + "integrity": "sha512-hs/QcpCwlwT2L5S+3fT6gp0PabyGk4Q0Rv2doJXA0435/OpnSR3VRgvrp8Xdoc3UAYSg9cyUjTeFXZEPg/3OKg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.48.1", - "@typescript-eslint/visitor-keys": "8.48.1" + "@typescript-eslint/types": "8.57.1", + "@typescript-eslint/visitor-keys": "8.57.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2305,9 +2546,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.48.1.tgz", - "integrity": "sha512-k0Jhs4CpEffIBm6wPaCXBAD7jxBtrHjrSgtfCjUvPp9AZ78lXKdTR8fxyZO5y4vWNlOvYXRtngSZNSn+H53Jkw==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.57.1.tgz", + "integrity": "sha512-0lgOZB8cl19fHO4eI46YUx2EceQqhgkPSuCGLlGi79L2jwYY1cxeYc1Nae8Aw1xjgW3PKVDLlr3YJ6Bxx8HkWg==", "dev": true, "license": "MIT", "engines": { @@ -2322,17 +2563,17 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.48.1.tgz", - "integrity": "sha512-1jEop81a3LrJQLTf/1VfPQdhIY4PlGDBc/i67EVWObrtvcziysbLN3oReexHOM6N3jyXgCrkBsZpqwH0hiDOQg==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.57.1.tgz", + "integrity": "sha512-+Bwwm0ScukFdyoJsh2u6pp4S9ktegF98pYUU0hkphOOqdMB+1sNQhIz8y5E9+4pOioZijrkfNO/HUJVAFFfPKA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.48.1", - 
"@typescript-eslint/typescript-estree": "8.48.1", - "@typescript-eslint/utils": "8.48.1", - "debug": "^4.3.4", - "ts-api-utils": "^2.1.0" + "@typescript-eslint/types": "8.57.1", + "@typescript-eslint/typescript-estree": "8.57.1", + "@typescript-eslint/utils": "8.57.1", + "debug": "^4.4.3", + "ts-api-utils": "^2.4.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2342,15 +2583,14 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/types": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.48.1.tgz", - "integrity": "sha512-+fZ3LZNeiELGmimrujsDCT4CRIbq5oXdHe7chLiW8qzqyPMnn1puNstCrMNVAqwcl2FdIxkuJ4tOs/RFDBVc/Q==", - "dev": true, + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.1.tgz", + "integrity": "sha512-S29BOBPJSFUiblEl6RzPPjJt6w25A6XsBqRVDt53tA/tlL8q7ceQNZHTjPeONt/3S7KRI4quk+yP9jK2WjBiPQ==", "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2361,21 +2601,21 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.48.1.tgz", - "integrity": "sha512-/9wQ4PqaefTK6POVTjJaYS0bynCgzh6ClJHGSBj06XEHjkfylzB+A3qvyaXnErEZSaxhIo4YdyBgq6j4RysxDg==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.57.1.tgz", + "integrity": "sha512-ybe2hS9G6pXpqGtPli9Gx9quNV0TWLOmh58ADlmZe9DguLq0tiAKVjirSbtM1szG6+QH6rVXyU6GTLQbWnMY+g==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.48.1", - "@typescript-eslint/tsconfig-utils": "8.48.1", - "@typescript-eslint/types": "8.48.1", - "@typescript-eslint/visitor-keys": "8.48.1", - 
"debug": "^4.3.4", - "minimatch": "^9.0.4", - "semver": "^7.6.0", + "@typescript-eslint/project-service": "8.57.1", + "@typescript-eslint/tsconfig-utils": "8.57.1", + "@typescript-eslint/types": "8.57.1", + "@typescript-eslint/visitor-keys": "8.57.1", + "debug": "^4.4.3", + "minimatch": "^10.2.2", + "semver": "^7.7.3", "tinyglobby": "^0.2.15", - "ts-api-utils": "^2.1.0" + "ts-api-utils": "^2.4.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2388,43 +2628,56 @@ "typescript": ">=4.8.4 <6.0.0" } }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/balanced-match": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", + "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "18 || 20 || >=22" + } + }, "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz", + "integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==", "dev": true, "license": "MIT", "dependencies": { - "balanced-match": "^1.0.0" + "balanced-match": "^4.0.2" + }, + "engines": { + "node": "18 || 20 || >=22" } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", + 
"integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", "dev": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "brace-expansion": "^2.0.1" + "brace-expansion": "^5.0.2" }, "engines": { - "node": ">=16 || 14 >=14.17" + "node": "18 || 20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/@typescript-eslint/utils": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.48.1.tgz", - "integrity": "sha512-fAnhLrDjiVfey5wwFRwrweyRlCmdz5ZxXz2G/4cLn0YDLjTapmN4gcCsTBR1N2rWnZSDeWpYtgLDsJt+FpmcwA==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.57.1.tgz", + "integrity": "sha512-XUNSJ/lEVFttPMMoDVA2r2bwrl8/oPx8cURtczkSEswY5T3AeLmCy+EKWQNdL4u0MmAHOjcWrqJp2cdvgjn8dQ==", "dev": true, "license": "MIT", "dependencies": { - "@eslint-community/eslint-utils": "^4.7.0", - "@typescript-eslint/scope-manager": "8.48.1", - "@typescript-eslint/types": "8.48.1", - "@typescript-eslint/typescript-estree": "8.48.1" + "@eslint-community/eslint-utils": "^4.9.1", + "@typescript-eslint/scope-manager": "8.57.1", + "@typescript-eslint/types": "8.57.1", + "@typescript-eslint/typescript-estree": "8.57.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2434,19 +2687,19 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.48.1.tgz", - "integrity": "sha512-BmxxndzEWhE4TIEEMBs8lP3MBWN3jFPs/p6gPm/wkv02o41hI6cq9AuSmGAaTTHPtA1FTi2jBre4A9rm5ZmX+Q==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.57.1.tgz", + 
"integrity": "sha512-YWnmJkXbofiz9KbnbbwuA2rpGkFPLbAIetcCNO6mJ8gdhdZ/v7WDXsoGFAJuM6ikUFKTlSQnjWnVO4ux+UzS6A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.48.1", - "eslint-visitor-keys": "^4.2.1" + "@typescript-eslint/types": "8.57.1", + "eslint-visitor-keys": "^5.0.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2456,18 +2709,31 @@ "url": "https://opencollective.com/typescript-eslint" } }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz", + "integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, "node_modules/@vitest/expect": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.15.tgz", - "integrity": "sha512-Gfyva9/GxPAWXIWjyGDli9O+waHDC0Q0jaLdFP1qPAUUfo1FEXPXUfUkp3eZA0sSq340vPycSyOlYUeM15Ft1w==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.1.0.tgz", + "integrity": "sha512-EIxG7k4wlWweuCLG9Y5InKFwpMEOyrMb6ZJ1ihYu02LVj/bzUwn2VMU+13PinsjRW75XnITeFrQBMH5+dLvCDA==", "devOptional": true, "license": "MIT", "dependencies": { - "@standard-schema/spec": "^1.0.0", + "@standard-schema/spec": "^1.1.0", "@types/chai": "^5.2.2", - "@vitest/spy": "4.0.15", - "@vitest/utils": "4.0.15", - "chai": "^6.2.1", + "@vitest/spy": "4.1.0", + "@vitest/utils": "4.1.0", + "chai": "^6.2.2", "tinyrainbow": "^3.0.3" }, "funding": { @@ -2475,13 +2741,13 @@ } }, "node_modules/@vitest/mocker": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.15.tgz", - "integrity": 
"sha512-CZ28GLfOEIFkvCFngN8Sfx5h+Se0zN+h4B7yOsPVCcgtiO7t5jt9xQh2E1UkFep+eb9fjyMfuC5gBypwb07fvQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.1.0.tgz", + "integrity": "sha512-evxREh+Hork43+Y4IOhTo+h5lGmVRyjqI739Rz4RlUPqwrkFFDF6EMvOOYjTx4E8Tl6gyCLRL8Mu7Ry12a13Tw==", "devOptional": true, "license": "MIT", "dependencies": { - "@vitest/spy": "4.0.15", + "@vitest/spy": "4.1.0", "estree-walker": "^3.0.3", "magic-string": "^0.30.21" }, @@ -2490,7 +2756,7 @@ }, "peerDependencies": { "msw": "^2.4.9", - "vite": "^6.0.0 || ^7.0.0-0" + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0-0" }, "peerDependenciesMeta": { "msw": { @@ -2502,9 +2768,9 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.15.tgz", - "integrity": "sha512-SWdqR8vEv83WtZcrfLNqlqeQXlQLh2iilO1Wk1gv4eiHKjEzvgHb2OVc3mIPyhZE6F+CtfYjNlDJwP5MN6Km7A==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.1.0.tgz", + "integrity": "sha512-3RZLZlh88Ib0J7NQTRATfc/3ZPOnSUn2uDBUoGNn5T36+bALixmzphN26OUD3LRXWkJu4H0s5vvUeqBiw+kS0A==", "devOptional": true, "license": "MIT", "dependencies": { @@ -2515,13 +2781,13 @@ } }, "node_modules/@vitest/runner": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.15.tgz", - "integrity": "sha512-+A+yMY8dGixUhHmNdPUxOh0la6uVzun86vAbuMT3hIDxMrAOmn5ILBHm8ajrqHE0t8R9T1dGnde1A5DTnmi3qw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.1.0.tgz", + "integrity": "sha512-Duvx2OzQ7d6OjchL+trw+aSrb9idh7pnNfxrklo14p3zmNL4qPCDeIJAK+eBKYjkIwG96Bc6vYuxhqDXQOWpoQ==", "devOptional": true, "license": "MIT", "dependencies": { - "@vitest/utils": "4.0.15", + "@vitest/utils": "4.1.0", "pathe": "^2.0.3" }, "funding": { @@ -2529,13 +2795,14 @@ } }, "node_modules/@vitest/snapshot": { - "version": "4.0.15", - "resolved": 
"https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.15.tgz", - "integrity": "sha512-A7Ob8EdFZJIBjLjeO0DZF4lqR6U7Ydi5/5LIZ0xcI+23lYlsYJAfGn8PrIWTYdZQRNnSRlzhg0zyGu37mVdy5g==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.1.0.tgz", + "integrity": "sha512-0Vy9euT1kgsnj1CHttwi9i9o+4rRLEaPRSOJ5gyv579GJkNpgJK+B4HSv/rAWixx2wdAFci1X4CEPjiu2bXIMg==", "devOptional": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "4.0.15", + "@vitest/pretty-format": "4.1.0", + "@vitest/utils": "4.1.0", "magic-string": "^0.30.21", "pathe": "^2.0.3" }, @@ -2544,9 +2811,9 @@ } }, "node_modules/@vitest/spy": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.15.tgz", - "integrity": "sha512-+EIjOJmnY6mIfdXtE/bnozKEvTC4Uczg19yeZ2vtCz5Yyb0QQ31QWVQ8hswJ3Ysx/K2EqaNsVanjr//2+P3FHw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.1.0.tgz", + "integrity": "sha512-pz77k+PgNpyMDv2FV6qmk5ZVau6c3R8HC8v342T2xlFxQKTrSeYw9waIJG8KgV9fFwAtTu4ceRzMivPTH6wSxw==", "devOptional": true, "license": "MIT", "funding": { @@ -2554,13 +2821,14 @@ } }, "node_modules/@vitest/utils": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.15.tgz", - "integrity": "sha512-HXjPW2w5dxhTD0dLwtYHDnelK3j8sR8cWIaLxr22evTyY6q8pRCjZSmhRWVjBaOVXChQd6AwMzi9pucorXCPZA==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.1.0.tgz", + "integrity": "sha512-XfPXT6a8TZY3dcGY8EdwsBulFCIw+BeeX0RZn2x/BtiY/75YGh8FeWGG8QISN/WhaqSrE2OrlDgtF8q5uhOTmw==", "devOptional": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "4.0.15", + "@vitest/pretty-format": "4.1.0", + "convert-source-map": "^2.0.0", "tinyrainbow": "^3.0.3" }, "funding": { @@ -2568,19 +2836,28 @@ } }, "node_modules/@webref/css": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@webref/css/-/css-8.2.0.tgz", - "integrity": 
"sha512-BSTwlyJwR2LotmT6GTmO5WIPPORr+4lU39vDBWNVEFnLo9w3XYCuHU4lmmd8OY5Zj9ykadg6pfJ/1cFHxzyr3w==", + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@webref/css/-/css-8.4.1.tgz", + "integrity": "sha512-8DTncc0dhWJ4lVbi9rhLVyMNm+YEYrsFLRbdjgMxPupjNHcAdXiT1s4ZWJXzN4ckUvYQKTjLJKtZWc6tsR4FIQ==", "dev": true, "license": "MIT", "peerDependencies": { - "css-tree": "^3.1.0" + "css-tree": "^3.2.1" } }, + "node_modules/@xterm/xterm": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@xterm/xterm/-/xterm-6.0.0.tgz", + "integrity": "sha512-TQwDdQGtwwDt+2cgKDLn0IRaSxYu1tSUjgKarSDkUM0ZNiSRXFpjxEsvc/Zgc5kq5omJ+V0a8/kIM2WD3sMOYg==", + "license": "MIT", + "workspaces": [ + "addons/*" + ] + }, "node_modules/acorn": { - "version": "8.15.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", - "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", "license": "MIT", "bin": { "acorn": "bin/acorn" @@ -2610,9 +2887,9 @@ } }, "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz", + "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==", "dev": true, "license": "MIT", "dependencies": { @@ -2716,9 +2993,9 @@ } }, "node_modules/chai": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.1.tgz", - "integrity": "sha512-p4Z49OGG5W/WBCPSS/dH3jQ73kD6tiMmUM+bckNK6Jr5JHMG3k9bg/BvKR8lKmtVBKmOiuVaV2ws8s9oSbwysg==", + "version": "6.2.2", + "resolved": 
"https://registry.npmjs.org/chai/-/chai-6.2.2.tgz", + "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==", "devOptional": true, "license": "MIT", "engines": { @@ -2743,16 +3020,15 @@ } }, "node_modules/chokidar": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", - "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", - "dev": true, + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-5.0.0.tgz", + "integrity": "sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==", "license": "MIT", "dependencies": { - "readdirp": "^4.0.1" + "readdirp": "^5.0.0" }, "engines": { - "node": ">= 14.16.0" + "node": ">= 20.19.0" }, "funding": { "url": "https://paulmillr.com/funding/" @@ -2787,13 +3063,6 @@ "dev": true, "license": "MIT" }, - "node_modules/commondir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==", - "dev": true, - "license": "MIT" - }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -2801,6 +3070,13 @@ "dev": true, "license": "MIT" }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "devOptional": true, + "license": "MIT" + }, "node_modules/cookie": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", @@ -2827,14 +3103,14 @@ } }, "node_modules/css-tree": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/css-tree/-/css-tree-3.1.0.tgz", - "integrity": "sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-3.2.1.tgz", + "integrity": "sha512-X7sjQzceUhu1u7Y/ylrRZFU2FS6LRiFVp6rKLPg23y3x3c3DOKAwuXGDp+PAGjh6CSnCjYeAul8pcT8bAl+lSA==", "devOptional": true, "license": "MIT", "dependencies": { - "mdn-data": "2.12.2", - "source-map-js": "^1.0.1" + "mdn-data": "2.27.1", + "source-map-js": "^1.2.1" }, "engines": { "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" @@ -2854,34 +3130,45 @@ } }, "node_modules/cssstyle": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.3.tgz", - "integrity": "sha512-OytmFH+13/QXONJcC75QNdMtKpceNk3u8ThBjyyYjkEcy/ekBwR1mMAuNvi3gdBPW3N5TlCzQ0WZw8H0lN/bDw==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.7.tgz", + "integrity": "sha512-7D2EPVltRrsTkhpQmksIu+LxeWAIEk6wRDMJ1qljlv+CKHJM+cJLlfhWIzNA44eAsHXSNe3+vO6DW1yCYx8SuQ==", "devOptional": true, "license": "MIT", "dependencies": { - "@asamuzakjp/css-color": "^4.0.3", - "@csstools/css-syntax-patches-for-csstree": "^1.0.14", - "css-tree": "^3.1.0" + "@asamuzakjp/css-color": "^4.1.1", + "@csstools/css-syntax-patches-for-csstree": "^1.0.21", + "css-tree": "^3.1.0", + "lru-cache": "^11.2.4" }, "engines": { "node": ">=20" } }, "node_modules/data-urls": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-6.0.0.tgz", - "integrity": "sha512-BnBS08aLUM+DKamupXs3w2tJJoqU+AkaE/+6vQxi/G/DPmIZFJJp9Dkb1kM03AZx8ADehDUZgsNxju3mPXZYIA==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-6.0.1.tgz", + "integrity": "sha512-euIQENZg6x8mj3fO6o9+fOW8MimUI4PpD/fZBhJfeioZVy9TUpM4UY7KjQNVZFlqwJ0UdzRDzkycB997HEq1BQ==", "devOptional": true, "license": "MIT", "dependencies": { - "whatwg-mimetype": "^4.0.0", - "whatwg-url": 
"^15.0.0" + "whatwg-mimetype": "^5.0.0", + "whatwg-url": "^15.1.0" }, "engines": { "node": ">=20" } }, + "node_modules/data-urls/node_modules/whatwg-mimetype": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-5.0.0.tgz", + "integrity": "sha512-sXcNcHOC51uPGF0P/D4NVtrkjSU2fNsm9iog4ZvZJsL3rjoDAzXZhkm2MWt1y+PUdggKAYVoMAIYcs78wJ51Cw==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=20" + } + }, "node_modules/date-fns": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz", @@ -2948,9 +3235,9 @@ "license": "MIT" }, "node_modules/dotenv": { - "version": "17.2.4", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.4.tgz", - "integrity": "sha512-mudtfb4zRB4bVvdj0xRo+e6duH1csJRM8IukBqfTRvHotn9+LBXB8ynAidP9zHqoRC/fsllXgk4kCKlR21fIhw==", + "version": "17.3.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.3.1.tgz", + "integrity": "sha512-IO8C/dzEb6O3F9/twg6ZLXz164a2fhTnEWb95H23Dm4OuN+92NmEAlTrupP9VW6Jm3sO26tQlqyvyi4CsnY9GA==", "license": "BSD-2-Clause", "engines": { "node": ">=12" @@ -2973,19 +3260,18 @@ } }, "node_modules/es-module-lexer": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", - "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.0.0.tgz", + "integrity": "sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==", "devOptional": true, "license": "MIT" }, "node_modules/esbuild": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", - "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", + "version": "0.27.4", + "resolved": 
"https://registry.npmjs.org/esbuild/-/esbuild-0.27.4.tgz", + "integrity": "sha512-Rq4vbHnYkK5fws5NF7MYTU68FPRE1ajX7heQ/8QXXWqNgqqJ/GkmmyxIzUnf2Sr/bakf8l54716CcMGHYhMrrQ==", "hasInstallScript": true, "license": "MIT", - "peer": true, "bin": { "esbuild": "bin/esbuild" }, @@ -2993,32 +3279,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.27.3", - "@esbuild/android-arm": "0.27.3", - "@esbuild/android-arm64": "0.27.3", - "@esbuild/android-x64": "0.27.3", - "@esbuild/darwin-arm64": "0.27.3", - "@esbuild/darwin-x64": "0.27.3", - "@esbuild/freebsd-arm64": "0.27.3", - "@esbuild/freebsd-x64": "0.27.3", - "@esbuild/linux-arm": "0.27.3", - "@esbuild/linux-arm64": "0.27.3", - "@esbuild/linux-ia32": "0.27.3", - "@esbuild/linux-loong64": "0.27.3", - "@esbuild/linux-mips64el": "0.27.3", - "@esbuild/linux-ppc64": "0.27.3", - "@esbuild/linux-riscv64": "0.27.3", - "@esbuild/linux-s390x": "0.27.3", - "@esbuild/linux-x64": "0.27.3", - "@esbuild/netbsd-arm64": "0.27.3", - "@esbuild/netbsd-x64": "0.27.3", - "@esbuild/openbsd-arm64": "0.27.3", - "@esbuild/openbsd-x64": "0.27.3", - "@esbuild/openharmony-arm64": "0.27.3", - "@esbuild/sunos-x64": "0.27.3", - "@esbuild/win32-arm64": "0.27.3", - "@esbuild/win32-ia32": "0.27.3", - "@esbuild/win32-x64": "0.27.3" + "@esbuild/aix-ppc64": "0.27.4", + "@esbuild/android-arm": "0.27.4", + "@esbuild/android-arm64": "0.27.4", + "@esbuild/android-x64": "0.27.4", + "@esbuild/darwin-arm64": "0.27.4", + "@esbuild/darwin-x64": "0.27.4", + "@esbuild/freebsd-arm64": "0.27.4", + "@esbuild/freebsd-x64": "0.27.4", + "@esbuild/linux-arm": "0.27.4", + "@esbuild/linux-arm64": "0.27.4", + "@esbuild/linux-ia32": "0.27.4", + "@esbuild/linux-loong64": "0.27.4", + "@esbuild/linux-mips64el": "0.27.4", + "@esbuild/linux-ppc64": "0.27.4", + "@esbuild/linux-riscv64": "0.27.4", + "@esbuild/linux-s390x": "0.27.4", + "@esbuild/linux-x64": "0.27.4", + "@esbuild/netbsd-arm64": "0.27.4", + "@esbuild/netbsd-x64": "0.27.4", + "@esbuild/openbsd-arm64": 
"0.27.4", + "@esbuild/openbsd-x64": "0.27.4", + "@esbuild/openharmony-arm64": "0.27.4", + "@esbuild/sunos-x64": "0.27.4", + "@esbuild/win32-arm64": "0.27.4", + "@esbuild/win32-ia32": "0.27.4", + "@esbuild/win32-x64": "0.27.4" } }, "node_modules/escape-string-regexp": { @@ -3035,25 +3321,25 @@ } }, "node_modules/eslint": { - "version": "9.39.1", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.1.tgz", - "integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==", + "version": "9.39.4", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.4.tgz", + "integrity": "sha512-XoMjdBOwe/esVgEvLmNsD3IRHkm7fbKIUGvrleloJXUZgDHig2IPWNniv+GwjyJXzuNqVjlr5+4yVUZjycJwfQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.21.1", + "@eslint/config-array": "^0.21.2", "@eslint/config-helpers": "^0.4.2", "@eslint/core": "^0.17.0", - "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.39.1", + "@eslint/eslintrc": "^3.3.5", + "@eslint/js": "9.39.4", "@eslint/plugin-kit": "^0.4.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", - "ajv": "^6.12.4", + "ajv": "^6.14.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", @@ -3072,7 +3358,7 @@ "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", + "minimatch": "^3.1.5", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, @@ -3095,9 +3381,9 @@ } }, "node_modules/eslint-plugin-svelte": { - "version": "3.13.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-svelte/-/eslint-plugin-svelte-3.13.1.tgz", - "integrity": "sha512-Ng+kV/qGS8P/isbNYVE3sJORtubB+yLEcYICMkUWNaDTb0SwZni/JhAYXh/Dz/q2eThUwWY0VMPZ//KYD1n3eQ==", + "version": "3.15.2", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-svelte/-/eslint-plugin-svelte-3.15.2.tgz", + "integrity": "sha512-k4Nsjs3bHujeEnnckoTM4mFYR1e8Mb9l2rTwNdmYiamA+Tjzn8X+2F+fuSP2w4VbXYhn2bmySyACQYdmUDW2Cg==", "dev": true, "license": "MIT", "dependencies": { @@ -3119,7 +3405,7 @@ "url": "https://github.com/sponsors/ota-meshi" }, "peerDependencies": { - "eslint": "^8.57.1 || ^9.0.0", + "eslint": "^8.57.1 || ^9.0.0 || ^10.0.0", "svelte": "^3.37.0 || ^4.0.0 || ^5.0.0" }, "peerDependenciesMeta": { @@ -3183,9 +3469,9 @@ } }, "node_modules/esquery": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", - "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz", + "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==", "dev": true, "license": "BSD-3-Clause", "dependencies": { @@ -3196,12 +3482,13 @@ } }, "node_modules/esrap": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/esrap/-/esrap-2.2.2.tgz", - "integrity": "sha512-zA6497ha+qKvoWIK+WM9NAh5ni17sKZKhbS5B3PoYbBvaYHZWoS33zmFybmyqpn07RLUxSmn+RCls2/XF+d0oQ==", + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/esrap/-/esrap-2.2.4.tgz", + "integrity": "sha512-suICpxAmZ9A8bzJjEl/+rLJiDKC0X4gYWUxT6URAWBLvlXmtbZd5ySMu/N2ZGEtMCAmflUDPSehrP9BQcsGcSg==", "license": "MIT", "dependencies": { - "@jridgewell/sourcemap-codec": "^1.4.15" + "@jridgewell/sourcemap-codec": "^1.4.15", + "@typescript-eslint/types": "^8.2.0" } }, "node_modules/esrecurse": { @@ -3248,9 +3535,9 @@ } }, "node_modules/expect-type": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.2.tgz", - "integrity": "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==", + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", "devOptional": true, "license": "Apache-2.0", "engines": { @@ -3341,9 +3628,9 @@ } }, "node_modules/flatted": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", - "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.1.tgz", + "integrity": "sha512-IxfVbRFVlV8V/yRaGzk0UVIcsKKHMSfYw66T/u4nTwlWteQePsxe//LjudR1AMX4tZW3WFCh3Zqa/sjlqpbURQ==", "dev": true, "license": "ISC" }, @@ -3361,16 +3648,6 @@ "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", @@ -3385,9 +3662,9 @@ } }, "node_modules/globals": { - "version": "16.3.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-16.3.0.tgz", - "integrity": "sha512-bqWEnJ1Nt3neqx2q5SFfGS8r/ahumIakg3HcwtNlrVlwXIeNumWn/c7Pn/wKzGhf6SaW6H6uWXLqC30STCMchQ==", + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz", + "integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==", "dev": true, "license": "MIT", "engines": { @@ -3397,13 +3674,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/graphemer": { - "version": "1.4.0", - "resolved": 
"https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", - "dev": true, - "license": "MIT" - }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -3414,39 +3684,26 @@ "node": ">=8" } }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/hono": { - "version": "4.10.7", - "resolved": "https://registry.npmjs.org/hono/-/hono-4.10.7.tgz", - "integrity": "sha512-icXIITfw/07Q88nLSkB9aiUrd8rYzSweK681Kjo/TSggaGbOX4RRyxxm71v+3PC8C/j+4rlxGeoTRxQDkaJkUw==", + "version": "4.12.8", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.8.tgz", + "integrity": "sha512-VJCEvtrezO1IAR+kqEYnxUOoStaQPGrCmX3j4wDTNOcD1uRPFpGlwQUIW8niPuvHXaTUxeOUl5MMDGrl+tmO9A==", "license": "MIT", "engines": { "node": ">=16.9.0" } }, "node_modules/html-encoding-sniffer": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", - "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-6.0.0.tgz", + "integrity": "sha512-CV9TW3Y3f8/wT0BRFc1/KAVQ3TUHiXmaAb6VW9vtiMFf7SLoMd1PdAc4W3KFOFETBJUb90KatHqlsZMWV+R9Gg==", "devOptional": true, "license": "MIT", "dependencies": { - "whatwg-encoding": "^3.1.1" + "@exodus/bytes": "^1.6.0" }, "engines": { - "node": ">=18" + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" } }, "node_modules/http-proxy-agent": { @@ -3477,19 +3734,6 @@ 
"node": ">= 14" } }, - "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -3527,22 +3771,6 @@ "node": ">=0.8.19" } }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - "dev": true, - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -3566,13 +3794,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-module": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", - "integrity": "sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g==", - "dev": true, - "license": "MIT" - }, "node_modules/is-potential-custom-element-name": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", @@ -3597,9 +3818,9 @@ "license": "ISC" }, "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "dev": true, "license": "MIT", "dependencies": { @@ -3610,18 +3831,19 @@ } }, "node_modules/jsdom": { - "version": "27.2.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.2.0.tgz", - "integrity": "sha512-454TI39PeRDW1LgpyLPyURtB4Zx1tklSr6+OFOipsxGUH1WMTvk6C65JQdrj455+DP2uJ1+veBEHTGFKWVLFoA==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.4.0.tgz", + "integrity": "sha512-mjzqwWRD9Y1J1KUi7W97Gja1bwOOM5Ug0EZ6UDK3xS7j7mndrkwozHtSblfomlzyB4NepioNt+B2sOSzczVgtQ==", "devOptional": true, "license": "MIT", "dependencies": { - "@acemir/cssom": "^0.9.23", - "@asamuzakjp/dom-selector": "^6.7.4", - "cssstyle": "^5.3.3", + "@acemir/cssom": "^0.9.28", + "@asamuzakjp/dom-selector": "^6.7.6", + "@exodus/bytes": "^1.6.0", + "cssstyle": "^5.3.4", "data-urls": "^6.0.0", "decimal.js": "^10.6.0", - "html-encoding-sniffer": "^4.0.0", + "html-encoding-sniffer": "^6.0.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.6", "is-potential-custom-element-name": "^1.0.1", @@ -3631,7 +3853,6 @@ "tough-cookie": "^6.0.0", "w3c-xmlserializer": "^5.0.0", "webidl-conversions": "^8.0.0", - "whatwg-encoding": "^3.1.1", "whatwg-mimetype": "^4.0.0", "whatwg-url": "^15.1.0", "ws": "^8.18.3", @@ -3764,11 +3985,11 @@ "license": "MIT" }, "node_modules/lru-cache": { - "version": "11.2.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.2.tgz", - "integrity": "sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==", + "version": "11.2.7", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.7.tgz", + "integrity": "sha512-aY/R+aEsRelme17KGQa/1ZSIpLpNYYrhcrepKTZgE+W3WM16YMCaPwOHLHsmopZHELU0Ojin1lPVxKR0MihncA==", "devOptional": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "engines": { "node": "20 
|| >=22" } @@ -3783,16 +4004,16 @@ } }, "node_modules/mdn-data": { - "version": "2.12.2", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz", - "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==", + "version": "2.27.1", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.27.1.tgz", + "integrity": "sha512-9Yubnt3e8A0OKwxYSXyhLymGW4sCufcLG6VdiDdUGVkPhpqLxlvP5vl1983gQjJl3tqbrM731mjaZaP68AgosQ==", "devOptional": true, "license": "CC0-1.0" }, "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", "dev": true, "license": "ISC", "dependencies": { @@ -3877,9 +4098,9 @@ } }, "node_modules/openai": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/openai/-/openai-6.10.0.tgz", - "integrity": "sha512-ITxOGo7rO3XRMiKA5l7tQ43iNNu+iXGFAcf2t+aWVzzqRaS0i7m1K2BhxNdaveB+5eENhO0VY1FkiZzhBk4v3A==", + "version": "6.29.0", + "resolved": "https://registry.npmjs.org/openai/-/openai-6.29.0.tgz", + "integrity": "sha512-YxoArl2BItucdO89/sN6edksV0x47WUTgkgVfCgX7EuEMhbirENsgYe5oO4LTjBL9PtdKtk2WqND1gSLcTd2yw==", "license": "Apache-2.0", "bin": { "openai": "bin/cli" @@ -4025,13 +4246,6 @@ "node": ">=8" } }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "dev": true, - "license": "MIT" - }, "node_modules/pathe": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", @@ 
-4039,6 +4253,103 @@ "devOptional": true, "license": "MIT" }, + "node_modules/pg": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz", + "integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pg-connection-string": "^2.12.0", + "pg-pool": "^3.13.0", + "pg-protocol": "^1.13.0", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.3.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz", + "integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.12.0.tgz", + "integrity": "sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.13.0.tgz", + "integrity": "sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + 
"node_modules/pg-protocol": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.13.0.tgz", + "integrity": "sha512-zzdvXfS6v89r6v7OcFCHfHlyG/wvry1ALxZo4LqgUoy7W9xhBDMaqOuMiF3qEV45VqsN6rdlcehHrfDtlCPc8w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -4060,9 +4371,9 @@ } }, "node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", "devOptional": true, "funding": [ { @@ -4183,9 +4494,9 @@ } }, "node_modules/postcss-selector-parser": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz", - "integrity": 
"sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", + "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", "dev": true, "license": "MIT", "dependencies": { @@ -4196,6 +4507,49 @@ "node": ">=4" } }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz", + "integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -4207,9 +4561,9 @@ } }, 
"node_modules/prettier": { - "version": "3.7.4", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.7.4.tgz", - "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.8.1.tgz", + "integrity": "sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==", "license": "MIT", "bin": { "prettier": "bin/prettier.cjs" @@ -4242,13 +4596,12 @@ } }, "node_modules/readdirp": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", - "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", - "dev": true, + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-5.0.0.tgz", + "integrity": "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==", "license": "MIT", "engines": { - "node": ">= 14.18.0" + "node": ">= 20.19.0" }, "funding": { "type": "individual", @@ -4265,27 +4618,6 @@ "node": ">=0.10.0" } }, - "node_modules/resolve": { - "version": "1.22.10", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", - "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/resolve-from": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", @@ -4297,9 +4629,9 @@ } }, "node_modules/rollup": { - "version": "4.50.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.50.1.tgz", - 
"integrity": "sha512-78E9voJHwnXQMiQdiqswVLZwJIzdBKJ1GdI5Zx6XwoFKUIk09/sSrr+05QFzvYb8q6Y9pPV45zzDuYa3907TZA==", + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz", + "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==", "devOptional": true, "license": "MIT", "dependencies": { @@ -4313,27 +4645,31 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.50.1", - "@rollup/rollup-android-arm64": "4.50.1", - "@rollup/rollup-darwin-arm64": "4.50.1", - "@rollup/rollup-darwin-x64": "4.50.1", - "@rollup/rollup-freebsd-arm64": "4.50.1", - "@rollup/rollup-freebsd-x64": "4.50.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.50.1", - "@rollup/rollup-linux-arm-musleabihf": "4.50.1", - "@rollup/rollup-linux-arm64-gnu": "4.50.1", - "@rollup/rollup-linux-arm64-musl": "4.50.1", - "@rollup/rollup-linux-loongarch64-gnu": "4.50.1", - "@rollup/rollup-linux-ppc64-gnu": "4.50.1", - "@rollup/rollup-linux-riscv64-gnu": "4.50.1", - "@rollup/rollup-linux-riscv64-musl": "4.50.1", - "@rollup/rollup-linux-s390x-gnu": "4.50.1", - "@rollup/rollup-linux-x64-gnu": "4.50.1", - "@rollup/rollup-linux-x64-musl": "4.50.1", - "@rollup/rollup-openharmony-arm64": "4.50.1", - "@rollup/rollup-win32-arm64-msvc": "4.50.1", - "@rollup/rollup-win32-ia32-msvc": "4.50.1", - "@rollup/rollup-win32-x64-msvc": "4.50.1", + "@rollup/rollup-android-arm-eabi": "4.59.0", + "@rollup/rollup-android-arm64": "4.59.0", + "@rollup/rollup-darwin-arm64": "4.59.0", + "@rollup/rollup-darwin-x64": "4.59.0", + "@rollup/rollup-freebsd-arm64": "4.59.0", + "@rollup/rollup-freebsd-x64": "4.59.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.59.0", + "@rollup/rollup-linux-arm-musleabihf": "4.59.0", + "@rollup/rollup-linux-arm64-gnu": "4.59.0", + "@rollup/rollup-linux-arm64-musl": "4.59.0", + "@rollup/rollup-linux-loong64-gnu": "4.59.0", + "@rollup/rollup-linux-loong64-musl": "4.59.0", + 
"@rollup/rollup-linux-ppc64-gnu": "4.59.0", + "@rollup/rollup-linux-ppc64-musl": "4.59.0", + "@rollup/rollup-linux-riscv64-gnu": "4.59.0", + "@rollup/rollup-linux-riscv64-musl": "4.59.0", + "@rollup/rollup-linux-s390x-gnu": "4.59.0", + "@rollup/rollup-linux-x64-gnu": "4.59.0", + "@rollup/rollup-linux-x64-musl": "4.59.0", + "@rollup/rollup-openbsd-x64": "4.59.0", + "@rollup/rollup-openharmony-arm64": "4.59.0", + "@rollup/rollup-win32-arm64-msvc": "4.59.0", + "@rollup/rollup-win32-ia32-msvc": "4.59.0", + "@rollup/rollup-win32-x64-gnu": "4.59.0", + "@rollup/rollup-win32-x64-msvc": "4.59.0", "fsevents": "~2.3.2" } }, @@ -4350,13 +4686,6 @@ "node": ">=6" } }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "devOptional": true, - "license": "MIT" - }, "node_modules/saxes": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", @@ -4378,9 +4707,9 @@ "license": "MIT" }, "node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "dev": true, "license": "ISC", "bin": { @@ -4452,6 +4781,16 @@ "node": ">=0.10.0" } }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, "node_modules/stackback": { "version": "0.0.2", 
"resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", @@ -4460,9 +4799,9 @@ "license": "MIT" }, "node_modules/std-env": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", - "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-4.0.0.tgz", + "integrity": "sha512-zUMPtQ/HBY3/50VbpkupYHbRroTRZJPRLvreamgErJVys0ceuzMkD44J/QjqhHjOzK42GQ3QZIeFG1OYfOtKqQ==", "devOptional": true, "license": "MIT" }, @@ -4492,23 +4831,10 @@ "node": ">=8" } }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/svelte": { - "version": "5.54.0", - "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.54.0.tgz", - "integrity": "sha512-TTDxwYnHkova6Wsyj1PGt9TByuWqvMoeY1bQiuAf2DM/JeDSMw7FjRKzk8K/5mJ99vGOKhbCqTDpyAKwjp4igg==", + "node_modules/svelte": { + "version": "5.55.3", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.55.3.tgz", + "integrity": "sha512-dS1N+i3bA1v+c4UDb750MlN5vCO82G6vxh8HeTsPsTdJ1BLsN1zxSyDlIdBBqUjqZ/BxEwM8UrFf98aaoVnZFQ==", "license": "MIT", "dependencies": { "@jridgewell/remapping": "^2.3.4", @@ -4522,7 +4848,7 @@ "clsx": "^2.1.1", "devalue": "^5.6.4", "esm-env": "^1.2.1", - "esrap": "^2.2.2", + "esrap": "^2.2.4", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", @@ -4556,10 +4882,40 @@ "typescript": ">=5.0.0" } }, + "node_modules/svelte-check/node_modules/chokidar": { + "version": "4.0.3", + "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/svelte-check/node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/svelte-eslint-parser": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/svelte-eslint-parser/-/svelte-eslint-parser-1.4.0.tgz", - "integrity": "sha512-fjPzOfipR5S7gQ/JvI9r2H8y9gMGXO3JtmrylHLLyahEMquXI0lrebcjT+9/hNgDej0H7abTyox5HpHmW1PSWA==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/svelte-eslint-parser/-/svelte-eslint-parser-1.6.0.tgz", + "integrity": "sha512-qoB1ehychT6OxEtQAqc/guSqLS20SlA53Uijl7x375s8nlUT0lb9ol/gzraEEatQwsyPTJo87s2CmKL9Xab+Uw==", "dev": true, "license": "MIT", "dependencies": { @@ -4568,11 +4924,12 @@ "espree": "^10.0.0", "postcss": "^8.4.49", "postcss-scss": "^4.0.9", - "postcss-selector-parser": "^7.0.0" + "postcss-selector-parser": "^7.0.0", + "semver": "^7.7.2" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0", - "pnpm": "10.18.3" + "pnpm": "10.30.3" }, "funding": { "url": "https://github.com/sponsors/ota-meshi" @@ -4616,9 +4973,9 @@ "license": "MIT" }, "node_modules/tinyexec": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", - "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", + "version": 
"1.0.4", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.4.tgz", + "integrity": "sha512-u9r3uZC0bdpGOXtlxUIdwf9pkmvhqJdrVCH9fapQtgy/OeTTMZ1nqH7agtvEfmGui6e1XxjcdrlxvxJvc3sMqw==", "devOptional": true, "license": "MIT", "engines": { @@ -4643,9 +5000,9 @@ } }, "node_modules/tinyrainbow": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz", - "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.1.0.tgz", + "integrity": "sha512-Bf+ILmBgretUrdJxzXM0SgXLZ3XfiaUuOj/IKQHuTXip+05Xn+uyEYdVg0kYDipTBcLrCVyUzAPz7QmArb0mmw==", "devOptional": true, "license": "MIT", "engines": { @@ -4653,22 +5010,22 @@ } }, "node_modules/tldts": { - "version": "7.0.19", - "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.19.tgz", - "integrity": "sha512-8PWx8tvC4jDB39BQw1m4x8y5MH1BcQ5xHeL2n7UVFulMPH/3Q0uiamahFJ3lXA0zO2SUyRXuVVbWSDmstlt9YA==", + "version": "7.0.26", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.26.tgz", + "integrity": "sha512-WiGwQjr0qYdNNG8KpMKlSvpxz652lqa3Rd+/hSaDcY4Uo6SKWZq2LAF+hsAhUewTtYhXlorBKgNF3Kk8hnjGoQ==", "devOptional": true, "license": "MIT", "dependencies": { - "tldts-core": "^7.0.19" + "tldts-core": "^7.0.26" }, "bin": { "tldts": "bin/cli.js" } }, "node_modules/tldts-core": { - "version": "7.0.19", - "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.19.tgz", - "integrity": "sha512-lJX2dEWx0SGH4O6p+7FPwYmJ/bu1JbcGJ8RLaG9b7liIgZ85itUVEPbMtWRVrde/0fnDPEPHW10ZsKW3kVsE9A==", + "version": "7.0.26", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.26.tgz", + "integrity": "sha512-5WJ2SqFsv4G2Dwi7ZFVRnz6b2H1od39QME1lc2y5Ew3eWiZMAeqOAfWpRP9jHvhUl881406QtZTODvjttJs+ew==", "devOptional": true, "license": "MIT" }, @@ -4679,683 +5036,225 @@ "devOptional": true, "license": "MIT", "engines": { - "node": ">=6" 
- } - }, - "node_modules/tough-cookie": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.0.tgz", - "integrity": "sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==", - "devOptional": true, - "license": "BSD-3-Clause", - "dependencies": { - "tldts": "^7.0.5" - }, - "engines": { - "node": ">=16" - } - }, - "node_modules/tr46": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-6.0.0.tgz", - "integrity": "sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "punycode": "^2.3.1" - }, - "engines": { - "node": ">=20" - } - }, - "node_modules/ts-algebra": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ts-algebra/-/ts-algebra-2.0.0.tgz", - "integrity": "sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw==", - "license": "MIT" - }, - "node_modules/ts-api-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", - "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18.12" - }, - "peerDependencies": { - "typescript": ">=4.8.4" - } - }, - "node_modules/ts-blank-space": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/ts-blank-space/-/ts-blank-space-0.6.2.tgz", - "integrity": "sha512-hZjcHdHrveEKI67v8OzI90a1bizgoDkY7ekE4fH89qJhZgxvmjfBOv98aibCU7OpKbvV3R9p/qd3DrzZqT1cFQ==", - "license": "Apache-2.0", - "dependencies": { - "typescript": "5.1.6 - 5.9.x" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": 
"sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "license": "0BSD" - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "license": "MIT", - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/typescript": { - "version": "5.9.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", - "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/typescript-eslint": { - "version": "8.48.1", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.48.1.tgz", - "integrity": "sha512-FbOKN1fqNoXp1hIl5KYpObVrp0mCn+CLgn479nmu2IsRMrx2vyv74MmsBLVlhg8qVwNFGbXSp8fh1zp8pEoC2A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/eslint-plugin": "8.48.1", - "@typescript-eslint/parser": "8.48.1", - "@typescript-eslint/typescript-estree": "8.48.1", - "@typescript-eslint/utils": "8.48.1" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <6.0.0" - } - }, - "node_modules/undici-types": { - "version": "7.16.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", - "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", - "devOptional": true, - "license": "MIT" - }, - "node_modules/uri-js": { - "version": 
"4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "dev": true, - "license": "MIT" - }, - "node_modules/vite": { - "version": "7.1.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.5.tgz", - "integrity": "sha512-4cKBO9wR75r0BeIWWWId9XK9Lj6La5X846Zw9dFfzMRw38IlTk2iCcUt6hsyiDRcPidc55ZParFYDXi0nXOeLQ==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "esbuild": "^0.25.0", - "fdir": "^6.5.0", - "picomatch": "^4.0.3", - "postcss": "^8.5.6", - "rollup": "^4.43.0", - "tinyglobby": "^0.2.15" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^20.19.0 || >=22.12.0", - "jiti": ">=1.21.0", - "less": "^4.0.0", - "lightningcss": "^1.21.0", - "sass": "^1.70.0", - "sass-embedded": "^1.70.0", - "stylus": ">=0.54.8", - "sugarss": "^5.0.0", - "terser": "^5.16.0", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "jiti": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } - 
} - }, - "node_modules/vite/node_modules/@esbuild/aix-ppc64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", - "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==", - "cpu": [ - "ppc64" - ], - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/android-arm": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.12.tgz", - "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/android-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz", - "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/android-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.12.tgz", - "integrity": "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/darwin-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", - "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", - "cpu": [ - "arm64" 
- ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/darwin-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz", - "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz", - "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/freebsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz", - "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-arm": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz", - "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz", - "integrity": 
"sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-ia32": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz", - "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-loong64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz", - "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==", - "cpu": [ - "loong64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-mips64el": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz", - "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==", - "cpu": [ - "mips64el" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-ppc64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz", - "integrity": "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==", - "cpu": [ - "ppc64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-riscv64": { - 
"version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz", - "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==", - "cpu": [ - "riscv64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-s390x": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz", - "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==", - "cpu": [ - "s390x" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz", - "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz", - "integrity": "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" + "node": ">=6" } }, - "node_modules/vite/node_modules/@esbuild/netbsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz", - "integrity": "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - 
"netbsd" - ], + "node_modules/tough-cookie": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.1.tgz", + "integrity": "sha512-LktZQb3IeoUWB9lqR5EWTHgW/VTITCXg4D21M+lvybRVdylLrRMnqaIONLVb5mav8vM19m44HIcGq4qASeu2Qw==", + "devOptional": true, + "license": "BSD-3-Clause", + "dependencies": { + "tldts": "^7.0.5" + }, "engines": { - "node": ">=18" + "node": ">=16" } }, - "node_modules/vite/node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz", - "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==", - "cpu": [ - "arm64" - ], + "node_modules/tr46": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-6.0.0.tgz", + "integrity": "sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw==", + "devOptional": true, "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], + "dependencies": { + "punycode": "^2.3.1" + }, "engines": { - "node": ">=18" + "node": ">=20" } }, - "node_modules/vite/node_modules/@esbuild/openbsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz", - "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==", - "cpu": [ - "x64" - ], + "node_modules/ts-algebra": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ts-algebra/-/ts-algebra-2.0.0.tgz", + "integrity": "sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw==", + "license": "MIT" + }, + "node_modules/ts-api-utils": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz", + "integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==", + "dev": true, 
"license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], "engines": { - "node": ">=18" + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" } }, - "node_modules/vite/node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz", - "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ], + "node_modules/ts-blank-space": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/ts-blank-space/-/ts-blank-space-0.6.2.tgz", + "integrity": "sha512-hZjcHdHrveEKI67v8OzI90a1bizgoDkY7ekE4fH89qJhZgxvmjfBOv98aibCU7OpKbvV3R9p/qd3DrzZqT1cFQ==", + "license": "Apache-2.0", + "dependencies": { + "typescript": "5.1.6 - 5.9.x" + }, "engines": { - "node": ">=18" + "node": ">=18.0.0" } }, - "node_modules/vite/node_modules/@esbuild/sunos-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz", - "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==", - "cpu": [ - "x64" - ], + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], + "dependencies": { + "prelude-ls": "^1.2.1" + }, "engines": { - "node": ">=18" + "node": ">= 0.8.0" } }, - 
"node_modules/vite/node_modules/@esbuild/win32-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz", - "integrity": "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, "engines": { - "node": ">=18" + "node": ">=14.17" } }, - "node_modules/vite/node_modules/@esbuild/win32-ia32": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz", - "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==", - "cpu": [ - "ia32" - ], + "node_modules/typescript-eslint": { + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.57.1.tgz", + "integrity": "sha512-fLvZWf+cAGw3tqMCYzGIU6yR8K+Y9NT2z23RwOjlNFF2HwSB3KhdEFI5lSBv8tNmFkkBShSjsCjzx1vahZfISA==", + "dev": true, "license": "MIT", - "optional": true, - "os": [ - "win32" - ], + "dependencies": { + "@typescript-eslint/eslint-plugin": "8.57.1", + "@typescript-eslint/parser": "8.57.1", + "@typescript-eslint/typescript-estree": "8.57.1", + "@typescript-eslint/utils": "8.57.1" + }, "engines": { - "node": ">=18" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", + "typescript": ">=4.8.4 <6.0.0" } }, - "node_modules/vite/node_modules/@esbuild/win32-x64": { - "version": "0.25.12", 
- "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz", - "integrity": "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" } }, - "node_modules/vite/node_modules/esbuild": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", - "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/vite": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", "devOptional": true, - "hasInstallScript": true, "license": "MIT", + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, "bin": { - "esbuild": "bin/esbuild" + 
"vite": "bin/vite.js" }, "engines": { - "node": ">=18" + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.12", - "@esbuild/android-arm": "0.25.12", - "@esbuild/android-arm64": "0.25.12", - "@esbuild/android-x64": "0.25.12", - "@esbuild/darwin-arm64": "0.25.12", - "@esbuild/darwin-x64": "0.25.12", - "@esbuild/freebsd-arm64": "0.25.12", - "@esbuild/freebsd-x64": "0.25.12", - "@esbuild/linux-arm": "0.25.12", - "@esbuild/linux-arm64": "0.25.12", - "@esbuild/linux-ia32": "0.25.12", - "@esbuild/linux-loong64": "0.25.12", - "@esbuild/linux-mips64el": "0.25.12", - "@esbuild/linux-ppc64": "0.25.12", - "@esbuild/linux-riscv64": "0.25.12", - "@esbuild/linux-s390x": "0.25.12", - "@esbuild/linux-x64": "0.25.12", - "@esbuild/netbsd-arm64": "0.25.12", - "@esbuild/netbsd-x64": "0.25.12", - "@esbuild/openbsd-arm64": "0.25.12", - "@esbuild/openbsd-x64": "0.25.12", - "@esbuild/openharmony-arm64": "0.25.12", - "@esbuild/sunos-x64": "0.25.12", - "@esbuild/win32-arm64": "0.25.12", - "@esbuild/win32-ia32": "0.25.12", - "@esbuild/win32-x64": "0.25.12" + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } } }, "node_modules/vitefu": { - "version": "1.1.1", - 
"resolved": "https://registry.npmjs.org/vitefu/-/vitefu-1.1.1.tgz", - "integrity": "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vitefu/-/vitefu-1.1.2.tgz", + "integrity": "sha512-zpKATdUbzbsycPFBN71nS2uzBUQiVnFoOrr2rvqv34S1lcAgMKKkjWleLGeiJlZ8lwCXvtWaRn7R3ZC16SYRuw==", "devOptional": true, "license": "MIT", "workspaces": [ @@ -5364,7 +5263,7 @@ "tests/projects/workspace/packages/*" ], "peerDependencies": { - "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-beta.0" }, "peerDependenciesMeta": { "vite": { @@ -5373,31 +5272,31 @@ } }, "node_modules/vitest": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.15.tgz", - "integrity": "sha512-n1RxDp8UJm6N0IbJLQo+yzLZ2sQCDyl1o0LeugbPWf8+8Fttp29GghsQBjYJVmWq3gBFfe9Hs1spR44vovn2wA==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.1.0.tgz", + "integrity": "sha512-YbDrMF9jM2Lqc++2530UourxZHmkKLxrs4+mYhEwqWS97WJ7wOYEkcr+QfRgJ3PW9wz3odRijLZjHEaRLTNbqw==", "devOptional": true, "license": "MIT", "dependencies": { - "@vitest/expect": "4.0.15", - "@vitest/mocker": "4.0.15", - "@vitest/pretty-format": "4.0.15", - "@vitest/runner": "4.0.15", - "@vitest/snapshot": "4.0.15", - "@vitest/spy": "4.0.15", - "@vitest/utils": "4.0.15", - "es-module-lexer": "^1.7.0", - "expect-type": "^1.2.2", + "@vitest/expect": "4.1.0", + "@vitest/mocker": "4.1.0", + "@vitest/pretty-format": "4.1.0", + "@vitest/runner": "4.1.0", + "@vitest/snapshot": "4.1.0", + "@vitest/spy": "4.1.0", + "@vitest/utils": "4.1.0", + "es-module-lexer": "^2.0.0", + "expect-type": "^1.3.0", "magic-string": "^0.30.21", "obug": "^2.1.1", "pathe": "^2.0.3", "picomatch": "^4.0.3", - "std-env": "^3.10.0", + "std-env": "^4.0.0-rc.1", "tinybench": "^2.9.0", "tinyexec": "^1.0.2", "tinyglobby": "^0.2.15", "tinyrainbow": 
"^3.0.3", - "vite": "^6.0.0 || ^7.0.0", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0-0", "why-is-node-running": "^2.3.0" }, "bin": { @@ -5413,12 +5312,13 @@ "@edge-runtime/vm": "*", "@opentelemetry/api": "^1.9.0", "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", - "@vitest/browser-playwright": "4.0.15", - "@vitest/browser-preview": "4.0.15", - "@vitest/browser-webdriverio": "4.0.15", - "@vitest/ui": "4.0.15", + "@vitest/browser-playwright": "4.1.0", + "@vitest/browser-preview": "4.1.0", + "@vitest/browser-webdriverio": "4.1.0", + "@vitest/ui": "4.1.0", "happy-dom": "*", - "jsdom": "*" + "jsdom": "*", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0-0" }, "peerDependenciesMeta": { "@edge-runtime/vm": { @@ -5447,6 +5347,9 @@ }, "jsdom": { "optional": true + }, + "vite": { + "optional": false } } }, @@ -5464,28 +5367,15 @@ } }, "node_modules/webidl-conversions": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-8.0.0.tgz", - "integrity": "sha512-n4W4YFyz5JzOfQeA8oN7dUYpR+MBP3PIUsn2jLjWXwK5ASUzt0Jc/A5sAUZoCYFJRGF0FBKJ+1JjN43rNdsQzA==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-8.0.1.tgz", + "integrity": "sha512-BMhLD/Sw+GbJC21C/UgyaZX41nPt8bUTg+jWyDeg7e7YN4xOM05YPSIXceACnXVtqyEw/LMClUQMtMZ+PGGpqQ==", "devOptional": true, "license": "BSD-2-Clause", "engines": { "node": ">=20" } }, - "node_modules/whatwg-encoding": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz", - "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "iconv-lite": "0.6.3" - }, - "engines": { - "node": ">=18" - } - }, "node_modules/whatwg-fetch": { "version": "3.6.20", "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", @@ -5561,9 +5451,10 @@ } }, "node_modules/ws": { - "version": "8.18.3", - 
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", - "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==", + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz", + "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==", + "devOptional": true, "license": "MIT", "engines": { "node": ">=10.0.0" @@ -5598,6 +5489,16 @@ "devOptional": true, "license": "MIT" }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/package.json b/package.json index 13cb758c9..90854a220 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "@fuzdev/zzz", "version": "0.0.1", "description": "local-first forge for power users and devs", - "motto": "nice web things for the tired", + "tagline": "nice web things for the tired", "glyph": "💤", "logo": "logo.svg", "logo_alt": "three sleepy z's", @@ -25,7 +25,7 @@ "test": "gro test", "preview": "vite preview", "deploy": "gro deploy", - "serve": "gro build && npm run preview & node dist_server/server/server.js" + "serve": "gro build && npm run preview" }, "type": "module", "engines": { @@ -37,16 +37,20 @@ }, "devDependencies": { "@changesets/changelog-git": "^0.2.1", + "@electric-sql/pglite": "^0.3.16", + "@fuzdev/fuz_app": "^0.12.0", "@fuzdev/fuz_code": "^0.45.1", - "@fuzdev/fuz_css": "^0.57.0", - "@fuzdev/fuz_ui": "^0.191.1", - "@fuzdev/fuz_util": "^0.55.0", + "@fuzdev/fuz_css": "^0.58.0", + "@fuzdev/fuz_ui": "^0.191.4", + "@fuzdev/fuz_util": "^0.56.0", "@jridgewell/trace-mapping": "^0.3.31", - 
"@ryanatkn/eslint-config": "^0.9.0", - "@sveltejs/adapter-node": "^5.4.0", + "@node-rs/argon2": "^2.0.2", + "@ryanatkn/eslint-config": "^0.10.1", + "@sveltejs/acorn-typescript": "^1.0.9", "@sveltejs/adapter-static": "^3.0.10", - "@sveltejs/kit": "^2.55.0", + "@sveltejs/kit": "^2.57.1", "@sveltejs/vite-plugin-svelte": "^6.2.4", + "@types/deno": "^2.5.0", "@types/estree": "^1.0.8", "@types/node": "^24.10.1", "@webref/css": "^8.2.0", @@ -55,26 +59,28 @@ "jsdom": "^27.2.0", "magic-string": "^0.30.21", "ollama": "^0.6.3", + "pg": "^8.20.0", "prettier": "^3.7.4", "prettier-plugin-svelte": "^3.4.1", - "svelte": "^5.54.0", + "svelte": "^5.55.3", "svelte-check": "^4.4.5", "svelte2tsx": "^0.7.52", "tslib": "^2.8.1", "typescript": "^5.9.3", "typescript-eslint": "^8.48.1", + "vite": "^7.3.1", "vitest": "^4.0.15", "zimmerframe": "^1.1.4" }, "dependencies": { "@anthropic-ai/sdk": "^0.71.2", - "@fuzdev/gro": "^0.197.1", + "@fuzdev/blake3_wasm": "^0.1.1", + "@fuzdev/gro": "^0.197.3", "@google/generative-ai": "^0.24.1", - "@hono/node-server": "^1.19.6", - "@hono/node-ws": "^1.2.0", + "@xterm/xterm": "^6.0.0", "date-fns": "^4.1.0", "esm-env": "^1.2.2", - "hono": "^4.10.7", + "hono": "^4.12.7", "openai": "^6.10.0", "zod": "^4.3.6" }, diff --git a/scripts/dev.ts b/scripts/dev.ts new file mode 100644 index 000000000..689901ae2 --- /dev/null +++ b/scripts/dev.ts @@ -0,0 +1,164 @@ +/** + * Dev orchestration script — Rust backend + Vite frontend. + * + * Builds zzz_server, starts it with env from .env.development, + * then starts the Vite dev server with the proxy pointed at + * the Rust backend. Ctrl+C kills both. 
+ * + * Usage: deno task dev + * + * @module + */ + +import {load_env_file} from '@fuzdev/fuz_app/env/dotenv.js'; + +import {runtime, set_permissions} from './setup_helpers.ts'; + +const RUST_BACKEND_PORT = 8999; +const ENV_FILE = '.env.development'; + +// -- Load environment --------------------------------------------------------- + +console.log(`[dev] loading ${ENV_FILE}`); +const env = await load_env_file(runtime, ENV_FILE); +if (!env) { + console.error(`[dev] FATAL: ${ENV_FILE} not found — run: deno task dev:setup`); + Deno.exit(1); +} + +// -- Ensure bootstrap token exists -------------------------------------------- + +const token_path = env.BOOTSTRAP_TOKEN_PATH; +if (token_path) { + try { + await Deno.stat(token_path); + } catch { + // Create directory and token file + const dir = token_path.includes('/') ? token_path.substring(0, token_path.lastIndexOf('/')) : '.'; + await Deno.mkdir(dir, {recursive: true}); + const token_bytes = new Uint8Array(32); + crypto.getRandomValues(token_bytes); + const token = Array.from(token_bytes, (b) => b.toString(16).padStart(2, '0')).join(''); + await Deno.writeTextFile(token_path, token); + await set_permissions(token_path, 0o600); + console.log(`[dev] created bootstrap token at ${token_path}`); + } +} + +// Override port so the Rust backend binds to the same port the Vite proxy expects. +// The Vite proxy reads PUBLIC_SERVER_PROXIED_PORT from the process env. +env.PORT = String(RUST_BACKEND_PORT); +env.PUBLIC_SERVER_PROXIED_PORT = String(RUST_BACKEND_PORT); +env.PUBLIC_WEBSOCKET_URL = `ws://localhost:${RUST_BACKEND_PORT}/api/ws`; + +// Build the merged env for child processes. 
+const child_env: Record = {}; +for (const [key, value] of Object.entries(Deno.env.toObject())) { + child_env[key] = value; +} +for (const [key, value] of Object.entries(env)) { + child_env[key] = value; +} + +// -- Build Rust backend ------------------------------------------------------- + +console.log('[dev] building zzz_server...'); +const build = new Deno.Command('cargo', { + args: ['build', '-p', 'zzz_server'], + stdout: 'inherit', + stderr: 'inherit', +}); + +const build_result = await build.output(); +if (!build_result.success) { + console.error('[dev] FATAL: cargo build failed'); + Deno.exit(1); +} +console.log('[dev] build complete'); + +// -- Start Rust backend ------------------------------------------------------- + +console.log(`[dev] starting zzz_server on port ${RUST_BACKEND_PORT}...`); +const server_process = new Deno.Command('./target/debug/zzz_server', { + args: ['--port', String(RUST_BACKEND_PORT)], + env: child_env, + stdout: 'inherit', + stderr: 'inherit', +}).spawn(); + +// Wait for health check +const health_url = `http://localhost:${RUST_BACKEND_PORT}/health`; +const health_timeout_ms = 30_000; +const health_start = Date.now(); +let healthy = false; + +while (Date.now() - health_start < health_timeout_ms) { + try { + const res = await fetch(health_url); + if (res.ok) { + healthy = true; + break; + } + } catch { + // not ready yet + } + await new Promise((r) => setTimeout(r, 200)); +} + +if (!healthy) { + console.error(`[dev] FATAL: zzz_server did not become healthy within ${health_timeout_ms}ms`); + server_process.kill('SIGTERM'); + Deno.exit(1); +} +console.log('[dev] zzz_server healthy'); + +// -- Start Vite dev server ---------------------------------------------------- + +console.log('[dev] starting vite dev server...'); +const vite_process = new Deno.Command('npx', { + args: ['vite', 'dev'], + env: child_env, + stdout: 'inherit', + stderr: 'inherit', +}).spawn(); + +// -- Shutdown handling 
-------------------------------------------------------- + +let shutting_down = false; + +const shutdown = (): void => { + if (shutting_down) return; + shutting_down = true; + console.log('\n[dev] shutting down...'); + try { + vite_process.kill('SIGTERM'); + } catch { + // already dead + } + try { + server_process.kill('SIGTERM'); + } catch { + // already dead + } +}; + +Deno.addSignalListener('SIGINT', shutdown); +Deno.addSignalListener('SIGTERM', shutdown); + +// Wait for either process to exit, then tear down the other. +const server_status = server_process.status; +const vite_status = vite_process.status; + +const first_exit = await Promise.race([ + server_status.then((s) => ({who: 'zzz_server', status: s})), + vite_status.then((s) => ({who: 'vite', status: s})), +]); + +if (!shutting_down) { + console.log(`[dev] ${first_exit.who} exited (code ${first_exit.status.code}), shutting down...`); + shutdown(); +} + +// Wait for remaining process. +await Promise.allSettled([server_status, vite_status]); +console.log('[dev] done'); diff --git a/scripts/dev_setup.ts b/scripts/dev_setup.ts new file mode 100644 index 000000000..a3dcc2f7f --- /dev/null +++ b/scripts/dev_setup.ts @@ -0,0 +1,22 @@ +/** + * First-time development environment setup. + * + * Idempotent — safe to re-run. Skips steps that are already done. + * + * Usage: deno task dev:setup + * + * @module + */ + +import {setup_env_file} from '@fuzdev/fuz_app/dev/setup.js'; + +import {runtime, set_permissions} from './setup_helpers.ts'; + +console.log('zzz dev setup'); +console.log(); + +console.log('Environment file:'); +await setup_env_file(runtime, '.env.development', '.env.development.example', {set_permissions}); +console.log(); + +console.log('Done. Next: gro dev'); diff --git a/scripts/prod_setup.ts b/scripts/prod_setup.ts new file mode 100644 index 000000000..0dc99dbc3 --- /dev/null +++ b/scripts/prod_setup.ts @@ -0,0 +1,22 @@ +/** + * Production environment setup. + * + * Idempotent — safe to re-run. 
Skips steps that are already done. + * + * Usage: deno task prod:setup + * + * @module + */ + +import {setup_env_file} from '@fuzdev/fuz_app/dev/setup.js'; + +import {runtime, set_permissions} from './setup_helpers.ts'; + +console.log('zzz prod setup'); +console.log(); + +console.log('Environment file:'); +await setup_env_file(runtime, '.env.production', '.env.production.example', {set_permissions}); +console.log(); + +console.log('Done.'); diff --git a/scripts/setup_helpers.ts b/scripts/setup_helpers.ts new file mode 100644 index 000000000..2fd70d357 --- /dev/null +++ b/scripts/setup_helpers.ts @@ -0,0 +1,16 @@ +/** + * Zzz-specific setup helpers. + * + * Provides the Deno runtime and permissions callback for zzz setup scripts. + * Setup utilities are imported directly from `@fuzdev/fuz_app/dev/setup`. + * + * @module + */ + +import {create_deno_runtime} from '@fuzdev/fuz_app/runtime/deno.js'; + +/** Deno runtime for zzz setup scripts. */ +export const runtime = create_deno_runtime([]); + +/** Set file permissions (wrapper around Deno.chmod). */ +export const set_permissions = (path: string, mode: number): Promise => Deno.chmod(path, mode); diff --git a/src/lib/ActionListitem.svelte b/src/lib/ActionListitem.svelte index 0faeb5564..57edad652 100644 --- a/src/lib/ActionListitem.svelte +++ b/src/lib/ActionListitem.svelte @@ -22,7 +22,7 @@ - {/if} - {/snippet} - - -{#snippet children_default(popover: Popover)} - {#if children} - {@render children(popover, () => confirm(popover))} - {:else} - - {/if} -{/snippet} - - diff --git a/src/lib/ContentEditor.svelte b/src/lib/ContentEditor.svelte index 548aa9264..93ad4d4f0 100644 --- a/src/lib/ContentEditor.svelte +++ b/src/lib/ContentEditor.svelte @@ -43,7 +43,7 @@ onsave?: ((value: string) => void) | undefined; } = $props(); - let textarea_el: HTMLTextAreaElement | undefined = $state(); + let textarea_el: HTMLTextAreaElement | undefined = $state.raw(); const token_count = $derived(token_count_prop ?? 
estimate_token_count(content)); @@ -104,7 +104,7 @@ content = new_content; focus(); }} - class="plain icon_button font_size_lg" + class="plain icon-button font_size_lg" > diff --git a/src/lib/ContextmenuEntryToggle.svelte b/src/lib/ContextmenuEntryToggle.svelte index 9691fb0b0..0caa673d3 100644 --- a/src/lib/ContextmenuEntryToggle.svelte +++ b/src/lib/ContextmenuEntryToggle.svelte @@ -22,7 +22,11 @@ children?: Snippet<[enabled: boolean]> | undefined; } = $props(); - if (DEV && label && children) throw new Error('cannot provide both label and children'); + if (DEV) { + $effect.pre(() => { + if (label && children) throw new Error('cannot provide both label and children'); + }); + } const final_children = $derived(children ?? children_default); diff --git a/src/lib/Dashboard.svelte b/src/lib/Dashboard.svelte index f0021eddb..804d3f503 100644 --- a/src/lib/Dashboard.svelte +++ b/src/lib/Dashboard.svelte @@ -10,9 +10,16 @@ import {logo_zzz} from './logos.js'; import NavLink from './NavLink.svelte'; import Glyph from './Glyph.svelte'; - import {GLYPH_ARROW_LEFT, GLYPH_ARROW_RIGHT, GLYPH_PROJECT, GLYPH_TAB} from './glyphs.js'; + import { + GLYPH_ARROW_LEFT, + GLYPH_ARROW_RIGHT, + GLYPH_DESK, + GLYPH_PROJECT, + GLYPH_TAB, + } from './glyphs.js'; import {frontend_context} from './frontend.svelte.js'; import {main_nav_items_default, to_nav_link_href} from './nav.js'; + import {DESK_WIDTH} from './DeskMenu.svelte'; // TODO dashboard should be mounted with Markdown @@ -26,11 +33,12 @@ const SIDEBAR_WIDTH_MAX = 180; const sidebar_width = $derived(app.ui.show_sidebar ? SIDEBAR_WIDTH_MAX : 0); + const desk_width = $derived(app.ui.show_desk_menu && app.ui.desk_pinned ? 
DESK_WIDTH : 0); - let futureclicks = $state(0); + let futureclicks = $state.raw(0); const FUTURECLICKS = 3; // Track if futureclicks has been activated at least once - let futureclicks_activated = $state(false); + let futureclicks_activated = $state.raw(false); onNavigate((navigation) => { // Only reset clicks when navigating away from the root page // and we're not already in activated state @@ -75,7 +83,18 @@ { - if (e.key === '`' && !is_editable(e.target)) { + if ( + e.key === 'Escape' && + app.ui.show_desk_menu && + !app.ui.desk_pinned && + !is_editable(e.target) + ) { + app.ui.toggle_desk_menu(false); + swallow(e); + } else if (e.key === '~' && !is_editable(e.target)) { + app.ui.toggle_desk_menu(); + swallow(e); + } else if (e.key === '`' && !is_editable(e.target)) { app.ui.toggle_sidebar(); swallow(e); } @@ -83,10 +102,15 @@ /> -
+
{@render children()}
@@ -104,7 +128,7 @@ { if (futureclicks_activated) { // If already activated once, toggle immediately when on root @@ -140,9 +164,9 @@ {#snippet children(selected)} {#if typeof link.icon === 'string'} - {link.label} + {link.label} {:else} - + + + + {#if !app.ui.show_desk_menu} + + {/if}
diff --git a/src/lib/DashboardActions.svelte b/src/lib/DashboardActions.svelte index e4c9f5ece..52605f2ab 100644 --- a/src/lib/DashboardActions.svelte +++ b/src/lib/DashboardActions.svelte @@ -15,7 +15,7 @@ // TODO could potentially be removed from the collection by some external process, // so having this state be component-local solves some problems but not all - let selected_action: Action | null = $state(null); + let selected_action: Action | null = $state.raw(null);
@@ -49,7 +49,10 @@ style:grid-template-columns="320px 1fr" style:gap="var(--space_md)" > -
+
- - diff --git a/src/lib/DashboardChats.svelte b/src/lib/DashboardChats.svelte index 8351ceabb..204612828 100644 --- a/src/lib/DashboardChats.svelte +++ b/src/lib/DashboardChats.svelte @@ -23,7 +23,7 @@ -
+
- - diff --git a/src/lib/DiskfileEditorView.svelte b/src/lib/DiskfileEditorView.svelte index 4a0062488..5559e85d3 100644 --- a/src/lib/DiskfileEditorView.svelte +++ b/src/lib/DiskfileEditorView.svelte @@ -27,10 +27,10 @@ const app = frontend_context.get(); // TODO @many refactor, maybe move a collection on `app.diskfiles`? one problem is the contextmenu can't access it without hacking something with context - const editor_state = new DiskfileEditorState({app, diskfile}); + const editor_state = new DiskfileEditorState({app, diskfile}); // TODO make diskfile a getter // Reference to the content editor component - let content_editor: {focus: () => void} | undefined = $state(); + let content_editor: {focus: () => void} | undefined = $state.raw(); // TODO refactor, try to remove $effect(() => { @@ -68,9 +68,9 @@ token_count={editor_state.current_token_count} placeholder={GLYPH_PLACEHOLDER + ' ' + diskfile.path_relative} readonly={false} - attrs={{class: 'height:100% border_radius_0'}} - onsave={async (value) => { - await app.diskfiles.update(diskfile.path, value); + attrs={{class: 'height:100% border-radius:0'}} + onsave={async () => { + await editor_state.save_changes(); }} />
diff --git a/src/lib/DiskfileExplorer.svelte b/src/lib/DiskfileExplorer.svelte index bff985d2d..302f87ec5 100644 --- a/src/lib/DiskfileExplorer.svelte +++ b/src/lib/DiskfileExplorer.svelte @@ -69,9 +69,9 @@ {#if zzz_dir === undefined}
 
{:else if zzz_dir === null} -
+
{:else} -
+
{zzz_dir}
{#snippet children(diskfile)} {@const selected = diskfiles.selected_file_id === diskfile.id} -
+
diff --git a/src/lib/DiskfileTabListitem.svelte b/src/lib/DiskfileTabListitem.svelte index dcb1f6c21..6685f44eb 100644 --- a/src/lib/DiskfileTabListitem.svelte +++ b/src/lib/DiskfileTabListitem.svelte @@ -25,14 +25,14 @@
{ @@ -58,7 +58,7 @@
diff --git a/src/lib/OllamaCreateModel.svelte b/src/lib/OllamaCreateModel.svelte index 6f0d1a18b..cef698863 100644 --- a/src/lib/OllamaCreateModel.svelte +++ b/src/lib/OllamaCreateModel.svelte @@ -33,7 +33,7 @@

create model

- diff --git a/src/lib/OllamaModelDetail.svelte b/src/lib/OllamaModelDetail.svelte index cf90762dc..86bfe9271 100644 --- a/src/lib/OllamaModelDetail.svelte +++ b/src/lib/OllamaModelDetail.svelte @@ -4,6 +4,7 @@ import PendingAnimation from '@fuzdev/fuz_ui/PendingAnimation.svelte'; import Details from '@fuzdev/fuz_ui/Details.svelte'; import type {Snippet} from 'svelte'; + import ConfirmButton from '@fuzdev/fuz_app/ui/ConfirmButton.svelte'; import Glyph from './Glyph.svelte'; import { @@ -15,7 +16,6 @@ GLYPH_DISCONNECT, } from './glyphs.js'; import type {Model} from './model.svelte.js'; - import ConfirmButton from './ConfirmButton.svelte'; import ModelContextmenu from './ModelContextmenu.svelte'; import ModelLink from './ModelLink.svelte'; import {format_short_date} from './time_helpers.js'; @@ -52,7 +52,7 @@ {#if onclose}
diff --git a/src/lib/PartContextmenu.svelte b/src/lib/PartContextmenu.svelte index 4ad846974..45e2062fe 100644 --- a/src/lib/PartContextmenu.svelte +++ b/src/lib/PartContextmenu.svelte @@ -25,7 +25,7 @@ const app = frontend_context.get(); - let show_editor = $state(false); + let show_editor = $state.raw(false); diff --git a/src/lib/PartEditorForDiskfile.svelte b/src/lib/PartEditorForDiskfile.svelte index 61da5481b..4c1e40a5d 100644 --- a/src/lib/PartEditorForDiskfile.svelte +++ b/src/lib/PartEditorForDiskfile.svelte @@ -28,12 +28,12 @@ // Create editor state reference - will be initialized in the effect // TODO @many this initialization is awkward, ideally becomes refactored to mostly derived // maybe this instance is created once, and it gets a thunk for the diskfile? `DikfileEditorState.of(() => diskfile)` - let editor_state: DiskfileEditorState | undefined = $state(); + let editor_state: DiskfileEditorState | undefined = $state.raw(); // Keep track of the content editor for focusing - let content_editor: {focus: () => void} | undefined = $state(); + let content_editor: {focus: () => void} | undefined = $state.raw(); - let show_file_picker = $state(false); + let show_file_picker = $state.raw(false); // TODO probably refactor to avoid the effect, look also at `TODO @many refactor, maybe move a collection on `app.diskfiles`?` // Effect for managing editor state lifecycle diff --git a/src/lib/PartRemoveButton.svelte b/src/lib/PartRemoveButton.svelte index 213ef18cd..5a1fca589 100644 --- a/src/lib/PartRemoveButton.svelte +++ b/src/lib/PartRemoveButton.svelte @@ -1,11 +1,11 @@ - - -
- {#if button} - {@render button(popover)} - {:else} - - {/if} - - {#if popover.visible} -
- {@render popover_content(popover)} -
- {/if} -
diff --git a/src/lib/PromptContextmenu.svelte b/src/lib/PromptContextmenu.svelte index 0e32329e4..19a537272 100644 --- a/src/lib/PromptContextmenu.svelte +++ b/src/lib/PromptContextmenu.svelte @@ -23,7 +23,7 @@ const app = frontend_context.get(); - let show_diskfile_picker = $state(false); + let show_diskfile_picker = $state.raw(false); diff --git a/src/lib/ProviderLink.svelte b/src/lib/ProviderLink.svelte index 156fbb9f1..eba766ec8 100644 --- a/src/lib/ProviderLink.svelte +++ b/src/lib/ProviderLink.svelte @@ -3,6 +3,7 @@ import {resolve} from '$app/paths'; import type {SvelteHTMLElements} from 'svelte/elements'; import {page} from '$app/state'; + import {DEV} from 'esm-env'; import type {Provider} from './provider.svelte.js'; import {GLYPH_PROVIDER} from './glyphs.js'; @@ -27,11 +28,15 @@ fallback?: Snippet | undefined; } = $props(); - if (icon && children) { - console.error('icon and children are mutually exclusive'); - } - if (fallback && fallback_attrs) { - console.error('fallback and fallback_attrs are mutually exclusive'); + if (DEV) { + $effect.pre(() => { + if (icon && children) { + console.error('icon and children are mutually exclusive'); + } + if (fallback && fallback_attrs) { + console.error('fallback and fallback_attrs are mutually exclusive'); + } + }); } const selected = $derived( @@ -41,7 +46,7 @@ {#if provider} - {#if children} {@render children()} {:else} @@ -65,14 +70,3 @@ > missing provider {/if} - - diff --git a/src/lib/SocketMessageQueue.svelte b/src/lib/SocketMessageQueue.svelte index 58d8592ca..e15e99f74 100644 --- a/src/lib/SocketMessageQueue.svelte +++ b/src/lib/SocketMessageQueue.svelte @@ -3,12 +3,12 @@ import {format} from 'date-fns'; import {SvelteMap} from 'svelte/reactivity'; import CopyToClipboard from '@fuzdev/fuz_ui/CopyToClipboard.svelte'; + import ConfirmButton from '@fuzdev/fuz_app/ui/ConfirmButton.svelte'; + import PopoverButton from '@fuzdev/fuz_app/ui/PopoverButton.svelte'; import type {Socket, QueuedMessage, FailedMessage} 
from './socket.svelte.js'; import Glyph from './Glyph.svelte'; import {GLYPH_RETRY, GLYPH_REMOVE, GLYPH_INFO} from './glyphs.js'; - import ConfirmButton from './ConfirmButton.svelte'; - import PopoverButton from './PopoverButton.svelte'; import {format_timestamp} from './time_helpers.js'; import {DURATION_SM} from './helpers.js'; @@ -120,7 +120,7 @@ }; -
+
@@ -135,7 +135,7 @@ {#if socket.connected}
diff --git a/src/lib/TerminalCommandInput.svelte b/src/lib/TerminalCommandInput.svelte new file mode 100644 index 000000000..060694e01 --- /dev/null +++ b/src/lib/TerminalCommandInput.svelte @@ -0,0 +1,38 @@ + + +
+ + +
+ + diff --git a/src/lib/TerminalContextmenu.svelte b/src/lib/TerminalContextmenu.svelte new file mode 100644 index 000000000..d78f99a42 --- /dev/null +++ b/src/lib/TerminalContextmenu.svelte @@ -0,0 +1,26 @@ + + + + +{#snippet entries()} + {#if get_terminal_text} + + {/if} + +{/snippet} diff --git a/src/lib/TerminalPresetBar.svelte b/src/lib/TerminalPresetBar.svelte new file mode 100644 index 000000000..085cafea0 --- /dev/null +++ b/src/lib/TerminalPresetBar.svelte @@ -0,0 +1,124 @@ + + +
+ {#each presets as preset (preset.id)} + + + {#if ondelete} + + {/if} + + {/each} + + {#if oncreate} + {#if adding} + + + + + + + {:else} + + {/if} + {/if} +
+ + diff --git a/src/lib/TerminalRunItem.svelte b/src/lib/TerminalRunItem.svelte new file mode 100644 index 000000000..7cfa0abf2 --- /dev/null +++ b/src/lib/TerminalRunItem.svelte @@ -0,0 +1,139 @@ + + + +
+
+ $ {display_command} + + {#if exited} + + exited {exit_code ?? '?'} + + {:else} + running + {/if} + + {#if onrestart} + + {/if} +
+
+ +
+
+ + +
+
+
+ + diff --git a/src/lib/TerminalRunner.svelte b/src/lib/TerminalRunner.svelte new file mode 100644 index 000000000..5985116de --- /dev/null +++ b/src/lib/TerminalRunner.svelte @@ -0,0 +1,184 @@ + + +
+
+
+ {#each runs as run (run.terminal_id)} +
+ +
+ {/each} +
+
+ + {#if runs.length === 0} +

no commands run yet — use a preset or type a command below

+ {/if} + + {#if error_message} +

{error_message}

+ {/if} + +
+ + +
+
+ + diff --git a/src/lib/TerminalView.svelte b/src/lib/TerminalView.svelte new file mode 100644 index 000000000..400d32929 --- /dev/null +++ b/src/lib/TerminalView.svelte @@ -0,0 +1,202 @@ + + +
+
+ terminal {terminal_id.slice(0, 8)} +
+ + +
+
+
+
+ + diff --git a/src/lib/ThreadContextmenu.svelte b/src/lib/ThreadContextmenu.svelte index 8ea45117d..b99f48d9c 100644 --- a/src/lib/ThreadContextmenu.svelte +++ b/src/lib/ThreadContextmenu.svelte @@ -23,7 +23,7 @@ const app = frontend_context.get(); - let show_model_picker = $state(false); + let show_model_picker = $state.raw(false); diff --git a/src/lib/ThreadListitem.svelte b/src/lib/ThreadListitem.svelte index 7d6b08d49..4217f8dc5 100644 --- a/src/lib/ThreadListitem.svelte +++ b/src/lib/ThreadListitem.svelte @@ -1,8 +1,9 @@ -
{:else} -
+
{#each projects.projects as project (project.id)}

{project.name}

{project.description}

-
+
{#each project.domains as domain (domain.id)} -
+
{domain.name} {#if !domain.ssl} - no SSL + no SSL {/if}
{/each} @@ -60,56 +60,56 @@ diff --git a/src/routes/projects/[project_id]/repos/+page.svelte b/src/routes/projects/[project_id]/repos/+page.svelte index e974b5a5c..b7c6e9b73 100644 --- a/src/routes/projects/[project_id]/repos/+page.svelte +++ b/src/routes/projects/[project_id]/repos/+page.svelte @@ -16,7 +16,7 @@ const project = $derived(projects.current_project); -
+
{#if project} @@ -24,7 +24,7 @@ {/if} -
+
{#if project_viewmodel?.project}

repos

@@ -73,13 +73,13 @@
diff --git a/src/routes/workspaces/+page.svelte b/src/routes/workspaces/+page.svelte new file mode 100644 index 000000000..a18a69655 --- /dev/null +++ b/src/routes/workspaces/+page.svelte @@ -0,0 +1,131 @@ + + +
+
+

Workspaces

+

+ Directories the daemon is watching. Open a workspace to access its files and receive change + events. +

+
+ + +
+

Open Workspace

+
{ + e.preventDefault(); + void handle_open(); + }} + > + + +
+ {#if error_message} +

{error_message}

+ {/if} +
+ + +
+

Open Workspaces

+ {#if app.workspaces.items.by_id.size === 0} +

+ No workspaces open. Use the form above or run zzz <dir> to open one. +

+ {:else} +
    + {#each app.workspaces.items.values as workspace (workspace.id)} +
  • + + +
  • + {/each} +
+ {/if} +
+
+ + diff --git a/src/test/action_event.test.ts b/src/test/action_event.test.ts index f398b1389..a5cc6d758 100644 --- a/src/test/action_event.test.ts +++ b/src/test/action_event.test.ts @@ -1,12 +1,17 @@ -// @slop Claude Opus 4 - // @vitest-environment jsdom -import {test, expect, describe} from 'vitest'; +import {test, describe, assert} from 'vitest'; +import {assert_rejects} from '@fuzdev/fuz_util/testing.js'; -import {create_action_event, create_action_event_from_json} from '$lib/action_event.js'; -import type {ActionEventEnvironment} from '$lib/action_event_types.js'; -import type {ActionSpecUnion} from '$lib/action_spec.js'; +import { + create_action_event, + create_action_event_from_json, +} from '@fuzdev/fuz_app/actions/action_event.js'; +import type { + ActionEventEnvironment, + ActionExecutor, +} from '@fuzdev/fuz_app/actions/action_event_types.js'; +import type {ActionSpecUnion} from '@fuzdev/fuz_app/actions/action_spec.js'; import { ping_action_spec, filer_change_action_spec, @@ -14,12 +19,10 @@ import { completion_create_action_spec, } from '$lib/action_specs.js'; import {create_uuid} from '$lib/zod_helpers.js'; -import type {ActionExecutor} from '$lib/action_types.js'; // Mock environment for testing class TestEnvironment implements ActionEventEnvironment { executor: ActionExecutor = 'frontend'; - peer: any = {}; // Mock peer, not used in tests handlers: Map any>> = new Map(); specs: Map = new Map(); @@ -51,17 +54,17 @@ describe('ActionEvent', () => { const env = new TestEnvironment([ping_action_spec]); const event = create_action_event(env, ping_action_spec, undefined); - expect(event.data.kind).toBe('request_response'); - expect(event.data.phase).toBe('send_request'); - expect(event.data.step).toBe('initial'); - expect(event.data.method).toBe('ping'); - expect(event.data.executor).toBe('frontend'); - expect(event.data.input).toBeUndefined(); - expect(event.data.output).toBe(null); - expect(event.data.error).toBe(null); - 
expect(event.data.request).toBe(null); - expect(event.data.response).toBe(null); - expect(event.data.notification).toBe(null); + assert.strictEqual(event.data.kind, 'request_response'); + assert.strictEqual(event.data.phase, 'send_request'); + assert.strictEqual(event.data.step, 'initial'); + assert.strictEqual(event.data.method, 'ping'); + assert.strictEqual(event.data.executor, 'frontend'); + assert.ok(event.data.input === undefined); + assert.isNull(event.data.output); + assert.isNull(event.data.error); + assert.isNull(event.data.request); + assert.isNull(event.data.response); + assert.isNull(event.data.notification); }); test('creates event with input data', () => { @@ -78,14 +81,14 @@ describe('ActionEvent', () => { const event = create_action_event(env, completion_create_action_spec, input); - expect(event.data.input).toEqual(input); + assert.deepEqual(event.data.input, input); }); test('creates event with specified initial phase', () => { const env = new TestEnvironment([ping_action_spec]); const event = create_action_event(env, ping_action_spec, undefined, 'receive_request'); - expect(event.data.phase).toBe('receive_request'); + assert.strictEqual(event.data.phase, 'receive_request'); }); test('throws for invalid executor/initiator combination', () => { @@ -93,8 +96,9 @@ describe('ActionEvent', () => { env.executor = 'frontend'; // filer_change has initiator: 'backend', so frontend can't initiate send - expect(() => create_action_event(env, filer_change_action_spec, {})).toThrow( - "executor 'frontend' cannot initiate action 'filer_change'", + assert.throws( + () => create_action_event(env, filer_change_action_spec, {}), + /executor 'frontend' cannot initiate action 'filer_change'/, ); }); }); @@ -106,9 +110,9 @@ describe('ActionEvent', () => { event.parse(); - expect(event.data.step).toBe('parsed'); + assert.strictEqual(event.data.step, 'parsed'); // ping has void input, so it should remain undefined - expect(event.data.input).toBeUndefined(); + 
assert.ok(event.data.input === undefined); }); test('parses complex input with validation', () => { @@ -126,8 +130,8 @@ describe('ActionEvent', () => { const event = create_action_event(env, completion_create_action_spec, input); event.parse(); - expect(event.data.step).toBe('parsed'); - expect(event.data.input).toEqual(input); + assert.strictEqual(event.data.step, 'parsed'); + assert.deepEqual(event.data.input, input); }); test('fails on invalid input', () => { @@ -142,10 +146,10 @@ describe('ActionEvent', () => { const event = create_action_event(env, completion_create_action_spec, invalid_input); event.parse(); - expect(event.data.step).toBe('failed'); - expect(event.data.error).toBeDefined(); - expect(event.data.error?.code).toBe(-32602); - expect(event.data.error?.message).toContain('failed to parse input'); + assert.strictEqual(event.data.step, 'failed'); + assert.isDefined(event.data.error); + assert.strictEqual(event.data.error?.code, -32602); + assert.include(event.data.error?.message, 'failed to parse input'); }); test('throws when not in initial step', () => { @@ -155,7 +159,7 @@ describe('ActionEvent', () => { event.parse(); // First parse succeeds // Second parse should throw - expect(() => event.parse()).toThrow("cannot parse from step 'parsed' - must be 'initial'"); + assert.throws(() => event.parse(), /cannot parse from step 'parsed' - must be 'initial'/); }); }); @@ -172,12 +176,12 @@ describe('ActionEvent', () => { await event.handle_async(); - expect(event.data.step).toBe('handled'); + assert.strictEqual(event.data.step, 'handled'); // send_request doesn't produce output - expect(event.data.output).toBe(null); + assert.isNull(event.data.output); // But it should have created a request - expect(event.data.request).toBeDefined(); - expect(event.data.request?.method).toBe('ping'); + assert.isDefined(event.data.request); + assert.strictEqual(event.data.request?.method, 'ping'); }); test('handles missing handler gracefully', async () => { @@ -189,7 
+193,7 @@ describe('ActionEvent', () => { await event.handle_async(); - expect(event.data.step).toBe('handled'); + assert.strictEqual(event.data.step, 'handled'); }); test('captures handler errors', async () => { @@ -205,11 +209,11 @@ describe('ActionEvent', () => { await event.handle_async(); // Handler errors transition to error phase, not directly to failed - expect(event.data.step).toBe('parsed'); - expect(event.data.phase).toBe('send_error'); - expect(event.data.error).toBeDefined(); - expect(event.data.error?.code).toBe(-32603); - expect(event.data.error?.message).toContain('unknown error'); + assert.strictEqual(event.data.step, 'parsed'); + assert.strictEqual(event.data.phase, 'send_error'); + assert.isDefined(event.data.error); + assert.strictEqual(event.data.error?.code, -32603); + assert.include(event.data.error?.message, 'unknown error'); }); test('send_error handler can handle errors gracefully', async () => { @@ -224,8 +228,8 @@ describe('ActionEvent', () => { // Error handler logs and completes successfully env.add_handler('ping', 'send_error', (event) => { error_logged = true; - expect(event.data.error).toBeDefined(); - expect(event.data.error?.message).toContain('primary handler error'); + assert.isDefined(event.data.error); + assert.include(event.data.error?.message, 'primary handler error'); // Error handler completes without throwing }); @@ -234,17 +238,17 @@ describe('ActionEvent', () => { await event.handle_async(); // First error transitions to send_error - expect(event.data.phase).toBe('send_error'); - expect(event.data.step).toBe('parsed'); + assert.strictEqual(event.data.phase, 'send_error'); + assert.strictEqual(event.data.step, 'parsed'); // Handle error phase await event.handle_async(); // Error handler completed successfully - expect(error_logged).toBe(true); - expect(event.data.step).toBe('failed'); - expect(event.data.phase).toBe('send_error'); - expect(event.is_complete()).toBe(true); + assert.ok(error_logged); + 
assert.strictEqual(event.data.step, 'failed'); + assert.strictEqual(event.data.phase, 'send_error'); + assert.ok(event.is_complete()); }); test('receive_error handler can handle errors gracefully', async () => { @@ -254,8 +258,8 @@ describe('ActionEvent', () => { // Error handler can inspect and handle the error env.add_handler('ping', 'receive_error', (event) => { error_handled = true; - expect(event.data.error).toBeDefined(); - expect(event.data.error?.code).toBe(-32603); + assert.isDefined(event.data.error); + assert.strictEqual(event.data.error?.code, -32603); // Could implement retry logic, fallback, logging, etc. }); @@ -285,16 +289,16 @@ describe('ActionEvent', () => { event.parse(); // Should be in receive_error phase - expect(event.data.phase).toBe('receive_error'); - expect(event.data.step).toBe('parsed'); + assert.strictEqual(event.data.phase, 'receive_error'); + assert.strictEqual(event.data.step, 'parsed'); // Handle error phase await event.handle_async(); // Error handler completed successfully - expect(error_handled).toBe(true); - expect(event.data.step).toBe('handled'); - expect(event.is_complete()).toBe(true); + assert.ok(error_handled); + assert.strictEqual(event.data.step, 'handled'); + assert.ok(event.is_complete()); }); test('validates output for phases that expect it', async () => { @@ -310,9 +314,9 @@ describe('ActionEvent', () => { await event.handle_async(); - expect(event.data.step).toBe('handled'); - expect(event.data.output).toBeDefined(); - expect(event.data.output).toHaveProperty('ping_id'); + assert.strictEqual(event.data.step, 'handled'); + assert.isDefined(event.data.output); + assert.ok(Object.hasOwn(event.data.output as any, 'ping_id')); }); test('throws when not in parsed step', async () => { @@ -320,9 +324,8 @@ describe('ActionEvent', () => { const event = create_action_event(env, ping_action_spec, undefined); // Not parsed yet - await expect(event.handle_async()).rejects.toThrow( - "cannot handle from step 'initial' - must be 
'parsed'", - ); + const error = await assert_rejects(() => event.handle_async()); + assert.include(error.message, "cannot handle from step 'initial' - must be 'parsed'"); }); test('is no-op when already failed', async () => { @@ -338,15 +341,15 @@ describe('ActionEvent', () => { event.parse(); // Should be failed after parsing invalid input - expect(event.data.step).toBe('failed'); + assert.strictEqual(event.data.step, 'failed'); const original_error = event.data.error; // handle_async should be no-op await event.handle_async(); // State should remain unchanged - expect(event.data.step).toBe('failed'); - expect(event.data.error).toBe(original_error); + assert.strictEqual(event.data.step, 'failed'); + assert.strictEqual(event.data.error, original_error); }); }); @@ -362,8 +365,8 @@ describe('ActionEvent', () => { event.handle_sync(); - expect(event.data.step).toBe('handled'); - expect(event.data.output).toEqual(output); + assert.strictEqual(event.data.step, 'handled'); + assert.deepEqual(event.data.output, output); }); test('throws for async actions', () => { @@ -371,8 +374,9 @@ describe('ActionEvent', () => { const event = create_action_event(env, ping_action_spec, undefined); event.parse(); - expect(() => event.handle_sync()).toThrow( - 'handle_sync can only be used with synchronous local_call actions', + assert.throws( + () => event.handle_sync(), + /handle_sync can only be used with synchronous local_call actions/, ); }); @@ -384,15 +388,15 @@ describe('ActionEvent', () => { event.parse(); // Should be failed after parsing invalid input - expect(event.data.step).toBe('failed'); + assert.strictEqual(event.data.step, 'failed'); const original_error = event.data.error; // handle_sync should be no-op event.handle_sync(); // State should remain unchanged - expect(event.data.step).toBe('failed'); - expect(event.data.error).toBe(original_error); + assert.strictEqual(event.data.step, 'failed'); + assert.strictEqual(event.data.error, original_error); }); }); @@ -405,16 
+409,16 @@ describe('ActionEvent', () => { event.parse(); await event.handle_async(); - expect(event.data.phase).toBe('send_request'); - expect(event.data.step).toBe('handled'); + assert.strictEqual(event.data.phase, 'send_request'); + assert.strictEqual(event.data.step, 'handled'); // Transition to receive_response event.transition('receive_response'); - expect(event.data.phase).toBe('receive_response'); - expect(event.data.step).toBe('initial'); + assert.strictEqual(event.data.phase, 'receive_response'); + assert.strictEqual(event.data.step, 'initial'); // Request should be preserved - expect(event.data.request).toBeDefined(); + assert.isDefined(event.data.request); }); test('throws for invalid phase transition', async () => { @@ -425,8 +429,9 @@ describe('ActionEvent', () => { await event.handle_async(); // Can't go from send_request to send_response - expect(() => event.transition('send_response')).toThrow( - "Invalid phase transition from 'send_request' to 'send_response'", + assert.throws( + () => event.transition('send_response'), + /Invalid phase transition from 'send_request' to 'send_response'/, ); }); @@ -435,8 +440,9 @@ describe('ActionEvent', () => { const event = create_action_event(env, ping_action_spec, undefined); // Still in initial step - expect(() => event.transition('receive_response')).toThrow( - "cannot transition from step 'initial' - must be 'handled'", + assert.throws( + () => event.transition('receive_response'), + /cannot transition from step 'initial' - must be 'handled'/, ); }); @@ -460,11 +466,11 @@ describe('ActionEvent', () => { // Transition to send_response event.transition('send_response'); - expect(event.data.phase).toBe('send_response'); - expect(event.data.request).toEqual(request); - expect(event.data.output).toEqual({ping_id: request.id}); - expect(event.data.response).toBeDefined(); - expect(event.data.response).toHaveProperty('result'); + assert.strictEqual(event.data.phase, 'send_response'); + 
assert.deepEqual(event.data.request, request); + assert.deepEqual(event.data.output, {ping_id: request.id}); + assert.isDefined(event.data.response); + assert.ok(Object.hasOwn(event.data.response as any, 'result')); }); test('is no-op when already failed', async () => { @@ -485,14 +491,14 @@ describe('ActionEvent', () => { await event.handle_async(); // First error transitions to send_error - expect(event.data.step).toBe('parsed'); - expect(event.data.phase).toBe('send_error'); + assert.strictEqual(event.data.step, 'parsed'); + assert.strictEqual(event.data.phase, 'send_error'); // Handle error phase - this will throw and transition to failed await event.handle_async(); // Now should be failed after error handler error - expect(event.data.step).toBe('failed'); + assert.strictEqual(event.data.step, 'failed'); const original_error = event.data.error; const original_phase = event.data.phase; @@ -500,9 +506,9 @@ describe('ActionEvent', () => { event.transition('receive_response'); // State should remain unchanged - expect(event.data.step).toBe('failed'); - expect(event.data.phase).toBe(original_phase); - expect(event.data.error).toBe(original_error); + assert.strictEqual(event.data.step, 'failed'); + assert.strictEqual(event.data.phase, original_phase); + assert.strictEqual(event.data.error, original_error); }); }); @@ -520,7 +526,7 @@ describe('ActionEvent', () => { event.set_request(request); - expect(event.data.request).toEqual(request); + assert.deepEqual(event.data.request, request); }); test('set_response() sets response and extracts output', () => { @@ -549,8 +555,8 @@ describe('ActionEvent', () => { event.set_response(response); - expect(event.data.response).toEqual(response); - expect(event.data.output).toEqual(response.result); + assert.deepEqual(event.data.response, response); + assert.deepEqual(event.data.output, response.result); }); test('error response transitions to receive_error phase on parse', () => { @@ -586,11 +592,11 @@ describe('ActionEvent', () 
=> { // Parse should detect the error and transition to receive_error phase event.parse(); - expect(event.data.step).toBe('parsed'); - expect(event.data.phase).toBe('receive_error'); - expect(event.data.error).toEqual(errorResponse.error); - expect(event.data.response).toEqual(errorResponse); - expect(event.data.output).toBe(null); + assert.strictEqual(event.data.step, 'parsed'); + assert.strictEqual(event.data.phase, 'receive_error'); + assert.deepEqual(event.data.error, errorResponse.error); + assert.deepEqual(event.data.response, errorResponse); + assert.isNull(event.data.output); }); test('set_notification() sets notification data', () => { @@ -609,19 +615,21 @@ describe('ActionEvent', () => { event.set_notification(notification); - expect(event.data.notification).toEqual(notification); + assert.deepEqual(event.data.notification, notification); }); test('setters throw for wrong phase/kind', () => { const env = new TestEnvironment([ping_action_spec]); const event = create_action_event(env, ping_action_spec, undefined); - expect(() => event.set_request({} as any)).toThrow( - 'can only set request in receive_request phase', + assert.throws( + () => event.set_request({} as any), + /can only set request in receive_request phase/, ); - expect(() => event.set_notification({} as any)).toThrow( - 'can only set notification in receive phase', + assert.throws( + () => event.set_notification({} as any), + /can only set notification in receive phase/, ); }); }); @@ -633,7 +641,7 @@ describe('ActionEvent', () => { const event = create_action_event(env, ping_action_spec, undefined); // Not complete in initial state - expect(event.is_complete()).toBe(false); + assert.ok(!event.is_complete()); // Handle through to receive_response event.parse(); @@ -648,7 +656,7 @@ describe('ActionEvent', () => { await event.handle_async(); // receive_response is terminal for request_response - expect(event.is_complete()).toBe(true); + assert.ok(event.is_complete()); }); test('returns true for 
failed state', () => { @@ -657,8 +665,8 @@ describe('ActionEvent', () => { event.parse(); // Will fail due to invalid input - expect(event.data.step).toBe('failed'); - expect(event.is_complete()).toBe(true); + assert.strictEqual(event.data.step, 'failed'); + assert.ok(event.is_complete()); }); test('returns false for non-terminal phases', () => { @@ -668,7 +676,7 @@ describe('ActionEvent', () => { event.parse(); // Parsed but not handled - expect(event.is_complete()).toBe(false); + assert.ok(!event.is_complete()); }); }); @@ -688,8 +696,8 @@ describe('ActionEvent', () => { event.parse(); - expect(changes).toHaveLength(1); - expect(changes[0]).toEqual({ + assert.strictEqual(changes.length, 1); + assert.deepEqual(changes[0], { old_step: 'initial', new_step: 'parsed', }); @@ -705,12 +713,12 @@ describe('ActionEvent', () => { }); event.parse(); - expect(call_count).toBe(1); + assert.strictEqual(call_count, 1); cleanup(); await event.handle_async(); - expect(call_count).toBe(1); // No additional calls + assert.strictEqual(call_count, 1); // No additional calls }); test('multiple listeners work independently', () => { @@ -730,8 +738,8 @@ describe('ActionEvent', () => { event.parse(); - expect(listener1_calls).toEqual(['parsed']); - expect(listener2_calls).toEqual(['parsed']); + assert.deepEqual(listener1_calls, ['parsed']); + assert.deepEqual(listener2_calls, ['parsed']); }); }); @@ -745,15 +753,15 @@ describe('ActionEvent', () => { const json = event.toJSON(); - expect(json.kind).toBe('request_response'); - expect(json.phase).toBe('send_request'); - expect(json.step).toBe('handled'); - expect(json.request).toBeDefined(); + assert.strictEqual(json.kind, 'request_response'); + assert.strictEqual(json.phase, 'send_request'); + assert.strictEqual(json.step, 'handled'); + assert.isDefined(json.request); // Reconstruct from JSON const restored = create_action_event_from_json(json, env); - expect(restored.data).toEqual(event.data); + assert.deepEqual(restored.data, 
event.data); }); test('throws when spec not found for deserialization', () => { @@ -773,50 +781,9 @@ describe('ActionEvent', () => { notification: null, }; - expect(() => create_action_event_from_json(json as any, env)).toThrow( - "no spec found for method 'unknown_method'", - ); - }); - }); - - describe('environment helpers', () => { - test('app getter works for frontend environment', () => { - const env = new TestEnvironment([ping_action_spec]); - env.executor = 'frontend'; - - const event = create_action_event(env, ping_action_spec, undefined); - - expect(event.app).toBe(env); - }); - - test('backend getter works for backend environment', () => { - const env = new TestEnvironment([ping_action_spec]); - env.executor = 'backend'; - - const event = create_action_event(env, ping_action_spec, undefined); - - expect(event.backend).toBe(env); - }); - - test('app getter throws for backend environment', () => { - const env = new TestEnvironment([ping_action_spec]); - env.executor = 'backend'; - - const event = create_action_event(env, ping_action_spec, undefined); - - expect(() => event.app).toThrow( - '`action_event.app` can only be accessed in frontend environments', - ); - }); - - test('backend getter throws for frontend environment', () => { - const env = new TestEnvironment([ping_action_spec]); - env.executor = 'frontend'; - - const event = create_action_event(env, ping_action_spec, undefined); - - expect(() => event.backend).toThrow( - '`action_event.backend` can only be accessed in backend environments', + assert.throws( + () => create_action_event_from_json(json as any, env), + /no spec found for method 'unknown_method'/, ); }); }); @@ -835,14 +802,14 @@ describe('ActionEvent', () => { event.parse(); // Should fail during parsing - expect(event.data.step).toBe('failed'); - expect(event.data.error).toBeDefined(); - expect(event.data.error?.code).toBe(-32602); - expect(event.data.error?.message).toContain('failed to parse input'); + 
assert.strictEqual(event.data.step, 'failed'); + assert.isDefined(event.data.error); + assert.strictEqual(event.data.error?.code, -32602); + assert.include(event.data.error?.message, 'failed to parse input'); // Should be a no-op when handling after parse failure await event.handle_async(); - expect(event.data.step).toBe('failed'); // Still failed, no change + assert.strictEqual(event.data.step, 'failed'); // Still failed, no change }); test('remote_notification creates notification in send phase', async () => { @@ -866,9 +833,9 @@ describe('ActionEvent', () => { event.parse(); await event.handle_async(); - expect(event.data.notification).toBeDefined(); - expect(event.data.notification?.method).toBe('filer_change'); - expect(event.data.notification?.params).toEqual(input); + assert.isDefined(event.data.notification); + assert.strictEqual(event.data.notification?.method, 'filer_change'); + assert.deepEqual(event.data.notification?.params, input); }); test('local_call completes in single phase', () => { @@ -880,10 +847,10 @@ describe('ActionEvent', () => { event.parse(); event.handle_sync(); - expect(event.data.phase).toBe('execute'); - expect(event.data.step).toBe('handled'); - expect(event.data.output).toEqual({show: false}); - expect(event.is_complete()).toBe(true); + assert.strictEqual(event.data.phase, 'execute'); + assert.strictEqual(event.data.step, 'handled'); + assert.deepEqual(event.data.output, {show: false}); + assert.ok(event.is_complete()); }); }); }); diff --git a/src/test/cell.svelte.base.test.ts b/src/test/cell.svelte.base.test.ts index 259cf8881..4185da72a 100644 --- a/src/test/cell.svelte.base.test.ts +++ b/src/test/cell.svelte.base.test.ts @@ -1,15 +1,14 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, vi, beforeEach, describe} from 'vitest'; +import {test, vi, beforeEach, describe, assert} from 'vitest'; import {z} from 'zod'; import {Cell, type CellOptions} from '$lib/cell.svelte.js'; import {CellJson} from 
'$lib/cell_types.js'; import {create_uuid, get_datetime_now, UuidWithDefault} from '$lib/zod_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Constants for testing const TEST_ID = create_uuid(); @@ -59,16 +58,16 @@ describe('Cell initialization', () => { }); // Verify basic properties - expect(test_cell.id).toBe(TEST_ID); - expect(test_cell.created).toBe(TEST_DATETIME); - expect(test_cell.updated).toBe(test_cell.created); - expect(test_cell.text).toBe('Sample'); - expect(test_cell.number).toBe(42); - expect(test_cell.items).toEqual(['item1', 'item2']); + assert.strictEqual(test_cell.id, TEST_ID); + assert.strictEqual(test_cell.created, TEST_DATETIME); + assert.strictEqual(test_cell.updated, test_cell.created); + assert.strictEqual(test_cell.text, 'Sample'); + assert.strictEqual(test_cell.number, 42); + assert.deepEqual(test_cell.items, ['item1', 'item2']); // Verify cell was registered - expect(app.cell_registry.all.has(TEST_ID)).toBe(true); - expect(app.cell_registry.all.get(TEST_ID)).toBe(test_cell); + assert.ok(app.cell_registry.all.has(TEST_ID)); + assert.ok(app.cell_registry.all.get(TEST_ID) === (test_cell as any)); }); test('uses default values when json is empty', () => { @@ -77,13 +76,13 @@ describe('Cell initialization', () => { }); // Should use schema defaults - expect(test_cell.id).toBeDefined(); - expect(test_cell.created).toBeDefined(); - expect(test_cell.updated).toBe(test_cell.created); - expect(test_cell.text).toBe(''); - expect(test_cell.number).toBe(0); - expect(test_cell.items).toEqual([]); - expect(test_cell.flag).toBe(true); + assert.isDefined(test_cell.id); + assert.isDefined(test_cell.created); + assert.strictEqual(test_cell.updated, test_cell.created); + assert.strictEqual(test_cell.text, ''); + assert.strictEqual(test_cell.number, 0); + assert.deepEqual(test_cell.items, []); + 
assert.ok(test_cell.flag); }); test('derived schema properties are correctly calculated', () => { @@ -95,25 +94,25 @@ describe('Cell initialization', () => { }); // Check if schema keys contain expected fields - expect(test_cell.schema_keys).toContain('id'); - expect(test_cell.schema_keys).toContain('text'); - expect(test_cell.schema_keys).toContain('number'); - expect(test_cell.schema_keys).toContain('items'); + assert.include(test_cell.schema_keys, 'id'); + assert.include(test_cell.schema_keys, 'text'); + assert.include(test_cell.schema_keys, 'number'); + assert.include(test_cell.schema_keys, 'items'); // Check if field schemas are correctly mapped - expect(test_cell.field_schemas.size).toBeGreaterThan(0); - expect(test_cell.field_schemas.has('text')).toBe(true); - expect(test_cell.field_schemas.has('number')).toBe(true); + assert.ok(test_cell.field_schemas.size > 0); + assert.ok(test_cell.field_schemas.has('text')); + assert.ok(test_cell.field_schemas.has('number')); // Test schema info for an array type const items_info = test_cell.field_schema_info.get('items'); - expect(items_info?.is_array).toBe(true); - expect(items_info?.type).toBe('ZodArray'); + assert.ok(items_info?.is_array); + assert.strictEqual(items_info.type, 'ZodArray'); // Test schema info for a scalar type const text_info = test_cell.field_schema_info.get('text'); - expect(text_info?.is_array).toBe(false); - expect(text_info?.type).toBe('ZodString'); + assert.ok(!text_info?.is_array); + assert.strictEqual(text_info?.type, 'ZodString'); }); }); @@ -130,8 +129,8 @@ describe('Cell registry lifecycle', () => { }); // Cell should be registered automatically in init() - expect(app.cell_registry.all.has(cell_id)).toBe(true); - expect(app.cell_registry.all.get(cell_id)).toBe(test_cell); + assert.ok(app.cell_registry.all.has(cell_id)); + assert.ok(app.cell_registry.all.get(cell_id) === (test_cell as any)); }); test('dispose removes from registry', () => { @@ -146,13 +145,13 @@ describe('Cell registry 
lifecycle', () => { }); // Verify initial registration - expect(app.cell_registry.all.has(cell_id)).toBe(true); + assert.ok(app.cell_registry.all.has(cell_id)); // Dispose cell test_cell.dispose(); // Should be removed from registry - expect(app.cell_registry.all.has(cell_id)).toBe(false); + assert.ok(!app.cell_registry.all.has(cell_id)); }); test('dispose is safe to call multiple times', () => { @@ -167,10 +166,10 @@ describe('Cell registry lifecycle', () => { // First dispose test_cell.dispose(); - expect(app.cell_registry.all.has(cell_id)).toBe(false); + assert.ok(!app.cell_registry.all.has(cell_id)); // Second dispose should not throw - expect(() => test_cell.dispose()).not.toThrow(); + assert.doesNotThrow(() => test_cell.dispose()); }); }); @@ -201,11 +200,11 @@ describe('Cell id handling', () => { const initial_id = cell.id; // Verify initial state - expect(cell.id).toBe(initial_id); + assert.strictEqual(cell.id, initial_id); // Create a new id to set const new_id = create_uuid(); - expect(new_id).not.toBe(initial_id); + assert.notStrictEqual(new_id, initial_id); // Set new id through set_json cell.set_json({ @@ -216,8 +215,8 @@ describe('Cell id handling', () => { }); // Verify id was changed to the new value - expect(cell.id).toBe(new_id); - expect(cell.id).not.toBe(initial_id); + assert.strictEqual(cell.id, new_id); + assert.notStrictEqual(cell.id, initial_id); }); test('set_json_partial updates id when included in partial update', () => { @@ -235,11 +234,11 @@ describe('Cell id handling', () => { }); // Verify id was updated and other properties preserved - expect(cell.id).toBe(new_id); - expect(cell.id).not.toBe(initial_id); - expect(cell.type).toBe('test'); - expect(cell.content).toBe(''); - expect(cell.version).toBe(3); + assert.strictEqual(cell.id, new_id); + assert.notStrictEqual(cell.id, initial_id); + assert.strictEqual(cell.type, 'test'); + assert.strictEqual(cell.content, ''); + assert.strictEqual(cell.version, 3); }); test('set_json_partial 
preserves id when not included in partial update', () => { @@ -254,9 +253,9 @@ describe('Cell id handling', () => { }); // Verify id preserved and content updated - expect(cell.id).toBe(initial_id); - expect(cell.content).toBe('Partial update content'); - expect(cell.content).not.toBe(initial_content); + assert.strictEqual(cell.id, initial_id); + assert.strictEqual(cell.content, 'Partial update content'); + assert.notStrictEqual(cell.content, initial_content); }); test('schema validation rejects invalid id formats', () => { @@ -264,11 +263,11 @@ describe('Cell id handling', () => { const cell = new IdTestCell({app}); // Attempt to set invalid id - expect(() => { + assert.throws(() => { cell.set_json_partial({ id: 'not-a-valid-uuid' as any, }); - }).toThrow(); + }); }); test('clone creates a new id instead of copying the original', () => { @@ -287,13 +286,13 @@ describe('Cell id handling', () => { const cloned_cell = cell.clone(); // Verify clone has new id but same content - expect(cloned_cell.id).not.toBe(original_id); - expect(cloned_cell.content).toBe('Original content'); - expect(cloned_cell.version).toBe(1); + assert.notStrictEqual(cloned_cell.id, original_id); + assert.strictEqual(cloned_cell.content, 'Original content'); + assert.strictEqual(cloned_cell.version, 1); // Verify changing clone doesn't affect original cloned_cell.content = 'Changed in clone'; - expect(cell.content).toBe('Original content'); + assert.strictEqual(cell.content, 'Original content'); }); }); @@ -312,11 +311,11 @@ describe('Cell serialization', () => { const json = test_cell.to_json(); - expect(json.id).toBe(TEST_ID); - expect(json.created).toBe(TEST_DATETIME); - expect(json.text).toBe('JSON Test'); - expect(json.number).toBe(100); - expect(json.items).toEqual(['value1', 'value2']); + assert.strictEqual(json.id, TEST_ID); + assert.strictEqual(json.created, TEST_DATETIME); + assert.strictEqual(json.text, 'JSON Test'); + assert.strictEqual(json.number, 100); + 
assert.deepEqual(json.items, ['value1', 'value2']); }); test('toJSON method works with JSON.stringify', () => { @@ -331,8 +330,8 @@ describe('Cell serialization', () => { const stringified = JSON.stringify(test_cell); const parsed = JSON.parse(stringified); - expect(parsed.id).toBe(TEST_ID); - expect(parsed.text).toBe('Stringify Test'); + assert.strictEqual(parsed.id, TEST_ID); + assert.strictEqual(parsed.text, 'Stringify Test'); }); test('derived json properties update when cell changes', () => { @@ -346,21 +345,21 @@ describe('Cell serialization', () => { }); // Check initial values - expect(test_cell.json.text).toBe('Initial'); - expect(test_cell.json.number).toBe(10); + assert.strictEqual(test_cell.json.text, 'Initial'); + assert.strictEqual(test_cell.json.number, 10); // Update values test_cell.text = 'Updated'; test_cell.number = 20; // Check derived properties updated - expect(test_cell.json.text).toBe('Updated'); - expect(test_cell.json.number).toBe(20); + assert.strictEqual(test_cell.json.text, 'Updated'); + assert.strictEqual(test_cell.json.number, 20); // Check derived serialized JSON const parsed = JSON.parse(test_cell.json_serialized); - expect(parsed.text).toBe('Updated'); - expect(parsed.number).toBe(20); + assert.strictEqual(parsed.text, 'Updated'); + assert.strictEqual(parsed.number, 20); }); }); @@ -381,17 +380,17 @@ describe('Cell modification methods', () => { items: ['new1', 'new2'], }); - expect(test_cell.text).toBe('Updated via set_json'); - expect(test_cell.number).toBe(50); - expect(test_cell.items).toEqual(['new1', 'new2']); - expect(test_cell.id).not.toBe(TEST_ID); // id should be new + assert.strictEqual(test_cell.text, 'Updated via set_json'); + assert.strictEqual(test_cell.number, 50); + assert.deepEqual(test_cell.items, ['new1', 'new2']); + assert.notStrictEqual(test_cell.id, TEST_ID); // id should be new }); test('set_json rejects invalid data', () => { const test_cell = new BasicTestCell({app}); // Should reject invalid data with a 
schema error - expect(() => test_cell.set_json({number: 'not a number' as any})).toThrow(); + assert.throws(() => test_cell.set_json({number: 'not a number' as any})); }); test('set_json_partial updates only specified properties', () => { @@ -413,13 +412,13 @@ describe('Cell modification methods', () => { }); // Verify updated properties - expect(test_cell.text).toBe('Updated text'); - expect(test_cell.number).toBe(20); + assert.strictEqual(test_cell.text, 'Updated text'); + assert.strictEqual(test_cell.number, 20); // Verify untouched properties - expect(test_cell.items).toEqual(['item1', 'item2']); - expect(test_cell.flag).toBe(true); - expect(test_cell.id).toBe(TEST_ID); + assert.deepEqual(test_cell.items, ['item1', 'item2']); + assert.ok(test_cell.flag); + assert.strictEqual(test_cell.id, TEST_ID); }); test('set_json_partial handles null or undefined input', () => { @@ -432,12 +431,12 @@ describe('Cell modification methods', () => { }); // These should not throw errors - expect(() => test_cell.set_json_partial(null!)).not.toThrow(); - expect(() => test_cell.set_json_partial(undefined!)).not.toThrow(); + assert.doesNotThrow(() => test_cell.set_json_partial(null!)); + assert.doesNotThrow(() => test_cell.set_json_partial(undefined!)); // Properties should remain unchanged - expect(test_cell.id).toBe(TEST_ID); - expect(test_cell.text).toBe('Initial'); + assert.strictEqual(test_cell.id, TEST_ID); + assert.strictEqual(test_cell.text, 'Initial'); }); test('set_json_partial validates merged data against schema', () => { @@ -450,10 +449,10 @@ describe('Cell modification methods', () => { }); // Should reject invalid data with a schema error - expect(() => test_cell.set_json_partial({number: 'not a number' as any})).toThrow(); + assert.throws(() => test_cell.set_json_partial({number: 'not a number' as any})); // Original values should remain unchanged after failed update - expect(test_cell.text).toBe('Initial'); + assert.strictEqual(test_cell.text, 'Initial'); }); }); @@ 
-473,17 +472,17 @@ describe('Cell date formatting', () => { }); // Verify date objects - expect(test_cell.created_date).toBeInstanceOf(Date); - expect(test_cell.updated_date).toBeInstanceOf(Date); + assert.instanceOf(test_cell.created_date, Date); + assert.instanceOf(test_cell.updated_date, Date); // Verify formatted strings exist - expect(test_cell.created_formatted_short_date).not.toBeNull(); - expect(test_cell.created_formatted_datetime).not.toBeNull(); - expect(test_cell.created_formatted_time).not.toBeNull(); + assert.ok(test_cell.created_formatted_short_date); + assert.ok(test_cell.created_formatted_datetime); + assert.ok(test_cell.created_formatted_time); - expect(test_cell.updated_formatted_short_date).not.toBeNull(); - expect(test_cell.updated_formatted_datetime).not.toBeNull(); - expect(test_cell.updated_formatted_time).not.toBeNull(); + assert.ok(test_cell.updated_formatted_short_date); + assert.ok(test_cell.updated_formatted_datetime); + assert.ok(test_cell.updated_formatted_time); }); test('handles null updated date', () => { @@ -496,10 +495,10 @@ describe('Cell date formatting', () => { }, }); - expect(test_cell.updated_date).not.toBeNull(); - expect(test_cell.updated_formatted_short_date).not.toBeNull(); - expect(test_cell.updated_formatted_datetime).not.toBeNull(); - expect(test_cell.updated_formatted_time).not.toBeNull(); + assert.ok(test_cell.updated_date); + assert.ok(test_cell.updated_formatted_short_date); + assert.ok(test_cell.updated_formatted_datetime); + assert.ok(test_cell.updated_formatted_time); }); }); @@ -518,22 +517,22 @@ describe('Cell cloning', () => { const clone = original.clone(); // Should have same values - expect(clone.text).toBe('Original'); - expect(clone.number).toBe(42); - expect(clone.items).toEqual(['value1']); + assert.strictEqual(clone.text, 'Original'); + assert.strictEqual(clone.number, 42); + assert.deepEqual(clone.items, ['value1']); // But be a different instance - expect(clone).not.toBe(original); - 
expect(clone.id).not.toBe(original.id); // Should have new id + assert.notStrictEqual(clone, original); + assert.notStrictEqual(clone.id, original.id); // Should have new id // Changes to one shouldn't affect the other clone.text = 'Changed'; clone.number = 100; clone.items.push('value2'); - expect(original.text).toBe('Original'); - expect(original.number).toBe(42); - expect(original.items).toEqual(['value1']); + assert.strictEqual(original.text, 'Original'); + assert.strictEqual(original.number, 42); + assert.deepEqual(original.items, ['value1']); }); test('clone registers new instance in registry', () => { @@ -547,9 +546,9 @@ describe('Cell cloning', () => { const clone = original.clone(); // Both instances should be registered - expect(app.cell_registry.all.has(original.id)).toBe(true); - expect(app.cell_registry.all.has(clone.id)).toBe(true); - expect(app.cell_registry.all.get(clone.id)).toBe(clone); + assert.ok(app.cell_registry.all.has(original.id)); + assert.ok(app.cell_registry.all.has(clone.id)); + assert.ok(app.cell_registry.all.get(clone.id) === (clone as any)); }); }); @@ -564,10 +563,10 @@ describe('Schema validation', () => { }); // Initial state should be valid - expect(test_cell.json_parsed.success).toBe(true); + assert.ok(test_cell.json_parsed.success); // Invalid initialization should throw - expect( + assert.throws( () => new BasicTestCell({ app, @@ -576,6 +575,6 @@ describe('Schema validation', () => { text: 123 as any, }, }), - ).toThrow(); + ); }); }); diff --git a/src/test/cell.svelte.decoders.test.ts b/src/test/cell.svelte.decoders.test.ts index c5b91addf..36b76e074 100644 --- a/src/test/cell.svelte.decoders.test.ts +++ b/src/test/cell.svelte.decoders.test.ts @@ -1,8 +1,6 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, vi, beforeEach} from 'vitest'; +import {test, vi, beforeEach, assert} from 'vitest'; import {z} from 'zod'; import {Cell, type CellOptions} from '$lib/cell.svelte.js'; @@ -10,7 +8,8 @@ 
import {CellJson} from '$lib/cell_types.js'; import {DatetimeNow, get_datetime_now, create_uuid, UuidWithDefault} from '$lib/zod_helpers.js'; import {HANDLED} from '$lib/cell_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Constants for testing const TEST_ID = create_uuid(); @@ -60,7 +59,7 @@ test('Cell allows schema keys with no properties if a decoder is provided', () = }, }); - expect(cell.captured_value).toBe(99); + assert.strictEqual(cell.captured_value, 99); }); test('Cell supports virtual properties with custom handling', () => { @@ -104,9 +103,9 @@ test('Cell supports virtual properties with custom handling', () => { }, }); - expect(cell.visible_prop).toBe('visible'); - expect('hidden_prop' in cell).toBe(false); - expect(cell.processed_value).toBe(84); // 42 * 2 + assert.strictEqual(cell.visible_prop, 'visible'); + assert.ok(!('hidden_prop' in cell)); + assert.strictEqual(cell.processed_value, 84); // 42 * 2 }); test('Cell handles sentinel values with proper precedence', () => { @@ -159,13 +158,13 @@ test('Cell handles sentinel values with proper precedence', () => { }, }); - expect(cell.decoder_calls).toContain('handled_field_called'); - expect(cell.decoder_calls).toContain('default_field_called'); - expect(cell.decoder_calls).toContain('normal_field_called'); + assert.include(cell.decoder_calls, 'handled_field_called'); + assert.include(cell.decoder_calls, 'default_field_called'); + assert.include(cell.decoder_calls, 'normal_field_called'); - expect(cell.handled_field).toBe('initial_value'); - expect(cell.default_field).toBe(42); - expect(cell.normal_field).toBe(true); + assert.strictEqual(cell.handled_field, 'initial_value'); + assert.strictEqual(cell.default_field, 42); + assert.ok(cell.normal_field); }); test('Cell parser defaults take precedence over schema defaults', () => { @@ -201,6 +200,6 @@ test('Cell 
parser defaults take precedence over schema defaults', () => { json: {}, }); - expect(cell.id).toBe('parser_default_id'); - expect(cell.text).toBe('schema_default_text'); + assert.strictEqual(cell.id, 'parser_default_id'); + assert.strictEqual(cell.text, 'schema_default_text'); }); diff --git a/src/test/cell.svelte.inheritance.test.ts b/src/test/cell.svelte.inheritance.test.ts index 4230520f4..ec4f58ed2 100644 --- a/src/test/cell.svelte.inheritance.test.ts +++ b/src/test/cell.svelte.inheritance.test.ts @@ -1,8 +1,6 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, vi, beforeEach} from 'vitest'; +import {test, vi, beforeEach, assert} from 'vitest'; import {z} from 'zod'; import {Cell, type CellOptions} from '$lib/cell.svelte.js'; @@ -10,7 +8,8 @@ import {CellJson, type SchemaKeys} from '$lib/cell_types.js'; import {create_uuid, get_datetime_now} from '$lib/zod_helpers.js'; import {HANDLED} from '$lib/cell_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Constants for testing const TEST_ID = create_uuid(); @@ -75,10 +74,10 @@ test('Cell supports overriding assign_property', () => { }, }); - expect(cell.text).toBe('modified_original'); - expect(cell.assignment_log).toContain(`Assigned id: ${TEST_ID}`); - expect(cell.assignment_log).toContain('Assigned text: original'); - expect(cell.assignment_log).toContain('Assigned list: item'); + assert.strictEqual(cell.text, 'modified_original'); + assert.include(cell.assignment_log, `Assigned id: ${TEST_ID}`); + assert.include(cell.assignment_log, 'Assigned text: original'); + assert.include(cell.assignment_log, 'Assigned list: item'); }); test('Cell assign_property returns after handling property correctly', () => { @@ -122,15 +121,15 @@ test('Cell assign_property returns after handling property correctly', () => { }, }); - 
expect(cell.text).toBe('sample'); - expect(cell.number).toBe(42); + assert.strictEqual(cell.text, 'sample'); + assert.strictEqual(cell.number, 42); - expect(cell.execution_path).toContain('begin-text'); - expect(cell.execution_path).toContain('complete-text'); - expect(cell.execution_path).not.toContain('continue-text'); + assert.include(cell.execution_path, 'begin-text'); + assert.include(cell.execution_path, 'complete-text'); + assert.notInclude(cell.execution_path, 'continue-text'); - expect(cell.execution_path).toContain('begin-number'); - expect(cell.execution_path).toContain('continue-number'); + assert.include(cell.execution_path, 'begin-number'); + assert.include(cell.execution_path, 'continue-number'); }); test('Cell handles inherited properties correctly', () => { @@ -178,19 +177,19 @@ test('Cell handles inherited properties correctly', () => { }, }); - expect(cell.id).toBe(TEST_ID); - expect(cell.text).toBe('base_property'); - expect(cell.number).toBe(30); + assert.strictEqual(cell.id, TEST_ID); + assert.strictEqual(cell.text, 'base_property'); + assert.strictEqual(cell.number, 30); - expect(cell.derived_method()).toBe('derived_result'); - expect(cell.base_method()).toBe('overridden_result'); + assert.strictEqual(cell.derived_method(), 'derived_result'); + assert.strictEqual(cell.base_method(), 'overridden_result'); - expect('id' in cell).toBe(true); + assert.ok('id' in cell); const json = cell.json; - expect(json.id).toBe(TEST_ID); - expect(json.text).toBe('base_property'); - expect(json.number).toBe(30); + assert.strictEqual(json.id, TEST_ID); + assert.strictEqual(json.text, 'base_property'); + assert.strictEqual(json.number, 30); }); test('Cell properly handles collections with HANDLED sentinel', () => { @@ -242,8 +241,8 @@ test('Cell properly handles collections with HANDLED sentinel', () => { }, }); - expect(cell.stored_items).toEqual(['ONE', 'TWO', 'THREE']); - expect(cell.json.collection).toEqual(['ONE', 'TWO', 'THREE']); + 
assert.deepEqual(cell.stored_items, ['ONE', 'TWO', 'THREE']); + assert.deepEqual(cell.json.collection, ['ONE', 'TWO', 'THREE']); }); test('Cell registration and unregistration works correctly', () => { @@ -270,13 +269,13 @@ test('Cell registration and unregistration works correctly', () => { }); // Cell should be automatically registered - expect(app.cell_registry.all.has(cell_id)).toBe(true); - expect(app.cell_registry.all.get(cell_id)).toBe(cell); + assert.ok(app.cell_registry.all.has(cell_id)); + assert.ok(app.cell_registry.all.get(cell_id) === (cell as any)); // Test disposal cell.dispose(); - expect(app.cell_registry.all.has(cell_id)).toBe(false); + assert.ok(!app.cell_registry.all.has(cell_id)); // Test that disposing again is safe - expect(() => cell.dispose()).not.toThrow(); + assert.doesNotThrow(() => cell.dispose()); }); diff --git a/src/test/cell.svelte.special_types.test.ts b/src/test/cell.svelte.special_types.test.ts index ce1e831f5..efdc456c6 100644 --- a/src/test/cell.svelte.special_types.test.ts +++ b/src/test/cell.svelte.special_types.test.ts @@ -1,15 +1,14 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, vi, beforeEach} from 'vitest'; +import {test, vi, beforeEach, assert} from 'vitest'; import {z} from 'zod'; import {Cell, type CellOptions} from '$lib/cell.svelte.js'; import {CellJson, type SchemaKeys} from '$lib/cell_types.js'; import {DatetimeNow, get_datetime_now, create_uuid, UuidWithDefault} from '$lib/zod_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Constants for testing const TEST_ID = create_uuid(); @@ -64,8 +63,9 @@ test('Cell uses registry for instantiating class relationships', () => { const test_object = {key: 'value'}; const result = cell.test_instantiate(test_object, 'TestType'); - expect(mock_instantiate).toHaveBeenCalledWith('TestType', test_object); 
- expect(result).toEqual({type: 'TestType', key: 'value'}); + assert.ok(mock_instantiate.mock.calls.length > 0); + assert.deepEqual(mock_instantiate.mock.calls[0], ['TestType', test_object] as any); + assert.deepEqual(result, {type: 'TestType', key: 'value'}); // Clean up mock_instantiate.mockRestore(); @@ -100,13 +100,13 @@ test('Cell.encode_property uses $state.snapshot for values', () => { // Test with Date object const test_date = new Date(`${TEST_YEAR}-01-15`); const encoded_date = cell.test_encode(test_date, 'date_field'); - expect(encoded_date instanceof Date).toBe(true); - expect((encoded_date as Date).getFullYear()).toBe(TEST_YEAR); + assert.ok(encoded_date instanceof Date); + assert.strictEqual(encoded_date.getFullYear(), TEST_YEAR); // Test with nested object const test_object = {outer: {inner: 42}}; const encoded_object = cell.test_encode(test_object, 'object_field'); - expect(encoded_object).toEqual(test_object); + assert.deepEqual(encoded_object, test_object); }); test('Cell handles special types like Map and Set', () => { @@ -160,25 +160,25 @@ test('Cell handles special types like Map and Set', () => { }); // Verify Map handling - expect(cell.map_field).toBeInstanceOf(Map); - expect(cell.map_field.get('key1')).toBe(1); - expect(cell.map_field.get('key2')).toBe(2); + assert.instanceOf(cell.map_field, Map); + assert.strictEqual(cell.map_field.get('key1'), 1); + assert.strictEqual(cell.map_field.get('key2'), 2); // Verify Set handling - expect(cell.set_field).toBeInstanceOf(Set); - expect(cell.set_field.has('item1')).toBe(true); - expect(cell.set_field.has('item2')).toBe(true); - expect(cell.set_field.has('item3')).toBe(true); + assert.instanceOf(cell.set_field, Set); + assert.ok(cell.set_field.has('item1')); + assert.ok(cell.set_field.has('item2')); + assert.ok(cell.set_field.has('item3')); // Test manual decoding const map_result = cell.test_decode([['key3', 3]], 'map_field'); - expect(map_result).toBeInstanceOf(Map); - 
expect(map_result.get('key3')).toBe(3); + assert.instanceOf(map_result, Map); + assert.strictEqual(map_result.get('key3'), 3); const set_result = cell.test_decode(['item4', 'item5'], 'set_field'); - expect(set_result).toBeInstanceOf(Set); - expect(set_result.has('item4')).toBe(true); - expect(set_result.has('item5')).toBe(true); + assert.instanceOf(set_result, Set); + assert.ok(set_result.has('item4')); + assert.ok(set_result.has('item5')); }); test('Cell - JSON serialization excludes undefined values correctly', () => { @@ -233,32 +233,32 @@ test('Cell - JSON serialization excludes undefined values correctly', () => { // Test minimal cell serialization const minimal_json = minimal_cell.to_json(); - expect(minimal_json.type).toBe('type1'); - expect(minimal_json.name).toBeUndefined(); - expect(minimal_json.data).toBeUndefined(); - expect(minimal_json.items).toBeUndefined(); - expect(minimal_json.state).toBeUndefined(); + assert.strictEqual(minimal_json.type, 'type1'); + assert.ok(minimal_json.name === undefined); + assert.ok(minimal_json.data === undefined); + assert.ok(minimal_json.items === undefined); + assert.ok(minimal_json.state === undefined); // Test complete cell serialization const complete_json = complete_cell.to_json(); - expect(complete_json.type).toBe('type2'); - expect(complete_json.name).toBe('test_name'); - expect(complete_json.data).toEqual({code: 'test_code'}); - expect(complete_json.data?.value).toBeUndefined(); - expect(complete_json.items).toEqual(['item1', 'item2']); - expect(complete_json.state).toBeUndefined(); + assert.strictEqual(complete_json.type, 'type2'); + assert.strictEqual(complete_json.name, 'test_name'); + assert.deepEqual(complete_json.data, {code: 'test_code'}); + assert.ok(complete_json.data?.value === undefined); + assert.deepEqual(complete_json.items, ['item1', 'item2']); + assert.ok(complete_json.state === undefined); // Test JSON stringification const minimal_string = JSON.stringify(minimal_cell); const parsed_minimal = 
JSON.parse(minimal_string); - expect(parsed_minimal.name).toBeUndefined(); - expect(parsed_minimal.data).toBeUndefined(); - expect(parsed_minimal.items).toBeUndefined(); - expect(parsed_minimal.state).toBeUndefined(); + assert.ok(parsed_minimal.name === undefined); + assert.ok(parsed_minimal.data === undefined); + assert.ok(parsed_minimal.items === undefined); + assert.ok(parsed_minimal.state === undefined); // Test nested property handling const complete_string = JSON.stringify(complete_cell); const parsed_complete = JSON.parse(complete_string); - expect(parsed_complete.data.code).toBe('test_code'); - expect('value' in parsed_complete.data).toBe(false); + assert.strictEqual(parsed_complete.data.code, 'test_code'); + assert.ok(!('value' in parsed_complete.data)); }); diff --git a/src/test/cell_helpers.test.ts b/src/test/cell_helpers.test.ts index 64290dc2d..a95cc201d 100644 --- a/src/test/cell_helpers.test.ts +++ b/src/test/cell_helpers.test.ts @@ -1,14 +1,12 @@ -// @slop Claude Sonnet 3.7 - -import {describe, test, expect} from 'vitest'; +import {describe, test, assert} from 'vitest'; import {z} from 'zod'; import {get_schema_class_info} from '$lib/cell_helpers.js'; describe('get_schema_class_info', () => { test('handles null or undefined schemas', () => { - expect(get_schema_class_info(null)).toBeNull(); - expect(get_schema_class_info(undefined)).toBeNull(); + assert.isNull(get_schema_class_info(null)); + assert.isNull(get_schema_class_info(undefined)); }); test('identifies basic schema types correctly', () => { @@ -20,14 +18,14 @@ describe('get_schema_class_info', () => { const number_info = get_schema_class_info(number_schema); const boolean_info = get_schema_class_info(boolean_schema); - expect(string_info?.type).toBe('ZodString'); - expect(string_info?.is_array).toBe(false); + assert.strictEqual(string_info?.type, 'ZodString'); + assert.ok(!string_info?.is_array); - expect(number_info?.type).toBe('ZodNumber'); - expect(number_info?.is_array).toBe(false); + 
assert.strictEqual(number_info?.type, 'ZodNumber'); + assert.ok(!number_info?.is_array); - expect(boolean_info?.type).toBe('ZodBoolean'); - expect(boolean_info?.is_array).toBe(false); + assert.strictEqual(boolean_info?.type, 'ZodBoolean'); + assert.ok(!boolean_info?.is_array); }); test('identifies array schemas correctly', () => { @@ -40,14 +38,14 @@ describe('get_schema_class_info', () => { const object_array_info = get_schema_class_info(object_array); // Test array identification - expect(string_array_info?.type).toBe('ZodArray'); - expect(string_array_info?.is_array).toBe(true); + assert.strictEqual(string_array_info?.type, 'ZodArray'); + assert.ok(string_array_info?.is_array); - expect(number_array_info?.type).toBe('ZodArray'); - expect(number_array_info?.is_array).toBe(true); + assert.strictEqual(number_array_info?.type, 'ZodArray'); + assert.ok(number_array_info?.is_array); - expect(object_array_info?.type).toBe('ZodArray'); - expect(object_array_info?.is_array).toBe(true); + assert.strictEqual(object_array_info?.type, 'ZodArray'); + assert.ok(object_array_info?.is_array); }); test('handles default wrapped schemas', () => { @@ -58,12 +56,12 @@ describe('get_schema_class_info', () => { const array_default_info = get_schema_class_info(array_with_default); // Default shouldn't change the core type - expect(string_default_info?.type).toBe('ZodString'); - expect(string_default_info?.is_array).toBe(false); + assert.strictEqual(string_default_info?.type, 'ZodString'); + assert.ok(!string_default_info?.is_array); // This is what's failing in the test - default-wrapped arrays should still be identified as arrays - expect(array_default_info?.type).toBe('ZodArray'); - expect(array_default_info?.is_array).toBe(true); + assert.strictEqual(array_default_info?.type, 'ZodArray'); + assert.ok(array_default_info?.is_array); }); test('handles object schemas', () => { @@ -73,8 +71,8 @@ describe('get_schema_class_info', () => { }); const object_info = 
get_schema_class_info(object_schema); - expect(object_info?.type).toBe('ZodObject'); - expect(object_info?.is_array).toBe(false); + assert.strictEqual(object_info?.type, 'ZodObject'); + assert.ok(!object_info?.is_array); }); test('detects class names set with cell_class', () => { @@ -82,7 +80,7 @@ describe('get_schema_class_info', () => { const schema_with_class = schema.meta({cell_class_name: 'TestClass'}); const info = get_schema_class_info(schema_with_class); - expect(info?.class_name).toBe('TestClass'); + assert.strictEqual(info?.class_name, 'TestClass'); }); test('detects element classes from element metadata', () => { @@ -90,8 +88,8 @@ describe('get_schema_class_info', () => { const array_schema = z.array(element_schema); const info = get_schema_class_info(array_schema); - expect(info?.is_array).toBe(true); - expect(info?.element_class).toBe('ElementClass'); + assert.ok(info?.is_array); + assert.strictEqual(info.element_class, 'ElementClass'); }); test('handles default-wrapped array with element metadata', () => { @@ -99,8 +97,8 @@ describe('get_schema_class_info', () => { const array_schema = z.array(element_schema).default([]); const info = get_schema_class_info(array_schema); - expect(info?.is_array).toBe(true); - expect(info?.element_class).toBe('ElementClass'); + assert.ok(info?.is_array); + assert.strictEqual(info.element_class, 'ElementClass'); }); test('reads element class from nested element schema', () => { @@ -112,8 +110,8 @@ describe('get_schema_class_info', () => { // Verify that get_schema_class_info can read element metadata const info = get_schema_class_info(array_schema); - expect(info?.is_array).toBe(true); - expect(info?.element_class).toBe('DirectElementClass'); + assert.ok(info?.is_array); + assert.strictEqual(info.element_class, 'DirectElementClass'); }); test('handles ZodDefault containing a ZodArray', () => { @@ -122,17 +120,17 @@ describe('get_schema_class_info', () => { const array_schema_default = array_schema.default([]); // We can 
see what the internal structure of ZodDefault looks like - expect(array_schema_default._zod.def).toBeDefined(); - expect(array_schema_default._zod.def.type).toBe('default'); - expect(array_schema_default._zod.def.innerType).toBeDefined(); - expect(array_schema_default._zod.def.innerType.def.type).toBe('array'); + assert.isDefined(array_schema_default._zod.def); + assert.strictEqual(array_schema_default._zod.def.type, 'default'); + assert.isDefined(array_schema_default._zod.def.innerType); + assert.strictEqual(array_schema_default._zod.def.innerType.def.type, 'array'); // Now test the function with our default-wrapped array const info = get_schema_class_info(array_schema_default); // The function should see through the ZodDefault to the ZodArray inside - expect(info?.type).toBe('ZodArray'); - expect(info?.is_array).toBe(true); + assert.strictEqual(info?.type, 'ZodArray'); + assert.ok(info?.is_array); }); test('handles complex nested schema wrapping', () => { @@ -140,15 +138,15 @@ describe('get_schema_class_info', () => { const nested_array_schema = z.array(z.string()).optional().default([]); const nested_info = get_schema_class_info(nested_array_schema); - expect(nested_info?.type).toBe('ZodArray'); - expect(nested_info?.is_array).toBe(true); + assert.strictEqual(nested_info?.type, 'ZodArray'); + assert.ok(nested_info?.is_array); // More extreme nesting: ZodDefault -> ZodOptional -> ZodDefault -> ZodArray const extreme_nesting = z.array(z.number()).default([]).optional().default([]); const extreme_info = get_schema_class_info(extreme_nesting); - expect(extreme_info?.type).toBe('ZodArray'); - expect(extreme_info?.is_array).toBe(true); + assert.strictEqual(extreme_info?.type, 'ZodArray'); + assert.ok(extreme_info?.is_array); }); test('handles ZodEffects wrapping arrays', () => { @@ -158,8 +156,8 @@ describe('get_schema_class_info', () => { .refine((arr) => arr.length > 0, {message: 'Array must not be empty'}); const refined_info = get_schema_class_info(refined_array); 
- expect(refined_info?.type).toBe('ZodArray'); - expect(refined_info?.is_array).toBe(true); + assert.strictEqual(refined_info?.type, 'ZodArray'); + assert.ok(refined_info?.is_array); // ZodEffects (transform) wrapping an array with default const transformed_array = z @@ -168,8 +166,8 @@ describe('get_schema_class_info', () => { .transform((arr) => arr.map((n) => n * 2)); const transformed_info = get_schema_class_info(transformed_array); - expect(transformed_info?.type).toBe('ZodArray'); - expect(transformed_info?.is_array).toBe(true); + assert.strictEqual(transformed_info?.type, 'ZodArray'); + assert.ok(transformed_info?.is_array); }); test('handles combinations of optional, default, and refinement', () => { @@ -182,8 +180,8 @@ describe('get_schema_class_info', () => { .optional(); const chain_info = get_schema_class_info(complex_chain); - expect(chain_info?.type).toBe('ZodArray'); - expect(chain_info?.is_array).toBe(true); + assert.strictEqual(chain_info?.type, 'ZodArray'); + assert.ok(chain_info?.is_array); }); test('recursive unwrapping preserves metadata through wrappers', () => { @@ -196,8 +194,8 @@ describe('get_schema_class_info', () => { // Check that metadata is preserved const info = get_schema_class_info(wrapped_array); - expect(info?.element_class).toBe('TestElement'); - expect(info?.is_array).toBe(true); + assert.strictEqual(info?.element_class, 'TestElement'); + assert.ok(info?.is_array); }); test('handles deeply nested schemas with element metadata', () => { @@ -207,8 +205,8 @@ describe('get_schema_class_info', () => { // Verify metadata is found correctly through the wrappers const info = get_schema_class_info(nested_schema); - expect(info?.is_array).toBe(true); - expect(info?.element_class).toBe('NestedElement'); + assert.ok(info?.is_array); + assert.strictEqual(info.element_class, 'NestedElement'); }); }); @@ -218,13 +216,13 @@ describe('cell_class', () => { const result = schema.meta({cell_class_name: 'TestCellClass'}); // Should add the metadata 
via .meta() - expect(result.meta()?.cell_class_name).toBe('TestCellClass'); + assert.strictEqual(result.meta()?.cell_class_name, 'TestCellClass'); // Should return a new schema instance (due to .meta() creating a new instance) - expect(result).not.toBe(schema); + assert.notStrictEqual(result, schema); // Get schema info should report it correctly const info = get_schema_class_info(result); - expect(info?.class_name).toBe('TestCellClass'); + assert.strictEqual(info?.class_name, 'TestCellClass'); }); }); diff --git a/src/test/codegen.test.ts b/src/test/codegen.test.ts index 80acb42b5..6c0c95c38 100644 --- a/src/test/codegen.test.ts +++ b/src/test/codegen.test.ts @@ -1,15 +1,13 @@ -// @slop Claude Opus 4 - // @vitest-environment jsdom -import {test, expect, describe} from 'vitest'; - +import {test, describe, assert} from 'vitest'; import { ImportBuilder, get_executor_phases, get_handler_return_type, generate_phase_handlers, -} from '$lib/codegen.js'; +} from '@fuzdev/fuz_app/actions/action_codegen.js'; + import { ping_action_spec, session_load_action_spec, @@ -26,7 +24,7 @@ describe('ImportBuilder', () => { imports.add_type('$lib/types.js', 'Foo'); imports.add_type('$lib/types.js', 'Bar'); - expect(imports.build()).toBe(`import type {Bar, Foo} from '$lib/types.js';`); + assert.strictEqual(imports.build(), `import type {Bar, Foo} from '$lib/types.js';`); }); test('add_types helper adds multiple types at once', () => { @@ -34,15 +32,18 @@ describe('ImportBuilder', () => { imports.add_types('$lib/types.js', 'TypeA', 'TypeB', 'TypeC'); - expect(imports.build()).toBe(`import type {TypeA, TypeB, TypeC} from '$lib/types.js';`); + assert.strictEqual( + imports.build(), + `import type {TypeA, TypeB, TypeC} from '$lib/types.js';`, + ); }); test('empty imports returns empty string', () => { const imports = new ImportBuilder(); - expect(imports.build()).toBe(''); - expect(imports.has_imports()).toBe(false); - expect(imports.import_count).toBe(0); + 
assert.strictEqual(imports.build(), ''); + assert.ok(!imports.has_imports()); + assert.strictEqual(imports.import_count, 0); }); }); @@ -54,7 +55,8 @@ describe('ImportBuilder', () => { imports.add_type('$lib/utils.js', 'HelperType'); imports.add('$lib/utils.js', 'another_helper'); - expect(imports.build()).toBe( + assert.strictEqual( + imports.build(), `import {another_helper, helper, type HelperType} from '$lib/utils.js';`, ); }); @@ -67,7 +69,8 @@ describe('ImportBuilder', () => { imports.add('$lib/mixed.js', 'value'); // This makes it mixed imports.add_type('$lib/mixed.js', 'TypeC'); - expect(imports.build()).toBe( + assert.strictEqual( + imports.build(), `import {value, type TypeA, type TypeB, type TypeC} from '$lib/mixed.js';`, ); }); @@ -84,7 +87,8 @@ describe('ImportBuilder', () => { imports.add('$lib/mixed.js', 'm_value'); // Should sort values first (alphabetically), then types (alphabetically) - expect(imports.build()).toBe( + assert.strictEqual( + imports.build(), `import {a_value, m_value, z_value, type AType, type MType, type ZType} from '$lib/mixed.js';`, ); }); @@ -96,7 +100,7 @@ describe('ImportBuilder', () => { imports.add('$lib/action_specs.js', '* as specs'); - expect(imports.build()).toBe(`import * as specs from '$lib/action_specs.js';`); + assert.strictEqual(imports.build(), `import * as specs from '$lib/action_specs.js';`); }); test('namespace import with other imports from same module', () => { @@ -108,9 +112,9 @@ describe('ImportBuilder', () => { const result = imports.build(); const lines = result.split('\n'); - expect(lines).toHaveLength(2); - expect(lines).toContain(`import * as utils from '$lib/utils.js';`); - expect(lines).toContain(`import {something} from '$lib/other.js';`); + assert.strictEqual(lines.length, 2); + assert.include(lines, `import * as utils from '$lib/utils.js';`); + assert.include(lines, `import {something} from '$lib/other.js';`); }); test('add_many with namespace import', () => { @@ -118,7 +122,7 @@ 
describe('ImportBuilder', () => { imports.add_many('$lib/helpers.js', '* as helpers'); - expect(imports.build()).toBe(`import * as helpers from '$lib/helpers.js';`); + assert.strictEqual(imports.build(), `import * as helpers from '$lib/helpers.js';`); }); test('namespace imports are not mixed with regular imports', () => { @@ -129,7 +133,7 @@ describe('ImportBuilder', () => { imports.add('$lib/module.js', 'specific'); // Namespace imports should be on their own line - expect(imports.build()).toBe(`import * as mod from '$lib/module.js';`); + assert.strictEqual(imports.build(), `import * as mod from '$lib/module.js';`); }); }); @@ -140,7 +144,7 @@ describe('ImportBuilder', () => { imports.add_type('$lib/utils.js', 'Item'); imports.add('$lib/utils.js', 'Item'); // Upgrades to value - expect(imports.build()).toBe(`import {Item} from '$lib/utils.js';`); + assert.strictEqual(imports.build(), `import {Item} from '$lib/utils.js';`); }); test('type import does not downgrade existing value import', () => { @@ -149,7 +153,7 @@ describe('ImportBuilder', () => { imports.add('$lib/utils.js', 'Item'); imports.add_type('$lib/utils.js', 'Item'); // Should not downgrade - expect(imports.build()).toBe(`import {Item} from '$lib/utils.js';`); + assert.strictEqual(imports.build(), `import {Item} from '$lib/utils.js';`); }); test('duplicate imports are deduplicated', () => { @@ -159,7 +163,7 @@ describe('ImportBuilder', () => { imports.add_type('$lib/types.js', 'Foo'); imports.add_type('$lib/types.js', 'Foo'); - expect(imports.build()).toBe(`import type {Foo} from '$lib/types.js';`); + assert.strictEqual(imports.build(), `import type {Foo} from '$lib/types.js';`); }); test('namespace imports override previous imports', () => { @@ -168,7 +172,7 @@ describe('ImportBuilder', () => { imports.add('$lib/module.js', 'foo'); imports.add('$lib/module.js', '* as module'); // Should override - expect(imports.build()).toBe(`import * as module from '$lib/module.js';`); + 
assert.strictEqual(imports.build(), `import * as module from '$lib/module.js';`); }); }); @@ -183,10 +187,10 @@ describe('ImportBuilder', () => { const result = imports.build(); const lines = result.split('\n'); - expect(lines).toHaveLength(3); - expect(lines).toContain(`import type {TypeA, TypeB} from '$lib/types.js';`); - expect(lines).toContain(`import {util} from '$lib/utils.js';`); - expect(lines).toContain(`import type {SchemaA, SchemaB} from '$lib/schemas.js';`); + assert.strictEqual(lines.length, 3); + assert.include(lines, `import type {TypeA, TypeB} from '$lib/types.js';`); + assert.include(lines, `import {util} from '$lib/utils.js';`); + assert.include(lines, `import type {SchemaA, SchemaB} from '$lib/schemas.js';`); }); test('imports are sorted alphabetically within modules', () => { @@ -196,7 +200,10 @@ describe('ImportBuilder', () => { imports.add_type('$lib/types.js', 'Apple'); imports.add_type('$lib/types.js', 'Middle'); - expect(imports.build()).toBe(`import type {Apple, Middle, Zebra} from '$lib/types.js';`); + assert.strictEqual( + imports.build(), + `import type {Apple, Middle, Zebra} from '$lib/types.js';`, + ); }); test('handles imports with underscores and numbers correctly', () => { @@ -208,7 +215,8 @@ describe('ImportBuilder', () => { imports.add_type('$lib/types.js', 'PUBLIC_TYPE'); // Underscores sort before letters in most locales - expect(imports.build()).toBe( + assert.strictEqual( + imports.build(), `import type {PUBLIC_TYPE, Type_1, Type_2, _Private_Type} from '$lib/types.js';`, ); }); @@ -228,13 +236,13 @@ describe('ImportBuilder', () => { const lines = imports.preview(); // Module order should be based on insertion order - expect(lines[0]).toContain('$lib/third.js'); - expect(lines[1]).toContain('$lib/first.js'); - expect(lines[2]).toContain('$lib/second.js'); + assert.include(lines[0], '$lib/third.js'); + assert.include(lines[1], '$lib/first.js'); + assert.include(lines[2], '$lib/second.js'); // But items within modules are sorted 
- expect(lines[0]).toBe(`import type {Type3, Type3b} from '$lib/third.js';`); - expect(lines[1]).toBe(`import type {Type1, Type1b} from '$lib/first.js';`); + assert.strictEqual(lines[0], `import type {Type3, Type3b} from '$lib/third.js';`); + assert.strictEqual(lines[1], `import type {Type1, Type1b} from '$lib/first.js';`); }); test('handles mixed namespace and regular imports across modules', () => { @@ -247,11 +255,11 @@ describe('ImportBuilder', () => { const lines = imports.preview(); - expect(lines).toHaveLength(4); - expect(lines).toContain(`import * as specs from '$lib/specs.js';`); - expect(lines).toContain(`import type {TypeA} from '$lib/types.js';`); - expect(lines).toContain(`import {helper} from '$lib/utils.js';`); - expect(lines).toContain(`import * as schemas from '$lib/schemas.js';`); + assert.strictEqual(lines.length, 4); + assert.include(lines, `import * as specs from '$lib/specs.js';`); + assert.include(lines, `import type {TypeA} from '$lib/types.js';`); + assert.include(lines, `import {helper} from '$lib/utils.js';`); + assert.include(lines, `import * as schemas from '$lib/schemas.js';`); }); }); @@ -259,27 +267,27 @@ describe('ImportBuilder', () => { test('has_imports returns correct state', () => { const imports = new ImportBuilder(); - expect(imports.has_imports()).toBe(false); + assert.ok(!imports.has_imports()); imports.add_type('$lib/types.js', 'Foo'); - expect(imports.has_imports()).toBe(true); + assert.ok(imports.has_imports()); }); test('import_count returns correct count', () => { const imports = new ImportBuilder(); - expect(imports.import_count).toBe(0); + assert.strictEqual(imports.import_count, 0); imports.add_type('$lib/types.js', 'Foo'); - expect(imports.import_count).toBe(1); + assert.strictEqual(imports.import_count, 1); imports.add('$lib/utils.js', 'bar'); - expect(imports.import_count).toBe(2); + assert.strictEqual(imports.import_count, 2); // Adding to existing module doesn't increase count imports.add_type('$lib/types.js', 
'Bar'); - expect(imports.import_count).toBe(2); + assert.strictEqual(imports.import_count, 2); }); test('preview returns array of import statements', () => { @@ -290,9 +298,9 @@ describe('ImportBuilder', () => { const preview = imports.preview(); - expect(preview).toHaveLength(2); - expect(preview[0]).toBe(`import type {Bar, Foo} from '$lib/types.js';`); - expect(preview[1]).toBe(`import {helper} from '$lib/utils.js';`); + assert.strictEqual(preview.length, 2); + assert.strictEqual(preview[0], `import type {Bar, Foo} from '$lib/types.js';`); + assert.strictEqual(preview[1], `import {helper} from '$lib/utils.js';`); }); test('clear removes all imports', () => { @@ -301,12 +309,12 @@ describe('ImportBuilder', () => { imports.add_types('$lib/types.js', 'Foo', 'Bar'); imports.add('$lib/utils.js', 'helper'); - expect(imports.import_count).toBe(2); + assert.strictEqual(imports.import_count, 2); imports.clear(); - expect(imports.import_count).toBe(0); - expect(imports.build()).toBe(''); + assert.strictEqual(imports.import_count, 0); + assert.strictEqual(imports.build(), ''); }); test('chaining works correctly', () => { @@ -319,8 +327,8 @@ describe('ImportBuilder', () => { .clear() .add_type('$lib/final.js', 'Final'); - expect(result).toBe(imports); // Chainable - expect(imports.build()).toBe(`import type {Final} from '$lib/final.js';`); + assert.strictEqual(result, imports); // Chainable + assert.strictEqual(imports.build(), `import type {Final} from '$lib/final.js';`); }); }); @@ -330,7 +338,7 @@ describe('ImportBuilder', () => { imports.add_many('$lib/utils.js', 'util_a', 'util_b', 'util_c'); - expect(imports.build()).toBe(`import {util_a, util_b, util_c} from '$lib/utils.js';`); + assert.strictEqual(imports.build(), `import {util_a, util_b, util_c} from '$lib/utils.js';`); }); test('add_many can handle namespace imports', () => { @@ -339,7 +347,7 @@ describe('ImportBuilder', () => { imports.add_many('$lib/all.js', '* as all', 'specific'); // Only the namespace import 
should be used - expect(imports.build()).toBe(`import * as all from '$lib/all.js';`); + assert.strictEqual(imports.build(), `import * as all from '$lib/all.js';`); }); }); @@ -350,8 +358,8 @@ describe('ImportBuilder', () => { imports.add('$lib/module.js', ''); // Empty imports should be ignored - expect(imports.build()).toBe(''); - expect(imports.has_imports()).toBe(false); + assert.strictEqual(imports.build(), ''); + assert.ok(!imports.has_imports()); }); test('handles special characters in import names', () => { @@ -360,7 +368,7 @@ describe('ImportBuilder', () => { imports.add('$lib/module.js', '$special'); imports.add('$lib/module.js', '_underscore'); - expect(imports.build()).toBe(`import {$special, _underscore} from '$lib/module.js';`); + assert.strictEqual(imports.build(), `import {$special, _underscore} from '$lib/module.js';`); }); }); }); @@ -369,7 +377,7 @@ describe('get_executor_phases', () => { describe('request_response actions', () => { test('frontend initiator - ping spec', () => { // ping has initiator: 'both' - expect(get_executor_phases(ping_action_spec, 'frontend')).toEqual([ + assert.deepEqual(get_executor_phases(ping_action_spec, 'frontend'), [ 'send_request', 'receive_response', 'send_error', @@ -377,7 +385,7 @@ describe('get_executor_phases', () => { 'receive_request', 'send_response', ]); - expect(get_executor_phases(ping_action_spec, 'backend')).toEqual([ + assert.deepEqual(get_executor_phases(ping_action_spec, 'backend'), [ 'send_request', 'receive_response', 'send_error', @@ -389,13 +397,13 @@ describe('get_executor_phases', () => { test('frontend initiator - session_load spec', () => { // load_session has initiator: 'frontend' - expect(get_executor_phases(session_load_action_spec, 'frontend')).toEqual([ + assert.deepEqual(get_executor_phases(session_load_action_spec, 'frontend'), [ 'send_request', 'receive_response', 'send_error', 'receive_error', ]); - expect(get_executor_phases(session_load_action_spec, 'backend')).toEqual([ + 
assert.deepEqual(get_executor_phases(session_load_action_spec, 'backend'), [ 'receive_request', 'send_response', 'send_error', @@ -404,13 +412,13 @@ describe('get_executor_phases', () => { test('frontend initiator - completion_create spec', () => { // create_completion has initiator: 'frontend' - expect(get_executor_phases(completion_create_action_spec, 'frontend')).toEqual([ + assert.deepEqual(get_executor_phases(completion_create_action_spec, 'frontend'), [ 'send_request', 'receive_response', 'send_error', 'receive_error', ]); - expect(get_executor_phases(completion_create_action_spec, 'backend')).toEqual([ + assert.deepEqual(get_executor_phases(completion_create_action_spec, 'backend'), [ 'receive_request', 'send_response', 'send_error', @@ -421,16 +429,16 @@ describe('get_executor_phases', () => { describe('remote_notification actions', () => { test('backend initiator - filer_change spec', () => { // filer_change has initiator: 'backend' - expect(get_executor_phases(filer_change_action_spec, 'frontend')).toEqual(['receive']); - expect(get_executor_phases(filer_change_action_spec, 'backend')).toEqual(['send']); + assert.deepEqual(get_executor_phases(filer_change_action_spec, 'frontend'), ['receive']); + assert.deepEqual(get_executor_phases(filer_change_action_spec, 'backend'), ['send']); }); }); describe('local_call actions', () => { test('frontend initiator - toggle_main_menu spec', () => { // toggle_main_menu has initiator: 'frontend' - expect(get_executor_phases(toggle_main_menu_action_spec, 'frontend')).toEqual(['execute']); - expect(get_executor_phases(toggle_main_menu_action_spec, 'backend')).toEqual([]); + assert.deepEqual(get_executor_phases(toggle_main_menu_action_spec, 'frontend'), ['execute']); + assert.deepEqual(get_executor_phases(toggle_main_menu_action_spec, 'backend'), []); }); }); @@ -438,8 +446,8 @@ describe('get_executor_phases', () => { test('phases are returned in correct order', () => { const frontend_phases = 
get_executor_phases(ping_action_spec, 'frontend'); // Send phases should come before receive phases - expect(frontend_phases.indexOf('send_request')).toBeLessThan( - frontend_phases.indexOf('receive_request'), + assert.ok( + frontend_phases.indexOf('send_request') < frontend_phases.indexOf('receive_request'), ); }); @@ -448,7 +456,7 @@ describe('get_executor_phases', () => { ...toggle_main_menu_action_spec, initiator: 'backend' as const, }; - expect(get_executor_phases(spec_with_backend_only, 'frontend')).toEqual([]); + assert.deepEqual(get_executor_phases(spec_with_backend_only, 'frontend'), []); }); }); }); @@ -460,29 +468,32 @@ describe('get_handler_return_type', () => { // ping_action_spec is a request/response action const result = get_handler_return_type(ping_action_spec, 'receive_request', imports, './'); - expect(result).toBe(`ActionOutputs['ping'] | Promise`); + assert.strictEqual(result, `ActionOutputs['ping'] | Promise`); // Check that ActionOutputs was added to imports const built = imports.build(); - expect(built).toContain('ActionOutputs'); - expect(built).toContain('./action_collections.js'); + assert.include(built, 'ActionOutputs'); + assert.include(built, './action_collections.js'); }); test('other phases return void and do not add imports', () => { const imports = new ImportBuilder(); - expect(get_handler_return_type(session_load_action_spec, 'send_request', imports, './')).toBe( + assert.strictEqual( + get_handler_return_type(session_load_action_spec, 'send_request', imports, './'), 'void | Promise', ); - expect( + assert.strictEqual( get_handler_return_type(session_load_action_spec, 'send_response', imports, './'), - ).toBe('void | Promise'); - expect( + 'void | Promise', + ); + assert.strictEqual( get_handler_return_type(session_load_action_spec, 'receive_response', imports, './'), - ).toBe('void | Promise'); + 'void | Promise', + ); // Should not add ActionOutputs for void returns - expect(imports.build()).toBe(''); + 
assert.strictEqual(imports.build(), ''); }); }); @@ -497,10 +508,10 @@ describe('get_handler_return_type', () => { imports, './', ); - expect(result).toBe(`ActionOutputs['toggle_main_menu']`); + assert.strictEqual(result, `ActionOutputs['toggle_main_menu']`); // Should add ActionOutputs import - expect(imports.build()).toContain('ActionOutputs'); + assert.include(imports.build(), 'ActionOutputs'); }); test('execute phase returns Promise for async local_call', () => { @@ -513,7 +524,8 @@ describe('get_handler_return_type', () => { }; const result = get_handler_return_type(async_local_spec, 'execute', imports, './'); - expect(result).toBe( + assert.strictEqual( + result, `ActionOutputs['toggle_main_menu'] | Promise`, ); }); @@ -523,15 +535,17 @@ describe('get_handler_return_type', () => { test('all phases return void', () => { const imports = new ImportBuilder(); - expect(get_handler_return_type(filer_change_action_spec, 'send', imports, './')).toBe( + assert.strictEqual( + get_handler_return_type(filer_change_action_spec, 'send', imports, './'), 'void | Promise', ); - expect(get_handler_return_type(filer_change_action_spec, 'receive', imports, './')).toBe( + assert.strictEqual( + get_handler_return_type(filer_change_action_spec, 'receive', imports, './'), 'void | Promise', ); // Should not add imports for void returns - expect(imports.build()).toBe(''); + assert.strictEqual(imports.build(), ''); }); }); @@ -541,15 +555,15 @@ describe('get_handler_return_type', () => { // First call adds import get_handler_return_type(ping_action_spec, 'receive_request', imports, './'); - expect(imports.import_count).toBe(1); + assert.strictEqual(imports.import_count, 1); // Second call doesn't add duplicate get_handler_return_type(session_load_action_spec, 'receive_request', imports, './'); - expect(imports.import_count).toBe(1); + assert.strictEqual(imports.import_count, 1); // Void return doesn't add import get_handler_return_type(ping_action_spec, 'send_request', imports, './'); 
- expect(imports.import_count).toBe(1); + assert.strictEqual(imports.import_count, 1); }); }); }); @@ -560,52 +574,55 @@ describe('generate_phase_handlers', () => { const imports = new ImportBuilder(); const result = generate_phase_handlers(toggle_main_menu_action_spec, 'backend', imports); - expect(result).toBe('toggle_main_menu?: never'); - expect(imports.has_imports()).toBe(false); + assert.strictEqual(result, 'toggle_main_menu?: never'); + assert.ok(!imports.has_imports()); }); test('generates handlers for request_response action', () => { const imports = new ImportBuilder(); const result = generate_phase_handlers(session_load_action_spec, 'frontend', imports); - expect(result).toContain('session_load?: {'); - expect(result).toContain('send_request?:'); - expect(result).toContain('receive_response?:'); - expect(result).not.toContain('receive_request'); + assert.include(result, 'session_load?: {'); + assert.include(result, 'send_request?:'); + assert.include(result, 'receive_response?:'); + assert.notInclude(result, 'receive_request'); // Check imports were added - expect(imports.has_imports()).toBe(true); + assert.ok(imports.has_imports()); const import_str = imports.build(); - expect(import_str).toContain('ActionEvent'); - expect(import_str).toContain('Frontend'); + assert.include(import_str, 'ActionEvent'); + // No environment type in generated output + assert.notInclude(import_str, 'Frontend'); }); test('generates handlers for notification action', () => { const imports = new ImportBuilder(); const result = generate_phase_handlers(filer_change_action_spec, 'backend', imports); - expect(result).toContain('filer_change?: {'); - expect(result).toContain('send?:'); - expect(result).not.toContain('receive?:'); + assert.include(result, 'filer_change?: {'); + assert.include(result, 'send?:'); + assert.notInclude(result, 'receive?:'); const import_str = imports.build(); - expect(import_str).toContain('ActionEvent'); - expect(import_str).toContain('Backend'); + 
assert.include(import_str, 'ActionEvent'); + // No environment type in generated output + assert.notInclude(import_str, 'Backend'); }); test('generates handlers for local_call action', () => { const imports = new ImportBuilder(); const result = generate_phase_handlers(toggle_main_menu_action_spec, 'frontend', imports); - expect(result).toContain('toggle_main_menu?: {'); - expect(result).toContain('execute?:'); - expect(result).toContain(`ActionOutputs['toggle_main_menu']`); - expect(result).not.toContain('Promise'); // It's a sync action + assert.include(result, 'toggle_main_menu?: {'); + assert.include(result, 'execute?:'); + assert.include(result, `ActionOutputs['toggle_main_menu']`); + assert.notInclude(result, 'Promise'); // It's a sync action const import_str = imports.build(); - expect(import_str).toContain('ActionEvent'); - expect(import_str).toContain('ActionOutputs'); // Added by get_handler_return_type - expect(import_str).toContain('Frontend'); + assert.include(import_str, 'ActionEvent'); + assert.include(import_str, 'ActionOutputs'); // Added by get_handler_return_type + // No environment type in generated output + assert.notInclude(import_str, 'Frontend'); }); test('uses type-only imports when appropriate', () => { @@ -617,7 +634,7 @@ describe('generate_phase_handlers', () => { const lines = import_str.split('\n'); lines.forEach((line) => { if (line.trim()) { - expect(line).toMatch(/^import type/); + assert.match(line, /^import type/); } }); }); @@ -626,29 +643,23 @@ describe('generate_phase_handlers', () => { const imports = new ImportBuilder(); const result = generate_phase_handlers(ping_action_spec, 'frontend', imports); - expect(result).toContain('send_request?:'); - expect(result).toContain('receive_response?:'); - expect(result).toContain('receive_request?:'); - expect(result).toContain('send_response?:'); + assert.include(result, 'send_request?:'); + assert.include(result, 'receive_response?:'); + assert.include(result, 'receive_request?:'); + 
assert.include(result, 'send_response?:'); }); test('uses phase and step type parameters in handler signature', () => { const imports = new ImportBuilder(); const result = generate_phase_handlers(ping_action_spec, 'frontend', imports); - // Should use the new type parameter syntax instead of data override - expect(result).toContain( - `action_event: ActionEvent<'ping', Frontend, 'send_request', 'handling'>`, - ); - expect(result).toContain( - `action_event: ActionEvent<'ping', Frontend, 'receive_response', 'handling'>`, - ); - expect(result).toContain( - `action_event: ActionEvent<'ping', Frontend, 'receive_request', 'handling'>`, - ); - expect(result).toContain( - `action_event: ActionEvent<'ping', Frontend, 'send_response', 'handling'>`, - ); + // 3-param ActionEvent: method, phase, step (no environment type) + assert.include(result, `action_event: ActionEvent<'ping', 'send_request', 'handling'>`); + assert.include(result, `action_event: ActionEvent<'ping', 'receive_response', 'handling'>`); + assert.include(result, `action_event: ActionEvent<'ping', 'receive_request', 'handling'>`); + assert.include(result, `action_event: ActionEvent<'ping', 'send_response', 'handling'>`); + // No environment type + assert.notInclude(result, 'Frontend'); }); test('handles ActionOutputs import for handlers that return values', () => { @@ -656,12 +667,12 @@ describe('generate_phase_handlers', () => { // ping has receive_request handler on backend which returns output const result = generate_phase_handlers(ping_action_spec, 'backend', imports); - expect(result).toContain('receive_request?:'); - expect(result).toContain(`ActionOutputs['ping'] | Promise`); + assert.include(result, 'receive_request?:'); + assert.include(result, `ActionOutputs['ping'] | Promise`); // Check that ActionOutputs was imported const import_str = imports.build(); - expect(import_str).toContain('ActionOutputs'); + assert.include(import_str, 'ActionOutputs'); }); test('handler formatting is consistent', () => { 
@@ -670,9 +681,9 @@ describe('generate_phase_handlers', () => { // Check indentation and formatting const lines = result.split('\n'); - expect(lines[0]).toMatch(/^ping\?: \{$/); - expect(lines[1]).toMatch(/^\t\t/); // Two tabs for handler definitions - expect(lines[lines.length - 1]).toMatch(/^\t\}$/); // One tab for closing brace + assert.match(lines[0]!, /^ping\?: \{$/); + assert.match(lines[1]!, /^\t\t/); // Two tabs for handler definitions + assert.match(lines[lines.length - 1]!, /^\t\}$/); // One tab for closing brace }); test('imports are deduplicated across multiple specs', () => { @@ -686,28 +697,31 @@ describe('generate_phase_handlers', () => { const import_str = imports.build(); // Should have exactly one import of each type - expect(import_str.match(/ActionEvent/g)?.length).toBe(1); - expect(import_str.match(/Frontend/g)?.length).toBe(1); - expect(import_str.match(/ActionOutputs/g)?.length).toBe(1); + assert.strictEqual(import_str.match(/ActionEvent/g)?.length, 1); + assert.strictEqual(import_str.match(/ActionOutputs/g)?.length, 1); + // No environment type imports + assert.notInclude(import_str, 'Frontend'); }); - test('frontend generates correct relative import paths', () => { + test('frontend generates correct import paths', () => { const imports = new ImportBuilder(); generate_phase_handlers(ping_action_spec, 'frontend', imports); const import_str = imports.build(); - expect(import_str).toContain("from './action_event.js'"); - expect(import_str).toContain("from './frontend.svelte.js'"); - expect(import_str).toContain("from './action_collections.js'"); + assert.include(import_str, "from '@fuzdev/fuz_app/actions/action_event.js'"); + assert.include(import_str, "from './action_collections.js'"); + // No environment type import paths + assert.notInclude(import_str, 'frontend.svelte.js'); }); - test('backend generates correct relative import paths', () => { + test('backend generates correct import paths', () => { const imports = new ImportBuilder(); 
generate_phase_handlers(ping_action_spec, 'backend', imports); const import_str = imports.build(); - expect(import_str).toContain("from '../action_event.js'"); - expect(import_str).toContain("from './backend.js'"); - expect(import_str).toContain("from '../action_collections.js'"); + assert.include(import_str, "from '@fuzdev/fuz_app/actions/action_event.js'"); + assert.include(import_str, "from '../action_collections.js'"); + // No environment type import paths + assert.notInclude(import_str, 'backend.js'); }); }); diff --git a/src/test/config_defaults.test.ts b/src/test/config_defaults.test.ts index 1b5c34fc9..10f9a48e2 100644 --- a/src/test/config_defaults.test.ts +++ b/src/test/config_defaults.test.ts @@ -1,4 +1,4 @@ -import {test, expect} from 'vitest'; +import {test, assert} from 'vitest'; import {providers_default, models_default, chat_template_defaults} from '$lib/config_defaults.js'; @@ -11,10 +11,10 @@ test('all model provider_names exist in providers_default', () => { // Check that each model's provider exists for (const provider_name of model_provider_names) { - expect( + assert.ok( provider_names.has(provider_name), `Provider "${provider_name}" used in models_default does not exist in providers_default`, - ).toBe(true); + ); } }); @@ -35,8 +35,9 @@ test('all chat template model_names exist in models_default', () => { } } - expect( + assert.deepEqual( missing_models, + [], `The following models in chat_template_defaults do not exist in models_default: ${missing_models.join(', ')}`, - ).toEqual([]); + ); }); diff --git a/src/test/db_fixture.ts b/src/test/db_fixture.ts new file mode 100644 index 000000000..f39d42202 --- /dev/null +++ b/src/test/db_fixture.ts @@ -0,0 +1,42 @@ +/** + * PGlite database fixture for zzz tests. + * + * Follows the fuz_app consumer pattern: init_schema runs auth migrations, + * describe_db provides per-factory test suite scoping with automatic truncation. 
+ * + * @module + */ + +import { + create_pglite_factory, + create_pg_factory, + create_describe_db, + log_db_factory_status, + drop_auth_schema, + AUTH_INTEGRATION_TRUNCATE_TABLES, + type DbFactory, +} from '@fuzdev/fuz_app/testing/db.js'; +import {run_migrations} from '@fuzdev/fuz_app/db/migrate.js'; +import {AUTH_MIGRATION_NS} from '@fuzdev/fuz_app/auth/migrations.js'; +import type {Db} from '@fuzdev/fuz_app/db/db.js'; + +import {init_zzz_schema} from '$lib/server/db/zzz_schema.js'; + +const init_schema = async (db: Db): Promise => { + await drop_auth_schema(db); + await run_migrations(db, [AUTH_MIGRATION_NS]); + await init_zzz_schema(db); // no-op currently — wired for future zzz-specific DDL +}; + +// No zzz-specific tables yet — auth tables only +const TRUNCATE_TABLES = AUTH_INTEGRATION_TRUNCATE_TABLES; + +export const pglite_factory = create_pglite_factory(init_schema); + +const pg_factory = create_pg_factory(init_schema, process.env.TEST_DATABASE_URL); + +export const db_factories: Array = [pglite_factory, pg_factory]; + +log_db_factory_status(db_factories); + +export const describe_db = create_describe_db([pglite_factory], TRUNCATE_TABLES); diff --git a/src/test/diskfile_editor_state.svelte.base.test.ts b/src/test/diskfile_editor_state.svelte.base.test.ts index ea7942c8d..a2432afc3 100644 --- a/src/test/diskfile_editor_state.svelte.base.test.ts +++ b/src/test/diskfile_editor_state.svelte.base.test.ts @@ -1,14 +1,13 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, beforeEach, describe} from 'vitest'; +import {test, beforeEach, describe, assert} from 'vitest'; import {DiskfileEditorState} from '$lib/diskfile_editor_state.svelte.js'; import {DiskfilePath, SerializableDisknode} from '$lib/diskfile_types.js'; import {Frontend} from '$lib/frontend.svelte.js'; import {Diskfile} from '$lib/diskfile.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from 
'./test_helpers.js'; // Constants for testing const TEST_PATH = DiskfilePath.parse('/path/to/test.txt'); @@ -40,31 +39,31 @@ beforeEach(() => { describe('initialization', () => { test('editor_state initializes with correct values', () => { - expect(editor_state.original_content).toBe(TEST_CONTENT); - expect(editor_state.current_content).toBe(TEST_CONTENT); - expect(editor_state.has_changes).toBe(false); - expect(editor_state.content_was_modified_by_user).toBe(false); - expect(editor_state.unsaved_edit_entry_id).toBeNull(); - expect(editor_state.last_seen_disk_content).toBe(TEST_CONTENT); + assert.strictEqual(editor_state.original_content, TEST_CONTENT); + assert.strictEqual(editor_state.current_content, TEST_CONTENT); + assert.ok(!editor_state.has_changes); + assert.ok(!editor_state.content_was_modified_by_user); + assert.isNull(editor_state.unsaved_edit_entry_id); + assert.strictEqual(editor_state.last_seen_disk_content, TEST_CONTENT); // Selected history entry should be initialized to the current entry const history = app.get_diskfile_history(TEST_PATH); - expect(history).toBeDefined(); - expect(history!.entries.length).toBe(1); - expect(editor_state.selected_history_entry_id).toBe(history!.entries[0]!.id); - expect(history!.entries[0]!.content).toBe(TEST_CONTENT); + assert.isDefined(history); + assert.strictEqual(history.entries.length, 1); + assert.strictEqual(editor_state.selected_history_entry_id, history.entries[0]!.id); + assert.strictEqual(history.entries[0]!.content, TEST_CONTENT); }); test('editor_state initializes with correct history entry', () => { const history = app.get_diskfile_history(TEST_PATH); - expect(history).toBeDefined(); - expect(history!.entries.length).toBe(1); + assert.isDefined(history); + assert.strictEqual(history.entries.length, 1); // The initial entry should contain the original content - expect(history!.entries[0]!.content).toBe(TEST_CONTENT); - expect(history!.entries[0]!.is_unsaved_edit).toBe(false); - 
expect(history!.entries[0]!.is_disk_change).toBe(false); - expect(history!.entries[0]!.is_original_state).toBe(true); + assert.strictEqual(history.entries[0]!.content, TEST_CONTENT); + assert.ok(!history.entries[0]!.is_unsaved_edit); + assert.ok(!history.entries[0]!.is_disk_change); + assert.ok(history.entries[0]!.is_original_state); }); test('editor_state handles initialization with null content', () => { @@ -82,15 +81,15 @@ describe('initialization', () => { }); // Check state properties - expect(null_editor_state.original_content).toBeNull(); - expect(null_editor_state.current_content).toBe(''); - expect(null_editor_state.has_changes).toBe(false); - expect(null_editor_state.last_seen_disk_content).toBeNull(); + assert.isNull(null_editor_state.original_content); + assert.strictEqual(null_editor_state.current_content, ''); + assert.ok(!null_editor_state.has_changes); + assert.isNull(null_editor_state.last_seen_disk_content); // History should still be created const history = app.get_diskfile_history(null_diskfile.path); - expect(history).toBeDefined(); - expect(history!.entries.length).toBe(0); // No entries for null content + assert.isDefined(history); + assert.strictEqual(history.entries.length, 0); // No entries for null content }); }); @@ -99,35 +98,35 @@ describe('content editing', () => { const new_content = 'Modified content'; editor_state.current_content = new_content; - expect(editor_state.current_content).toBe(new_content); - expect(editor_state.has_changes).toBe(true); - expect(editor_state.content_was_modified_by_user).toBe(true); + assert.strictEqual(editor_state.current_content, new_content); + assert.ok(editor_state.has_changes); + assert.ok(editor_state.content_was_modified_by_user); }); test('content modifications track user edits flag', () => { // Initial state - no user edits - expect(editor_state.content_was_modified_by_user).toBe(false); + assert.ok(!editor_state.content_was_modified_by_user); // Change content - should mark as user-edited 
editor_state.current_content = 'User edit'; - expect(editor_state.content_was_modified_by_user).toBe(true); + assert.ok(editor_state.content_was_modified_by_user); // Change back to original - should clear user-edited flag editor_state.current_content = TEST_CONTENT; - expect(editor_state.content_was_modified_by_user).toBe(false); + assert.ok(!editor_state.content_was_modified_by_user); }); test('has_changes tracks difference between current and original content', () => { // Initial state - no changes - expect(editor_state.has_changes).toBe(false); + assert.ok(!editor_state.has_changes); // Make a change editor_state.current_content = 'Changed content'; - expect(editor_state.has_changes).toBe(true); + assert.ok(editor_state.has_changes); // Change back to original editor_state.current_content = TEST_CONTENT; - expect(editor_state.has_changes).toBe(false); + assert.ok(!editor_state.has_changes); }); test('editing content preserves selection state', () => { @@ -137,56 +136,56 @@ describe('content editing', () => { // Get the selected entry id const selected_id = editor_state.selected_history_entry_id; - expect(selected_id).not.toBeNull(); + assert.ok(selected_id !== null); // Make another edit editor_state.current_content = 'Second edit'; // Selection should still be active - expect(editor_state.selected_history_entry_id).not.toBeNull(); + assert.ok(editor_state.selected_history_entry_id !== null); // Content should be updated in the selected entry - const updated_entry = history.find_entry_by_id(editor_state.selected_history_entry_id!); - expect(updated_entry).toBeDefined(); - expect(updated_entry!.content).toBe('Second edit'); + const updated_entry = history.find_entry_by_id(editor_state.selected_history_entry_id); + assert.isDefined(updated_entry); + assert.strictEqual(updated_entry.content, 'Second edit'); }); test('editing to match original content clears user modified flag', () => { // Make an edit editor_state.current_content = 'User edit'; - 
expect(editor_state.content_was_modified_by_user).toBe(true); - expect(editor_state.has_changes).toBe(true); + assert.ok(editor_state.content_was_modified_by_user); + assert.ok(editor_state.has_changes); // Edit back to match original editor_state.current_content = TEST_CONTENT; // Flags should be cleared - expect(editor_state.content_was_modified_by_user).toBe(false); - expect(editor_state.has_changes).toBe(false); + assert.ok(!editor_state.content_was_modified_by_user); + assert.ok(!editor_state.has_changes); }); }); describe('content metrics', () => { test('editor provides accurate content length metrics', () => { // Initial length - expect(editor_state.original_length).toBe(TEST_CONTENT.length); - expect(editor_state.current_length).toBe(TEST_CONTENT.length); - expect(editor_state.length_diff).toBe(0); - expect(editor_state.length_diff_percent).toBe(0); + assert.strictEqual(editor_state.original_length, TEST_CONTENT.length); + assert.strictEqual(editor_state.current_length, TEST_CONTENT.length); + assert.strictEqual(editor_state.length_diff, 0); + assert.strictEqual(editor_state.length_diff_percent, 0); // Update content const new_content = 'Shorter'; editor_state.current_content = new_content; // Check metrics - expect(editor_state.current_length).toBe(new_content.length); - expect(editor_state.length_diff).toBe(new_content.length - TEST_CONTENT.length); + assert.strictEqual(editor_state.current_length, new_content.length); + assert.strictEqual(editor_state.length_diff, new_content.length - TEST_CONTENT.length); // Percent change should be negative const expected_percent = Math.round( ((new_content.length - TEST_CONTENT.length) / TEST_CONTENT.length) * 100, ); - expect(editor_state.length_diff_percent).toBe(expected_percent); + assert.strictEqual(editor_state.length_diff_percent, expected_percent); }); test('editor provides accurate token metrics', () => { @@ -195,9 +194,10 @@ describe('content metrics', () => { editor_state.current_content = 
token_test_content; // Verify token calculations - expect(editor_state.current_token_count).toBeGreaterThan(0); - expect(editor_state.current_token_count).toBe(editor_state.current_token_count); - expect(editor_state.token_diff).toBe( + assert.ok(editor_state.current_token_count > 0); + assert.strictEqual(editor_state.current_token_count, editor_state.current_token_count); + assert.strictEqual( + editor_state.token_diff, editor_state.current_token_count - editor_state.original_token_count, ); @@ -207,7 +207,7 @@ describe('content metrics', () => { editor_state.original_token_count) * 100, ); - expect(editor_state.token_diff_percent).toBe(expected_token_percent); + assert.strictEqual(editor_state.token_diff_percent, expected_token_percent); }); test('editor handles metrics for empty content', () => { @@ -215,15 +215,15 @@ describe('content metrics', () => { editor_state.current_content = ''; // Check length metrics - expect(editor_state.current_length).toBe(0); - expect(editor_state.length_diff).toBe(-TEST_CONTENT.length); - expect(editor_state.length_diff_percent).toBe(-100); + assert.strictEqual(editor_state.current_length, 0); + assert.strictEqual(editor_state.length_diff, -TEST_CONTENT.length); + assert.strictEqual(editor_state.length_diff_percent, -100); // Check token metrics - expect(editor_state.current_token_count).toBe(0); - expect(editor_state.current_token_count).toBe(0); - expect(editor_state.token_diff).toBe(-editor_state.original_token_count); - expect(editor_state.token_diff_percent).toBe(-100); + assert.strictEqual(editor_state.current_token_count, 0); + assert.strictEqual(editor_state.current_token_count, 0); + assert.strictEqual(editor_state.token_diff, -editor_state.original_token_count); + assert.strictEqual(editor_state.token_diff_percent, -100); }); test('length_diff_percent handles zero original length correctly', () => { @@ -244,12 +244,12 @@ describe('content metrics', () => { empty_editor_state.current_content = 'New content'; // Since 
original length was 0, percentage should be 100% - expect(empty_editor_state.original_length).toBe(0); - expect(empty_editor_state.length_diff_percent).toBe(100); + assert.strictEqual(empty_editor_state.original_length, 0); + assert.strictEqual(empty_editor_state.length_diff_percent, 100); // Same for tokens - expect(empty_editor_state.original_token_count).toBe(0); - expect(empty_editor_state.token_diff_percent).toBe(100); + assert.strictEqual(empty_editor_state.original_token_count, 0); + assert.strictEqual(empty_editor_state.token_diff_percent, 100); }); }); @@ -271,17 +271,17 @@ describe('file management', () => { editor_state.update_diskfile(another_diskfile); // Verify state was properly updated - expect(editor_state.diskfile).toBe(another_diskfile); - expect(editor_state.original_content).toBe(another_content); - expect(editor_state.current_content).toBe(another_content); - expect(editor_state.has_changes).toBe(false); - expect(editor_state.content_was_modified_by_user).toBe(false); + assert.strictEqual(editor_state.diskfile, another_diskfile); + assert.strictEqual(editor_state.original_content, another_content); + assert.strictEqual(editor_state.current_content, another_content); + assert.ok(!editor_state.has_changes); + assert.ok(!editor_state.content_was_modified_by_user); // History should be initialized for the new file const new_history = app.get_diskfile_history(another_path); - expect(new_history).toBeDefined(); - expect(new_history!.entries.length).toBe(1); - expect(new_history!.entries[0]!.content).toBe(another_content); + assert.isDefined(new_history); + assert.strictEqual(new_history.entries.length, 1); + assert.strictEqual(new_history.entries[0]!.content, another_content); }); test('update_diskfile does nothing when same diskfile is provided', () => { @@ -296,8 +296,8 @@ describe('file management', () => { editor_state.update_diskfile(test_diskfile); // State should remain unchanged - expect(editor_state.current_content).toBe(current_content); - 
expect(editor_state.content_was_modified_by_user).toBe(current_modified); + assert.strictEqual(editor_state.current_content, current_content); + assert.strictEqual(editor_state.content_was_modified_by_user, current_modified); }); test('reset clears editor state and reverts to original content', () => { @@ -313,47 +313,47 @@ describe('file management', () => { editor_state.reset(); // Verify state is reset - expect(editor_state.current_content).toBe(TEST_CONTENT); - expect(editor_state.has_changes).toBe(false); - expect(editor_state.content_was_modified_by_user).toBe(false); - expect(editor_state.unsaved_edit_entry_id).toBeNull(); - expect(editor_state.selected_history_entry_id).toBeNull(); + assert.strictEqual(editor_state.current_content, TEST_CONTENT); + assert.ok(!editor_state.has_changes); + assert.ok(!editor_state.content_was_modified_by_user); + assert.isNull(editor_state.unsaved_edit_entry_id); + assert.isNull(editor_state.selected_history_entry_id); }); }); describe('derived state', () => { test('derived property has_history is accurate', () => { // Initial state - only one entry, should not have history - expect(editor_state.has_history).toBe(false); + assert.ok(!editor_state.has_history); // Add an entry editor_state.current_content = 'New content'; // Now we should have history - expect(editor_state.has_history).toBe(true); + assert.ok(editor_state.has_history); }); test('derived property has_unsaved_edits is accurate', async () => { // Initial state - no unsaved edits - expect(editor_state.has_unsaved_edits).toBe(false); + assert.ok(!editor_state.has_unsaved_edits); // Make an edit editor_state.current_content = 'Unsaved edit'; // Now we should have unsaved edits - expect(editor_state.has_unsaved_edits).toBe(true); + assert.ok(editor_state.has_unsaved_edits); // Save the changes await editor_state.save_changes(); // No more unsaved edits - expect(editor_state.has_unsaved_edits).toBe(false); + assert.ok(!editor_state.has_unsaved_edits); }); test('derived 
properties for UI state management', () => { // Initial state - expect(editor_state.can_clear_history).toBe(false); - expect(editor_state.can_clear_unsaved_edits).toBe(false); + assert.ok(!editor_state.can_clear_history); + assert.ok(!editor_state.can_clear_unsaved_edits); // Add a saved entry const history = app.get_diskfile_history(TEST_PATH)!; @@ -361,13 +361,13 @@ describe('derived state', () => { history.add_entry('Saved entry 2'); // Now we can clear history - expect(editor_state.can_clear_history).toBe(true); + assert.ok(editor_state.can_clear_history); // Add an unsaved entry editor_state.current_content = 'Unsaved edit'; // Now we can clear unsaved edits as well - expect(editor_state.can_clear_unsaved_edits).toBe(true); + assert.ok(editor_state.can_clear_unsaved_edits); }); test('content_matching_entry_ids tracks entries with matching content', () => { @@ -378,17 +378,17 @@ describe('derived state', () => { const entry3 = history.add_entry('Duplicate content'); // Initial check - current content doesn't match any entry - expect(editor_state.content_matching_entry_ids).not.toContain(entry1.id); - expect(editor_state.content_matching_entry_ids).not.toContain(entry2.id); - expect(editor_state.content_matching_entry_ids).not.toContain(entry3.id); + assert.notInclude(editor_state.content_matching_entry_ids, entry1.id); + assert.notInclude(editor_state.content_matching_entry_ids, entry2.id); + assert.notInclude(editor_state.content_matching_entry_ids, entry3.id); // Set content to match duplicates editor_state.current_content = 'Duplicate content'; // Verify matching entries are tracked - expect(editor_state.content_matching_entry_ids).toContain(entry2.id); - expect(editor_state.content_matching_entry_ids).toContain(entry3.id); - expect(editor_state.content_matching_entry_ids).not.toContain(entry1.id); + assert.include(editor_state.content_matching_entry_ids, entry2.id); + assert.include(editor_state.content_matching_entry_ids, entry3.id); + 
assert.notInclude(editor_state.content_matching_entry_ids, entry1.id); }); }); @@ -401,21 +401,21 @@ describe('saving changes', () => { const result = await editor_state.save_changes(); // Verify result and diskfile update - expect(result).toBe(true); - expect(test_diskfile.content).toBe('Content to save'); - expect(editor_state.last_seen_disk_content).toBe('Content to save'); - expect(editor_state.content_was_modified_by_user).toBe(false); + assert.ok(result); + assert.strictEqual(test_diskfile.content, 'Content to save'); + assert.strictEqual(editor_state.last_seen_disk_content, 'Content to save'); + assert.ok(!editor_state.content_was_modified_by_user); }); test('save_changes with no changes returns false', async () => { // Don't make any changes - expect(editor_state.has_changes).toBe(false); + assert.ok(!editor_state.has_changes); // Try to save const result = await editor_state.save_changes(); // Verify nothing was saved - expect(result).toBe(false); + assert.ok(!result); }); test('save_changes creates history entry with correct properties', async () => { @@ -427,8 +427,8 @@ describe('saving changes', () => { // Check history entry const history = app.get_diskfile_history(TEST_PATH)!; - expect(history.entries[0]!.content).toBe('Content to be saved'); - expect(history.entries[0]!.is_unsaved_edit).toBe(false); - expect(history.entries[0]!.is_disk_change).toBe(false); + assert.strictEqual(history.entries[0]!.content, 'Content to be saved'); + assert.ok(!history.entries[0]!.is_unsaved_edit); + assert.ok(!history.entries[0]!.is_disk_change); }); }); diff --git a/src/test/diskfile_editor_state.svelte.disk_changes.test.ts b/src/test/diskfile_editor_state.svelte.disk_changes.test.ts index 614078fcc..a11206f10 100644 --- a/src/test/diskfile_editor_state.svelte.disk_changes.test.ts +++ b/src/test/diskfile_editor_state.svelte.disk_changes.test.ts @@ -1,14 +1,13 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, beforeEach, describe} 
from 'vitest'; +import {test, beforeEach, describe, assert} from 'vitest'; import {DiskfileEditorState} from '$lib/diskfile_editor_state.svelte.js'; import {DiskfilePath, SerializableDisknode} from '$lib/diskfile_types.js'; import {Frontend} from '$lib/frontend.svelte.js'; import {Diskfile} from '$lib/diskfile.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Constants for testing const TEST_PATH = DiskfilePath.parse('/path/to/test.txt'); @@ -41,7 +40,7 @@ beforeEach(() => { describe('disk change detection', () => { test('identifies when disk content changes', () => { // Initial state - no disk content tracking issues - expect(editor_state.last_seen_disk_content).toBe(TEST_CONTENT); + assert.strictEqual(editor_state.last_seen_disk_content, TEST_CONTENT); // Simulate a change on disk const new_disk_content = 'Content changed on disk'; @@ -51,8 +50,8 @@ describe('disk change detection', () => { editor_state.check_disk_changes(); // Since there are no user edits, content should be auto-updated - expect(editor_state.current_content).toBe(new_disk_content); - expect(editor_state.last_seen_disk_content).toBe(new_disk_content); + assert.strictEqual(editor_state.current_content, new_disk_content); + assert.strictEqual(editor_state.last_seen_disk_content, new_disk_content); }); test('with no user edits automatically updates content and selection', () => { @@ -64,8 +63,8 @@ describe('disk change detection', () => { editor_state.check_disk_changes(); // Content should be auto-updated - expect(editor_state.current_content).toBe(disk_content); - expect(editor_state.last_seen_disk_content).toBe(disk_content); + assert.strictEqual(editor_state.current_content, disk_content); + assert.strictEqual(editor_state.last_seen_disk_content, disk_content); // History should have a new entry with disk change flag const history = app.get_diskfile_history(TEST_PATH)!; @@ -73,13 +72,13 @@ describe('disk 
change detection', () => { (entry) => entry.is_disk_change && entry.content === disk_content, ); - expect(disk_entry).toMatchObject({ + assert.include(disk_entry, { content: disk_content, is_disk_change: true, }); // Selection should point to the new disk change entry - expect(editor_state.selected_history_entry_id).toBe(disk_entry!.id); + assert.strictEqual(editor_state.selected_history_entry_id, disk_entry!.id); }); test('ignores null content states', () => { @@ -91,7 +90,7 @@ describe('disk change detection', () => { editor_state.check_disk_changes(); // Nothing should happen, no errors - expect(editor_state.last_seen_disk_content).toBeNull(); + assert.isNull(editor_state.last_seen_disk_content); }); test('does nothing if disk content matches last seen', () => { @@ -103,7 +102,7 @@ describe('disk change detection', () => { editor_state.check_disk_changes(); // Last seen should remain unchanged - expect(editor_state.last_seen_disk_content).toBe('Last seen content'); + assert.strictEqual(editor_state.last_seen_disk_content, 'Last seen content'); }); test('handles first-time initialization correctly', () => { @@ -126,24 +125,24 @@ describe('disk change detection', () => { new_editor_state.check_disk_changes(); // last_seen_disk_content should be initialized - expect(new_editor_state.last_seen_disk_content).toBe('Initial content'); - expect(new_editor_state.current_content).toBe('Initial content'); + assert.strictEqual(new_editor_state.last_seen_disk_content, 'Initial content'); + assert.strictEqual(new_editor_state.current_content, 'Initial content'); }); test('with user edits adds disk change to history but preserves user content', () => { // First make a user edit editor_state.current_content = 'User edited content'; - expect(editor_state.content_was_modified_by_user).toBe(true); + assert.ok(editor_state.content_was_modified_by_user); // Simulate disk change test_diskfile.content = 'Changed on disk'; editor_state.check_disk_changes(); // User content should be 
preserved - expect(editor_state.current_content).toBe('User edited content'); + assert.strictEqual(editor_state.current_content, 'User edited content'); // Last seen content should be updated - expect(editor_state.last_seen_disk_content).toBe('Changed on disk'); + assert.strictEqual(editor_state.last_seen_disk_content, 'Changed on disk'); // Find the disk change entry const history = app.get_diskfile_history(TEST_PATH)!; @@ -151,13 +150,13 @@ describe('disk change detection', () => { (entry) => entry.is_disk_change && entry.content === 'Changed on disk', ); - expect(disk_entry).toMatchObject({ + assert.include(disk_entry, { content: 'Changed on disk', is_disk_change: true, }); // Selection should not automatically change to disk entry - expect(editor_state.selected_history_entry_id).not.toBe(disk_entry!.id); + assert.notStrictEqual(editor_state.selected_history_entry_id, disk_entry!.id); }); test('skips adding disk change if content matches existing entries', () => { @@ -172,7 +171,7 @@ describe('disk change detection', () => { editor_state.check_disk_changes(); // No new entry should be added - expect(history.entries.length).toBe(count_after_first); + assert.strictEqual(history.entries.length, count_after_first); }); test('marks existing entry as disk change when content matches', () => { @@ -184,19 +183,19 @@ describe('disk change detection', () => { }); // Verify the entry isn't marked as a disk change yet - expect(entry.is_disk_change).toBe(false); - expect(entry.is_unsaved_edit).toBe(true); + assert.ok(!entry.is_disk_change); + assert.ok(entry.is_unsaved_edit); // Make a disk change that matches the existing entry's content test_diskfile.content = 'New content on disk'; editor_state.check_disk_changes(); // The existing entry should now be marked as a disk change and not an unsaved edit - expect(history.entries[0]!.is_disk_change).toBe(true); - expect(history.entries[0]!.is_unsaved_edit).toBe(false); + assert.ok(history.entries[0]!.is_disk_change); + 
assert.ok(!history.entries[0]!.is_unsaved_edit); // No new entry should be created - expect(history.entries.length).toBe(2); // Original + our added entry + assert.strictEqual(history.entries.length, 2); // Original + our added entry }); }); @@ -221,12 +220,12 @@ describe('file history management', () => { (e) => e.content === 'Second disk change' && e.is_disk_change, ); - expect(firstEntry).toMatchObject({ + assert.include(firstEntry, { content: 'First disk change', is_disk_change: true, }); - expect(secondEntry).toMatchObject({ + assert.include(secondEntry, { content: 'Second disk change', is_disk_change: true, }); @@ -242,7 +241,7 @@ describe('file history management', () => { editor_state.check_disk_changes(); // Selection should remain on user's edit - expect(editor_state.selected_history_entry_id).toBe(selected_id); + assert.strictEqual(editor_state.selected_history_entry_id, selected_id); }); test('with user selection of older history maintains that selection during disk change', () => { @@ -252,14 +251,14 @@ describe('file history management', () => { // Select the older entry editor_state.set_content_from_history(older_entry.id); - expect(editor_state.selected_history_entry_id).toBe(older_entry.id); + assert.strictEqual(editor_state.selected_history_entry_id, older_entry.id); // Simulate disk change test_diskfile.content = 'New disk content'; editor_state.check_disk_changes(); // Selection should remain on the older entry - expect(editor_state.selected_history_entry_id).toBe(older_entry.id); + assert.strictEqual(editor_state.selected_history_entry_id, older_entry.id); }); }); @@ -272,13 +271,13 @@ describe('save changes behavior', () => { await editor_state.save_changes(); // Content should be saved to disk - expect(test_diskfile.content).toBe('User edit to save'); + assert.strictEqual(test_diskfile.content, 'User edit to save'); // Last seen disk content should be updated - expect(editor_state.last_seen_disk_content).toBe('User edit to save'); + 
assert.strictEqual(editor_state.last_seen_disk_content, 'User edit to save'); // User modified flag should be cleared - expect(editor_state.content_was_modified_by_user).toBe(false); + assert.ok(!editor_state.content_was_modified_by_user); }); test('saving during disk changes preserves selected content', async () => { @@ -293,10 +292,10 @@ describe('save changes behavior', () => { await editor_state.save_changes(); // Disk should have user content - expect(test_diskfile.content).toBe('User edit'); + assert.strictEqual(test_diskfile.content, 'User edit'); // Last seen content should be updated - expect(editor_state.last_seen_disk_content).toBe('User edit'); + assert.strictEqual(editor_state.last_seen_disk_content, 'User edit'); }); }); @@ -307,14 +306,14 @@ describe('edge cases', () => { editor_state.check_disk_changes(); // With no user edits, content should be updated to empty - expect(editor_state.current_content).toBe(''); - expect(editor_state.last_seen_disk_content).toBe(''); + assert.strictEqual(editor_state.current_content, ''); + assert.strictEqual(editor_state.last_seen_disk_content, ''); // History should include empty content entry const history = app.get_diskfile_history(TEST_PATH)!; const empty_entry = history.entries.find((e) => e.content === '' && e.is_disk_change); - expect(empty_entry).toMatchObject({ + assert.include(empty_entry, { content: '', is_disk_change: true, }); @@ -344,10 +343,10 @@ describe('edge cases', () => { empty_history_editor.check_disk_changes(); // Should handle gracefully without errors - expect(empty_history_editor.current_content).toBe('Disk changed'); + assert.strictEqual(empty_history_editor.current_content, 'Disk changed'); // The implementation should create an entry with the disk_change flag - expect(history.entries[0]!).toMatchObject({ + assert.include(history.entries[0]!, { content: 'Disk changed', is_disk_change: true, }); @@ -364,7 +363,7 @@ describe('edge cases', () => { editor_state.check_disk_changes(); // Should 
not crash and maintain same state - expect(editor_state.current_content).toBe('User edit'); + assert.strictEqual(editor_state.current_content, 'User edit'); }); test('editing to match disk content is handled properly', () => { @@ -376,6 +375,6 @@ describe('edge cases', () => { editor_state.current_content = 'Disk content'; // User modified state should be false since it matches disk content - expect(editor_state.content_was_modified_by_user).toBe(false); + assert.ok(!editor_state.content_was_modified_by_user); }); }); diff --git a/src/test/diskfile_editor_state.svelte.history.test.ts b/src/test/diskfile_editor_state.svelte.history.test.ts index 45a1a237a..b5dade1b1 100644 --- a/src/test/diskfile_editor_state.svelte.history.test.ts +++ b/src/test/diskfile_editor_state.svelte.history.test.ts @@ -1,14 +1,13 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, beforeEach, describe} from 'vitest'; +import {test, beforeEach, describe, assert} from 'vitest'; import {DiskfileEditorState} from '$lib/diskfile_editor_state.svelte.js'; import {DiskfilePath, SerializableDisknode} from '$lib/diskfile_types.js'; import {Frontend} from '$lib/frontend.svelte.js'; import {Diskfile} from '$lib/diskfile.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Constants for testing const TEST_PATH = DiskfilePath.parse('/path/to/test.txt'); @@ -45,20 +44,20 @@ describe('unsaved edit creation', () => { editor_state.current_content = new_content; // Verify an unsaved entry was created - expect(editor_state.unsaved_edit_entry_id).not.toBeNull(); + assert.ok(editor_state.unsaved_edit_entry_id !== null); // Verify the new entry const history = app.get_diskfile_history(TEST_PATH)!; - const new_entry = history.find_entry_by_id(editor_state.unsaved_edit_entry_id!); + const new_entry = history.find_entry_by_id(editor_state.unsaved_edit_entry_id); - expect(new_entry).toMatchObject({ 
+ assert.include(new_entry, { content: new_content, is_unsaved_edit: true, label: 'Unsaved edit', }); // Selection should match the unsaved entry - expect(editor_state.selected_history_entry_id).toBe(editor_state.unsaved_edit_entry_id); + assert.strictEqual(editor_state.selected_history_entry_id, editor_state.unsaved_edit_entry_id); }); test('multiple content updates modify the same unsaved entry', () => { @@ -67,20 +66,20 @@ describe('unsaved edit creation', () => { // Track the entry id const unsaved_id = editor_state.unsaved_edit_entry_id; - expect(unsaved_id).not.toBeNull(); + assert.ok(unsaved_id !== null); // Make additional edits editor_state.current_content = 'Second edit'; editor_state.current_content = 'Third edit'; // Verify the same entry was updated - expect(editor_state.unsaved_edit_entry_id).toBe(unsaved_id); + assert.strictEqual(editor_state.unsaved_edit_entry_id, unsaved_id); // Verify the entry content was updated const history = app.get_diskfile_history(TEST_PATH)!; - const updated_entry = history.find_entry_by_id(unsaved_id!); + const updated_entry = history.find_entry_by_id(unsaved_id); - expect(updated_entry).toMatchObject({ + assert.include(updated_entry, { content: 'Third edit', is_unsaved_edit: true, }); @@ -95,11 +94,11 @@ describe('unsaved edit creation', () => { editor_state.current_content = TEST_CONTENT; // Verify unsaved entry was removed - expect(editor_state.unsaved_edit_entry_id).toBeNull(); + assert.isNull(editor_state.unsaved_edit_entry_id); // Entry should no longer exist const history = app.get_diskfile_history(TEST_PATH)!; - expect(history.find_entry_by_id(unsaved_id!)).toBeUndefined(); + assert.ok(history.find_entry_by_id(unsaved_id!) 
=== undefined); }); test('editing to match existing content selects that entry instead of creating new one', () => { @@ -111,8 +110,8 @@ describe('unsaved edit creation', () => { editor_state.current_content = 'Existing content'; // Existing entry should be selected - expect(editor_state.selected_history_entry_id).toBe(existing_entry.id); - expect(editor_state.unsaved_edit_entry_id).toBeNull(); + assert.strictEqual(editor_state.selected_history_entry_id, existing_entry.id); + assert.isNull(editor_state.unsaved_edit_entry_id); }); test('editing to match existing unsaved edit selects that entry', () => { @@ -128,8 +127,8 @@ describe('unsaved edit creation', () => { editor_state.current_content = 'Unsaved content'; // The existing unsaved entry should be selected - expect(editor_state.selected_history_entry_id).toBe(unsaved_entry.id); - expect(editor_state.unsaved_edit_entry_id).toBe(unsaved_entry.id); + assert.strictEqual(editor_state.selected_history_entry_id, unsaved_entry.id); + assert.strictEqual(editor_state.unsaved_edit_entry_id, unsaved_entry.id); }); }); @@ -144,15 +143,15 @@ describe('history navigation', () => { editor_state.set_content_from_history(entry1.id); // Verify selection and content - expect(editor_state.selected_history_entry_id).toBe(entry1.id); - expect(editor_state.current_content).toBe('Entry 1'); + assert.strictEqual(editor_state.selected_history_entry_id, entry1.id); + assert.strictEqual(editor_state.current_content, 'Entry 1'); // Select second entry editor_state.set_content_from_history(entry2.id); // Verify selection and content updated - expect(editor_state.selected_history_entry_id).toBe(entry2.id); - expect(editor_state.current_content).toBe('Entry 2'); + assert.strictEqual(editor_state.selected_history_entry_id, entry2.id); + assert.strictEqual(editor_state.current_content, 'Entry 2'); }); test('set_content_from_history with unsaved edit sets unsaved_edit_entry_id', () => { @@ -164,8 +163,8 @@ describe('history navigation', () => { 
editor_state.set_content_from_history(unsaved_entry.id); // Verify both ids are set correctly - expect(editor_state.selected_history_entry_id).toBe(unsaved_entry.id); - expect(editor_state.unsaved_edit_entry_id).toBe(unsaved_entry.id); + assert.strictEqual(editor_state.selected_history_entry_id, unsaved_entry.id); + assert.strictEqual(editor_state.unsaved_edit_entry_id, unsaved_entry.id); }); test('set_content_from_history with saved entry clears unsaved_edit_entry_id', () => { @@ -175,14 +174,14 @@ describe('history navigation', () => { // First select an unsaved entry editor_state.current_content = 'Unsaved content'; - expect(editor_state.unsaved_edit_entry_id).not.toBeNull(); + assert.ok(editor_state.unsaved_edit_entry_id !== null); // Now select the saved entry editor_state.set_content_from_history(saved_entry.id); // Verify unsaved edit id is cleared - expect(editor_state.selected_history_entry_id).toBe(saved_entry.id); - expect(editor_state.unsaved_edit_entry_id).toBeNull(); + assert.strictEqual(editor_state.selected_history_entry_id, saved_entry.id); + assert.isNull(editor_state.unsaved_edit_entry_id); }); test('content_matching_entry_ids tracks entries with matching content', () => { @@ -193,17 +192,17 @@ describe('history navigation', () => { const entry3 = history.add_entry('Duplicate content'); // Initial check - current content doesn't match any entry - expect(editor_state.content_matching_entry_ids).not.toContain(entry1.id); - expect(editor_state.content_matching_entry_ids).not.toContain(entry2.id); - expect(editor_state.content_matching_entry_ids).not.toContain(entry3.id); + assert.notInclude(editor_state.content_matching_entry_ids, entry1.id); + assert.notInclude(editor_state.content_matching_entry_ids, entry2.id); + assert.notInclude(editor_state.content_matching_entry_ids, entry3.id); // Set content to match duplicates editor_state.current_content = 'Duplicate content'; // Verify matching entries are tracked - 
expect(editor_state.content_matching_entry_ids).toContain(entry2.id); - expect(editor_state.content_matching_entry_ids).toContain(entry3.id); - expect(editor_state.content_matching_entry_ids).not.toContain(entry1.id); + assert.include(editor_state.content_matching_entry_ids, entry2.id); + assert.include(editor_state.content_matching_entry_ids, entry3.id); + assert.notInclude(editor_state.content_matching_entry_ids, entry1.id); }); }); @@ -211,34 +210,34 @@ describe('saving history changes', () => { test('save_changes persists content and converts unsaved to saved', async () => { // Make an edit to create unsaved entry editor_state.current_content = 'Content to save'; - expect(editor_state.unsaved_edit_entry_id).not.toBeNull(); + assert.ok(editor_state.unsaved_edit_entry_id !== null); // Save changes await editor_state.save_changes(); // Verify the unsaved flag was cleared - expect(editor_state.unsaved_edit_entry_id).toBeNull(); + assert.isNull(editor_state.unsaved_edit_entry_id); // A new entry should be created with correct properties const history = app.get_diskfile_history(TEST_PATH)!; - expect(history.entries[0]!).toMatchObject({ + assert.include(history.entries[0]!, { content: 'Content to save', is_unsaved_edit: false, }); // Selection should point to the new entry - expect(editor_state.selected_history_entry_id).toBe(history.entries[0]!.id); + assert.strictEqual(editor_state.selected_history_entry_id, history.entries[0]!.id); }); test('save_changes with no changes returns false', async () => { // Don't make any changes - expect(editor_state.has_changes).toBe(false); + assert.ok(!editor_state.has_changes); // Try to save const result = await editor_state.save_changes(); // Verify nothing was saved - expect(result).toBe(false); + assert.ok(!result); }); test('save_changes updates the diskfile content', async () => { @@ -249,10 +248,10 @@ describe('saving history changes', () => { await editor_state.save_changes(); // Verify diskfile was updated - 
expect(test_diskfile.content).toBe('New saved content'); + assert.strictEqual(test_diskfile.content, 'New saved content'); // Verify last_seen_disk_content was updated - expect(editor_state.last_seen_disk_content).toBe('New saved content'); + assert.strictEqual(editor_state.last_seen_disk_content, 'New saved content'); }); }); @@ -274,20 +273,20 @@ describe('managing unsaved edits', () => { const unsaved2_id = editor_state.unsaved_edit_entry_id; // Verify both unsaved entries exist - expect(unsaved1_id).not.toBeNull(); - expect(unsaved2_id).not.toBeNull(); - expect(unsaved1_id).not.toBe(unsaved2_id); + assert.ok(unsaved1_id !== null); + assert.ok(unsaved2_id !== null); + assert.notStrictEqual(unsaved1_id, unsaved2_id); // Verify both entries in history - const unsaved1 = history.find_entry_by_id(unsaved1_id!); - const unsaved2 = history.find_entry_by_id(unsaved2_id!); + const unsaved1 = history.find_entry_by_id(unsaved1_id); + const unsaved2 = history.find_entry_by_id(unsaved2_id); - expect(unsaved1).toMatchObject({ + assert.include(unsaved1, { content: 'Modified 1', is_unsaved_edit: true, }); - expect(unsaved2).toMatchObject({ + assert.include(unsaved2, { content: 'Modified 2', is_unsaved_edit: true, }); @@ -308,10 +307,10 @@ describe('managing unsaved edits', () => { // Verify all unsaved entries are gone const unsaved_after = history.entries.filter((e) => e.is_unsaved_edit); - expect(unsaved_after.length).toBe(0); + assert.strictEqual(unsaved_after.length, 0); // Unsaved edit id should be cleared - expect(editor_state.unsaved_edit_entry_id).toBeNull(); + assert.isNull(editor_state.unsaved_edit_entry_id); }); test('clear_unsaved_edits updates selection when selected entry is removed', () => { @@ -320,13 +319,13 @@ describe('managing unsaved edits', () => { const unsaved_id = editor_state.unsaved_edit_entry_id; // Verify it's selected - expect(editor_state.selected_history_entry_id).toBe(unsaved_id); + assert.strictEqual(editor_state.selected_history_entry_id, 
unsaved_id); // Clear unsaved edits editor_state.clear_unsaved_edits(); // Selection should be updated to a valid entry or null - expect(editor_state.selected_history_entry_id).not.toBe(unsaved_id); + assert.notStrictEqual(editor_state.selected_history_entry_id, unsaved_id); }); }); @@ -342,16 +341,16 @@ describe('history clearing', () => { editor_state.clear_history(); // Only one entry should remain - expect(history.entries.length).toBe(1); - expect(history.entries[0]).toMatchObject({ + assert.strictEqual(history.entries.length, 1); + assert.include(history.entries[0], { id: newest.id, content: 'Newest entry', is_original_state: true, }); // Selection should be updated - expect(editor_state.selected_history_entry_id).toBe(newest.id); - expect(editor_state.unsaved_edit_entry_id).toBeNull(); + assert.strictEqual(editor_state.selected_history_entry_id, newest.id); + assert.isNull(editor_state.unsaved_edit_entry_id); }); test('clear_history preserves all unsaved edits', () => { @@ -376,13 +375,13 @@ describe('history clearing', () => { editor_state.clear_history(); // Verify the specific unsaved entries still exist - expect(history.find_entry_by_id(unsaved_entry1.id)).toMatchObject({ + assert.include(history.find_entry_by_id(unsaved_entry1.id), { content: 'Unsaved edit 1', is_unsaved_edit: true, label: 'Unsaved 1', }); - expect(history.find_entry_by_id(unsaved_entry2.id)).toMatchObject({ + assert.include(history.find_entry_by_id(unsaved_entry2.id), { content: 'Unsaved edit 2', is_unsaved_edit: true, label: 'Unsaved 2', @@ -390,13 +389,13 @@ describe('history clearing', () => { // Verify the newest non-unsaved entry was also preserved const newest_after_clear = history.entries.find((entry) => !entry.is_unsaved_edit); - expect(newest_after_clear).toMatchObject({ + assert.include(newest_after_clear, { content: 'Newest entry', is_original_state: true, }); // Verify the original entry was removed (since it's not the newest saved entry) const original_entry = 
history.entries.find((entry) => entry.content === TEST_CONTENT); - expect(original_entry).toBeUndefined(); + assert.ok(original_entry === undefined); }); }); diff --git a/src/test/diskfile_history.svelte.test.ts b/src/test/diskfile_history.svelte.test.ts index e5b0b7d5a..6335480b1 100644 --- a/src/test/diskfile_history.svelte.test.ts +++ b/src/test/diskfile_history.svelte.test.ts @@ -1,14 +1,13 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, beforeEach, describe} from 'vitest'; +import {test, beforeEach, describe, assert} from 'vitest'; import {DiskfileHistory} from '$lib/diskfile_history.svelte.js'; import {DiskfilePath} from '$lib/diskfile_types.js'; import {create_uuid} from '$lib/zod_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Test data const TEST_PATH = DiskfilePath.parse('/path/to/file.txt'); @@ -35,10 +34,10 @@ describe('DiskfileHistory', () => { describe('initialization', () => { test('creates empty history state', () => { - expect(history.path).toBe(TEST_PATH); - expect(history.entries).toEqual([]); - expect(history.max_entries).toBe(100); - expect(history.current_entry).toBe(null); + assert.strictEqual(history.path, TEST_PATH); + assert.deepEqual(history.entries, []); + assert.strictEqual(history.max_entries, 100); + assert.isNull(history.current_entry); }); }); @@ -47,13 +46,13 @@ describe('DiskfileHistory', () => { const entry = history.add_entry(TEST_CONTENT); // Verify entry was created with proper structure - expect(history.entries.length).toBe(1); - expect(entry.content).toBe(TEST_CONTENT); - expect(entry.id).toBeDefined(); - expect(typeof entry.created).toBe('number'); - expect(entry.is_disk_change).toBe(false); - expect(entry.is_unsaved_edit).toBe(false); - expect(entry.is_original_state).toBe(false); + assert.strictEqual(history.entries.length, 1); + 
assert.strictEqual(entry.content, TEST_CONTENT); + assert.isDefined(entry.id); + assert.strictEqual(typeof entry.created, 'number'); + assert.ok(!entry.is_disk_change); + assert.ok(!entry.is_unsaved_edit); + assert.ok(!entry.is_original_state); }); test('add_entry with custom options sets all properties', () => { @@ -68,11 +67,11 @@ describe('DiskfileHistory', () => { }); // Verify all options were applied - expect(entry.created).toBe(custom_timestamp); - expect(entry.is_disk_change).toBe(true); - expect(entry.is_unsaved_edit).toBe(true); - expect(entry.is_original_state).toBe(true); - expect(entry.label).toBe('Custom Label'); + assert.strictEqual(entry.created, custom_timestamp); + assert.ok(entry.is_disk_change); + assert.ok(entry.is_unsaved_edit); + assert.ok(entry.is_original_state); + assert.strictEqual(entry.label, 'Custom Label'); }); test('add_entry skips duplicate content back-to-back', () => { @@ -83,9 +82,9 @@ describe('DiskfileHistory', () => { const duplicate = history.add_entry(TEST_CONTENT); // Verify no new entry was added and original was returned - expect(history.entries.length).toBe(1); - expect(duplicate).toEqual(first); - expect(duplicate.id).toBe(first.id); + assert.strictEqual(history.entries.length, 1); + assert.deepEqual(duplicate, first); + assert.strictEqual(duplicate.id, first.id); }); test('add_entry creates immutable entry array', () => { @@ -96,8 +95,8 @@ describe('DiskfileHistory', () => { history.add_entry(TEST_CONTENT); // Verify entries array was replaced, not mutated in place - expect(history.entries).not.toBe(initial_entries); - expect(history.entries.length).toBe(1); + assert.notStrictEqual(history.entries, initial_entries); + assert.strictEqual(history.entries.length, 1); }); }); @@ -113,10 +112,10 @@ describe('DiskfileHistory', () => { history.add_entry('content 1', {created: time1}); // Verify entries are sorted newest first - expect(history.entries.length).toBe(3); - expect(history.entries[0]!.content).toBe('content 3'); - 
expect(history.entries[1]!.content).toBe('content 2'); - expect(history.entries[2]!.content).toBe('content 1'); + assert.strictEqual(history.entries.length, 3); + assert.strictEqual(history.entries[0]!.content, 'content 3'); + assert.strictEqual(history.entries[1]!.content, 'content 2'); + assert.strictEqual(history.entries[2]!.content, 'content 1'); }); test('current_entry returns most recent entry', () => { @@ -125,12 +124,12 @@ describe('DiskfileHistory', () => { const latest = history.add_entry('latest entry'); // Verify current_entry points to most recent - expect(history.current_entry).toBe(history.entries[0]); - expect(history.current_entry).toEqual(latest); + assert.strictEqual(history.current_entry, history.entries[0]); + assert.deepEqual(history.current_entry, latest); }); test('current_entry is null when history is empty', () => { - expect(history.current_entry).toBe(null); + assert.isNull(history.current_entry); }); }); @@ -146,10 +145,10 @@ describe('DiskfileHistory', () => { history.add_entry('content 4', {created: Date.now()}); // Verify only the most recent entries were kept - expect(history.entries.length).toBe(3); - expect(history.entries[0]!.content).toBe('content 4'); - expect(history.entries[1]!.content).toBe('content 3'); - expect(history.entries[2]!.content).toBe('content 2'); + assert.strictEqual(history.entries.length, 3); + assert.strictEqual(history.entries[0]!.content, 'content 4'); + assert.strictEqual(history.entries[1]!.content, 'content 3'); + assert.strictEqual(history.entries[2]!.content, 'content 2'); }); test('add_entry correctly handles insertion with capacity limit', () => { @@ -169,9 +168,9 @@ describe('DiskfileHistory', () => { history.add_entry('oldest entry', {created: oldest_time}); // Verify correct entries were kept (newest two) - expect(history.entries.length).toBe(2); - expect(history.entries[0]!.content).toBe('newest entry'); - expect(history.entries[1]!.content).toBe('middle entry'); + 
assert.strictEqual(history.entries.length, 2); + assert.strictEqual(history.entries[0]!.content, 'newest entry'); + assert.strictEqual(history.entries[1]!.content, 'middle entry'); }); }); @@ -186,9 +185,9 @@ describe('DiskfileHistory', () => { const found = history.find_entry_by_id(entry2.id); // Verify the right entry was found - expect(found).toBeDefined(); - expect(found!.id).toBe(entry2.id); - expect(found!.content).toBe('content 2'); + assert.isDefined(found); + assert.strictEqual(found.id, entry2.id); + assert.strictEqual(found.content, 'content 2'); }); test('find_entry_by_id returns undefined for non-existent id', () => { @@ -201,7 +200,7 @@ describe('DiskfileHistory', () => { const result = history.find_entry_by_id(unknown_id); // Verify undefined is returned - expect(result).toBeUndefined(); + assert.ok(result === undefined); }); test('get_content returns content from entry', () => { @@ -212,7 +211,7 @@ describe('DiskfileHistory', () => { const content = history.get_content(entry.id); // Verify content was retrieved - expect(content).toBe('specific content'); + assert.strictEqual(content, 'specific content'); }); test('get_content returns null for non-existent id', () => { @@ -224,7 +223,7 @@ describe('DiskfileHistory', () => { const content = history.get_content(unknown_id); // Verify null is returned - expect(content).toBeNull(); + assert.isNull(content); }); }); @@ -236,26 +235,26 @@ describe('DiskfileHistory', () => { const newest = history.add_entry('newest content'); // Verify we have multiple entries - expect(history.entries.length).toBe(3); + assert.strictEqual(history.entries.length, 3); // Clear all except current history.clear_except_current(); // Verify only newest remains - expect(history.entries.length).toBe(1); - expect(history.entries[0]!.id).toBe(newest.id); - expect(history.entries[0]!.content).toBe('newest content'); + assert.strictEqual(history.entries.length, 1); + assert.strictEqual(history.entries[0]!.id, newest.id); + 
assert.strictEqual(history.entries[0]!.content, 'newest content'); }); test('clear_except_current handles empty history', () => { // Start with empty history - expect(history.entries.length).toBe(0); + assert.strictEqual(history.entries.length, 0); // Call clear - should not error history.clear_except_current(); // Should still be empty - expect(history.entries.length).toBe(0); + assert.strictEqual(history.entries.length, 0); }); test('clear_except_current with keep predicate preserves matching entries', () => { @@ -268,9 +267,9 @@ describe('DiskfileHistory', () => { history.clear_except_current((entry) => entry.is_original_state); // Verify newest and original entries were kept - expect(history.entries.length).toBe(2); - expect(history.entries[0]!.id).toBe(newest.id); - expect(history.entries[1]!.id).toBe(original.id); + assert.strictEqual(history.entries.length, 2); + assert.strictEqual(history.entries[0]!.id, newest.id); + assert.strictEqual(history.entries[1]!.id, original.id); }); test('clear_except_current with single entry does nothing', () => { @@ -281,8 +280,8 @@ describe('DiskfileHistory', () => { history.clear_except_current(); // Verify entry is still there - expect(history.entries.length).toBe(1); - expect(history.entries[0]!.id).toBe(entry.id); + assert.strictEqual(history.entries.length, 1); + assert.strictEqual(history.entries[0]!.id, entry.id); }); }); @@ -295,8 +294,8 @@ describe('DiskfileHistory', () => { const second = history.add_entry(TEST_CONTENT, {is_unsaved_edit: true}); // Both entries should be added since they represent different states - expect(history.entries.length).toBe(2); - expect(second.is_unsaved_edit).toBe(true); + assert.strictEqual(history.entries.length, 2); + assert.ok(second.is_unsaved_edit); }); test('complex editing workflow', () => { @@ -317,17 +316,17 @@ describe('DiskfileHistory', () => { const latest = history.add_entry('latest edit', {is_unsaved_edit: true}); // Verify state - expect(history.entries.length).toBe(5); 
- expect(history.current_entry).toEqual(latest); + assert.strictEqual(history.entries.length, 5); + assert.deepEqual(history.current_entry, latest); // Clear except saved and current history.clear_except_current((entry) => entry.is_disk_change); // Should have original, saved, and latest - expect(history.entries.length).toBe(3); - expect(history.entries[0]!.id).toBe(latest.id); - expect(history.entries[1]!.id).toBe(saved.id); - expect(history.entries[2]!.id).toBe(original.id); + assert.strictEqual(history.entries.length, 3); + assert.strictEqual(history.entries[0]!.id, latest.id); + assert.strictEqual(history.entries[1]!.id, saved.id); + assert.strictEqual(history.entries[2]!.id, original.id); }); }); }); diff --git a/src/test/diskfile_tabs.svelte.test.ts b/src/test/diskfile_tabs.svelte.test.ts index 31fee0b30..8b71de08d 100644 --- a/src/test/diskfile_tabs.svelte.test.ts +++ b/src/test/diskfile_tabs.svelte.test.ts @@ -1,12 +1,13 @@ // @vitest-environment jsdom -import {test, expect, beforeEach, describe} from 'vitest'; +import {test, beforeEach, describe, assert} from 'vitest'; import {DiskfileTabs} from '$lib/diskfile_tabs.svelte.js'; import {DiskfileTab} from '$lib/diskfile_tab.svelte.js'; import {create_uuid, UuidWithDefault} from '$lib/zod_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Test data const TEST_DISKFILE_ID_1 = UuidWithDefault.parse(undefined); @@ -35,15 +36,15 @@ describe('DiskfileTabs', () => { describe('initialization', () => { test('creates empty tabs state', () => { - expect(tabs.selected_tab_id).toBe(null); - expect(tabs.preview_tab_id).toBe(null); - expect(tabs.tab_order).toEqual([]); - expect(tabs.items.size).toBe(0); - expect(tabs.ordered_tabs).toEqual([]); - expect(tabs.selected_tab).toBeUndefined(); - expect(tabs.preview_tab).toBeUndefined(); - expect(tabs.selected_diskfile_id).toBe(null); - 
expect(tabs.recently_closed_tabs).toEqual([]); + assert.isNull(tabs.selected_tab_id); + assert.isNull(tabs.preview_tab_id); + assert.deepEqual(tabs.tab_order, []); + assert.strictEqual(tabs.items.size, 0); + assert.deepEqual(tabs.ordered_tabs, []); + assert.ok(tabs.selected_tab === undefined); + assert.ok(tabs.preview_tab === undefined); + assert.isNull(tabs.selected_diskfile_id); + assert.deepEqual(tabs.recently_closed_tabs, []); }); }); @@ -51,12 +52,12 @@ describe('DiskfileTabs', () => { test('preview_diskfile creates a new preview tab', () => { const tab = tabs.preview_diskfile(TEST_DISKFILE_ID_1); - expect(tab).toBeInstanceOf(DiskfileTab); - expect(tab.diskfile_id).toBe(TEST_DISKFILE_ID_1); - expect(tabs.preview_tab_id).toBe(tab.id); - expect(tabs.selected_tab_id).toBe(tab.id); - expect(tabs.tab_order).toContain(tab.id); - expect(tabs.items.size).toBe(1); + assert.instanceOf(tab, DiskfileTab); + assert.strictEqual(tab.diskfile_id, TEST_DISKFILE_ID_1); + assert.strictEqual(tabs.preview_tab_id, tab.id); + assert.strictEqual(tabs.selected_tab_id, tab.id); + assert.include(tabs.tab_order, tab.id); + assert.strictEqual(tabs.items.size, 1); }); test('preview_diskfile reuses existing tab', () => { @@ -67,8 +68,8 @@ describe('DiskfileTabs', () => { const tab2 = tabs.preview_diskfile(TEST_DISKFILE_ID_1); // Should return the existing tab, not create a new one - expect(tab2).toBe(tab1); - expect(tabs.items.size).toBe(1); + assert.strictEqual(tab2, tab1); + assert.strictEqual(tabs.items.size, 1); }); test('preview_diskfile reuses existing preview tab for new file', () => { @@ -79,21 +80,21 @@ describe('DiskfileTabs', () => { const result = tabs.preview_diskfile(TEST_DISKFILE_ID_2); // Should reuse the same preview tab but update its content - expect(result).toBe(preview_tab); - expect(tabs.preview_tab_id).toBe(preview_tab.id); - expect(preview_tab.diskfile_id).toBe(TEST_DISKFILE_ID_2); - expect(tabs.items.size).toBe(1); + assert.strictEqual(result, preview_tab); + 
assert.strictEqual(tabs.preview_tab_id, preview_tab.id); + assert.strictEqual(preview_tab.diskfile_id, TEST_DISKFILE_ID_2); + assert.strictEqual(tabs.items.size, 1); }); test('open_diskfile creates a permanent tab', () => { const tab = tabs.open_diskfile(TEST_DISKFILE_ID_1); - expect(tab).toBeInstanceOf(DiskfileTab); - expect(tab.diskfile_id).toBe(TEST_DISKFILE_ID_1); - expect(tab.is_preview).toBe(false); - expect(tabs.preview_tab_id).toBe(null); - expect(tabs.selected_tab_id).toBe(tab.id); - expect(tabs.tab_order).toContain(tab.id); + assert.instanceOf(tab, DiskfileTab); + assert.strictEqual(tab.diskfile_id, TEST_DISKFILE_ID_1); + assert.ok(!tab.is_preview); + assert.isNull(tabs.preview_tab_id); + assert.strictEqual(tabs.selected_tab_id, tab.id); + assert.include(tabs.tab_order, tab.id); }); test('open_diskfile reuses existing tab', () => { @@ -104,8 +105,8 @@ describe('DiskfileTabs', () => { const tab2 = tabs.open_diskfile(TEST_DISKFILE_ID_1); // Should return the existing tab, not create a new one - expect(tab2).toBe(tab1); - expect(tabs.items.size).toBe(1); + assert.strictEqual(tab2, tab1); + assert.strictEqual(tabs.items.size, 1); }); test('open_diskfile promotes preview tab to permanent', () => { @@ -116,9 +117,9 @@ describe('DiskfileTabs', () => { const permanent_tab = tabs.open_diskfile(TEST_DISKFILE_ID_1); // Should use the same tab but make it permanent - expect(permanent_tab).toBe(preview_tab); - expect(tabs.preview_tab_id).toBe(null); - expect(tabs.items.size).toBe(1); + assert.strictEqual(permanent_tab, preview_tab); + assert.isNull(tabs.preview_tab_id); + assert.strictEqual(tabs.items.size, 1); }); test('open_diskfile repurposes existing preview tab', () => { @@ -129,39 +130,39 @@ describe('DiskfileTabs', () => { const permanent_tab = tabs.open_diskfile(TEST_DISKFILE_ID_2); // Should repurpose the preview tab - expect(permanent_tab).toBe(preview_tab); - expect(permanent_tab.diskfile_id).toBe(TEST_DISKFILE_ID_2); - 
expect(tabs.preview_tab_id).toBe(null); - expect(tabs.items.size).toBe(1); + assert.strictEqual(permanent_tab, preview_tab); + assert.strictEqual(permanent_tab.diskfile_id, TEST_DISKFILE_ID_2); + assert.isNull(tabs.preview_tab_id); + assert.strictEqual(tabs.items.size, 1); }); test('open_diskfile replaces preview tab resulting in 2 tabs total', () => { // Create a permanent tab first const tab1 = tabs.open_diskfile(TEST_DISKFILE_ID_1); - expect(tabs.items.size).toBe(1); - expect(tabs.preview_tab_id).toBe(null); + assert.strictEqual(tabs.items.size, 1); + assert.isNull(tabs.preview_tab_id); // Create a preview tab for a different file const preview_tab = tabs.preview_diskfile(TEST_DISKFILE_ID_2); - expect(tabs.items.size).toBe(2); - expect(tabs.preview_tab_id).toBe(preview_tab.id); - expect(preview_tab.is_preview).toBe(true); + assert.strictEqual(tabs.items.size, 2); + assert.strictEqual(tabs.preview_tab_id, preview_tab.id); + assert.ok(preview_tab.is_preview); // Open a third file, which should repurpose the preview tab const tab3 = tabs.open_diskfile(TEST_DISKFILE_ID_3); // Verify the preview tab was repurposed, not creating a third tab - expect(tabs.items.size).toBe(2); - expect(tab3).toBe(preview_tab); // Should be the same tab object - expect(tab3.diskfile_id).toBe(TEST_DISKFILE_ID_3); // But with the new diskfile id - expect(tabs.preview_tab_id).toBe(null); // No preview tab now + assert.strictEqual(tabs.items.size, 2); + assert.strictEqual(tab3, preview_tab); // Should be the same tab object + assert.strictEqual(tab3.diskfile_id, TEST_DISKFILE_ID_3); // But with the new diskfile id + assert.isNull(tabs.preview_tab_id); // No preview tab now // Verify tab ids are different - expect(tab1.id).not.toBe(tab3.id); + assert.notStrictEqual(tab1.id, tab3.id); // Verify tabs have the right content - expect(tab1.diskfile_id).toBe(TEST_DISKFILE_ID_1); - expect(tab3.diskfile_id).toBe(TEST_DISKFILE_ID_3); + assert.strictEqual(tab1.diskfile_id, TEST_DISKFILE_ID_1); + 
assert.strictEqual(tab3.diskfile_id, TEST_DISKFILE_ID_3); }); }); @@ -178,9 +179,9 @@ describe('DiskfileTabs', () => { const preview_tab = tabs.preview_diskfile(TEST_DISKFILE_ID_3); // Verify the order: tab1, preview_tab, tab2 - expect(tabs.tab_order[0]!).toBe(tab1.id); - expect(tabs.tab_order[1]!).toBe(preview_tab.id); - expect(tabs.tab_order[2]!).toBe(tab2.id); + assert.strictEqual(tabs.tab_order[0]!, tab1.id); + assert.strictEqual(tabs.tab_order[1]!, preview_tab.id); + assert.strictEqual(tabs.tab_order[2]!, tab2.id); }); test('positioning with additional preview tabs', () => { @@ -195,20 +196,20 @@ describe('DiskfileTabs', () => { const preview = tabs.preview_diskfile(TEST_DISKFILE_ID_3); // Expected order: tab1, preview, tab2 - expect(tabs.tab_order[0]!).toBe(tab1.id); - expect(tabs.tab_order[1]!).toBe(preview.id); - expect(tabs.tab_order[2]!).toBe(tab2.id); + assert.strictEqual(tabs.tab_order[0]!, tab1.id); + assert.strictEqual(tabs.tab_order[1]!, preview.id); + assert.strictEqual(tabs.tab_order[2]!, tab2.id); // Select tab2 and create another preview (reusing the existing one) tabs.select_tab(tab2.id); const preview2 = tabs.preview_diskfile(TEST_DISKFILE_ID_4); // The preview tab should move after tab2 - expect(tabs.tab_order[0]!).toBe(tab1.id); - expect(tabs.tab_order[1]!).toBe(tab2.id); - expect(tabs.tab_order[2]!).toBe(preview.id); - expect(preview2).toBe(preview); // Same tab instance - expect(preview2.diskfile_id).toBe(TEST_DISKFILE_ID_4); + assert.strictEqual(tabs.tab_order[0]!, tab1.id); + assert.strictEqual(tabs.tab_order[1]!, tab2.id); + assert.strictEqual(tabs.tab_order[2]!, preview.id); + assert.strictEqual(preview2, preview); // Same tab instance + assert.strictEqual(preview2.diskfile_id, TEST_DISKFILE_ID_4); }); test('preview tab positioning bug fix - selecting existing preview should not reorder', () => { @@ -228,7 +229,7 @@ describe('DiskfileTabs', () => { const preview_tab = tabs.preview_diskfile(TEST_DISKFILE_ID_3); // Order should be: tab1, 
preview_tab, tab2 - expect(tabs.tab_order).toEqual([tab1.id, preview_tab.id, tab2.id]); + assert.deepEqual(tabs.tab_order, [tab1.id, preview_tab.id, tab2.id]); // Select tab2 tabs.select_tab(tab2.id); @@ -237,7 +238,7 @@ describe('DiskfileTabs', () => { tabs.select_tab(preview_tab.id); // Order should NOT change - the preview tab should stay in its position - expect(tabs.tab_order).toEqual([tab1.id, preview_tab.id, tab2.id]); + assert.deepEqual(tabs.tab_order, [tab1.id, preview_tab.id, tab2.id]); // Select tab1 again tabs.select_tab(tab1.id); @@ -246,7 +247,7 @@ describe('DiskfileTabs', () => { tabs.select_tab(preview_tab.id); // Order should still not change - expect(tabs.tab_order).toEqual([tab1.id, preview_tab.id, tab2.id]); + assert.deepEqual(tabs.tab_order, [tab1.id, preview_tab.id, tab2.id]); }); test('preview tab repositioning only happens when content changes', () => { @@ -260,17 +261,17 @@ describe('DiskfileTabs', () => { const preview = tabs.preview_diskfile(TEST_DISKFILE_ID_4); // Order: tab1, preview, tab2, tab3 - expect(tabs.tab_order).toEqual([tab1.id, preview.id, tab2.id, tab3.id]); + assert.deepEqual(tabs.tab_order, [tab1.id, preview.id, tab2.id, tab3.id]); // Select tab3 tabs.select_tab(tab3.id); // Preview the same file - should NOT reposition tabs.preview_diskfile(TEST_DISKFILE_ID_4); - expect(tabs.tab_order).toEqual([tab1.id, preview.id, tab2.id, tab3.id]); + assert.deepEqual(tabs.tab_order, [tab1.id, preview.id, tab2.id, tab3.id]); tabs.preview_diskfile(TEST_DISKFILE_ID_5); - expect(tabs.tab_order).toEqual([tab1.id, preview.id, tab2.id, tab3.id]); + assert.deepEqual(tabs.tab_order, [tab1.id, preview.id, tab2.id, tab3.id]); }); test('reorder_tabs changes tab order', () => { @@ -280,18 +281,18 @@ describe('DiskfileTabs', () => { const tab3 = tabs.open_diskfile(TEST_DISKFILE_ID_3); // Initial order - expect(tabs.tab_order).toEqual([tab1.id, tab2.id, tab3.id]); + assert.deepEqual(tabs.tab_order, [tab1.id, tab2.id, tab3.id]); // Reorder: move tab1 
to position 2 tabs.reorder_tabs(0, 2); // New order should be: tab2, tab3, tab1 - expect(tabs.tab_order).toEqual([tab2.id, tab3.id, tab1.id]); + assert.deepEqual(tabs.tab_order, [tab2.id, tab3.id, tab1.id]); // ordered_tabs should reflect the new order - expect(tabs.ordered_tabs[0]!.id).toBe(tab2.id); - expect(tabs.ordered_tabs[1]!.id).toBe(tab3.id); - expect(tabs.ordered_tabs[2]!.id).toBe(tab1.id); + assert.strictEqual(tabs.ordered_tabs[0]!.id, tab2.id); + assert.strictEqual(tabs.ordered_tabs[1]!.id, tab3.id); + assert.strictEqual(tabs.ordered_tabs[2]!.id, tab1.id); }); }); @@ -304,10 +305,10 @@ describe('DiskfileTabs', () => { // Select the second tab tabs.select_tab(tab2.id); - expect(tabs.selected_tab_id).toBe(tab2.id); - expect(tabs.selected_tab).toBe(tab2); - expect(tabs.selected_tab?.is_selected).toBe(true); - expect(tabs.selected_diskfile_id).toBe(tab2.diskfile_id); + assert.strictEqual(tabs.selected_tab_id, tab2.id); + assert.strictEqual(tabs.selected_tab, tab2); + assert.ok(tabs.selected_tab?.is_selected); + assert.strictEqual(tabs.selected_diskfile_id, tab2.diskfile_id); }); }); @@ -319,11 +320,11 @@ describe('DiskfileTabs', () => { // Close it tabs.close_tab(tab.id); - expect(tabs.items.size).toBe(0); - expect(tabs.tab_order).not.toContain(tab.id); - expect(tabs.selected_tab_id).toBe(null); - expect(tabs.recently_closed_tabs).toHaveLength(1); - expect(tabs.recently_closed_tabs[0]!.id).toBe(tab.id); + assert.strictEqual(tabs.items.size, 0); + assert.notInclude(tabs.tab_order, tab.id); + assert.isNull(tabs.selected_tab_id); + assert.strictEqual(tabs.recently_closed_tabs.length, 1); + assert.strictEqual(tabs.recently_closed_tabs[0]!.id, tab.id); }); test('close_tab with multiple tabs selects the most recently opened tab', () => { @@ -332,16 +333,16 @@ describe('DiskfileTabs', () => { const tab2 = tabs.open_diskfile(TEST_DISKFILE_ID_2); const tab3 = tabs.open_diskfile(TEST_DISKFILE_ID_3); - expect(tabs.items.size).toBe(3); + 
assert.strictEqual(tabs.items.size, 3); // Select and close the middle tab tabs.select_tab(tab2.id); tabs.close_tab(tab2.id); - expect(tabs.items.size).toBe(2); - expect(tabs.selected_tab_id).toBe(tab3.id); // Should select the most recently opened (tab3) - expect(tabs.recently_closed_tabs).toHaveLength(1); - expect(tabs.recently_closed_tabs[0]!.id).toBe(tab2.id); + assert.strictEqual(tabs.items.size, 2); + assert.strictEqual(tabs.selected_tab_id, tab3.id); // Should select the most recently opened (tab3) + assert.strictEqual(tabs.recently_closed_tabs.length, 1); + assert.strictEqual(tabs.recently_closed_tabs[0]!.id, tab2.id); }); test('close_tab does nothing for non-existent tab', () => { @@ -356,9 +357,9 @@ describe('DiskfileTabs', () => { tabs.close_tab(create_uuid()); // State should be unchanged - expect(tabs.items.size).toBe(initial_size); - expect(tabs.selected_tab_id).toBe(initial_selected); - expect(tabs.recently_closed_tabs).toHaveLength(0); + assert.strictEqual(tabs.items.size, initial_size); + assert.strictEqual(tabs.selected_tab_id, initial_selected); + assert.strictEqual(tabs.recently_closed_tabs.length, 0); }); test('close_tab clears preview_tab_id if closing preview tab', () => { @@ -368,7 +369,7 @@ describe('DiskfileTabs', () => { // Close it tabs.close_tab(preview_tab.id); - expect(tabs.preview_tab_id).toBe(null); + assert.isNull(tabs.preview_tab_id); }); test('close_tab selects next tab when available', () => { @@ -384,7 +385,7 @@ describe('DiskfileTabs', () => { tabs.close_tab(tab2.id); // Should select the tab that was after it (tab3) - expect(tabs.selected_tab_id).toBe(tab3.id); + assert.strictEqual(tabs.selected_tab_id, tab3.id); }); test('close_tab selects previous tab when closing last tab', () => { @@ -399,7 +400,7 @@ describe('DiskfileTabs', () => { tabs.close_tab(tab2.id); // Should select the previous tab - expect(tabs.selected_tab_id).toBe(tab1.id); + assert.strictEqual(tabs.selected_tab_id, tab1.id); }); test('close_all_tabs clears 
all tabs and state', () => { @@ -416,20 +417,20 @@ describe('DiskfileTabs', () => { const permanent = tabs.open_diskfile(TEST_DISKFILE_ID_3); // Initial state - verify we have 3 tabs before closing - expect(tabs.items.size).toBe(3); - expect(tabs.tab_order).toHaveLength(3); - expect(tabs.preview_tab_id).toBe(null); // No preview tab now since last one is permanent - expect(tabs.selected_tab_id).toBe(permanent.id); + assert.strictEqual(tabs.items.size, 3); + assert.strictEqual(tabs.tab_order.length, 3); + assert.isNull(tabs.preview_tab_id); // No preview tab now since last one is permanent + assert.strictEqual(tabs.selected_tab_id, permanent.id); // Close all tabs.close_all_tabs(); // All state should be cleared - expect(tabs.items.size).toBe(0); - expect(tabs.tab_order).toHaveLength(0); - expect(tabs.preview_tab_id).toBe(null); - expect(tabs.selected_tab_id).toBe(null); - expect(tabs.recently_closed_tabs).toHaveLength(3); + assert.strictEqual(tabs.items.size, 0); + assert.strictEqual(tabs.tab_order.length, 0); + assert.isNull(tabs.preview_tab_id); + assert.isNull(tabs.selected_tab_id); + assert.strictEqual(tabs.recently_closed_tabs.length, 3); }); }); @@ -438,15 +439,15 @@ describe('DiskfileTabs', () => { // Create a preview tab const preview_tab = tabs.preview_diskfile(TEST_DISKFILE_ID_1); - expect(tabs.preview_tab_id).toBe(preview_tab.id); - expect(preview_tab.is_preview).toBe(true); + assert.strictEqual(tabs.preview_tab_id, preview_tab.id); + assert.ok(preview_tab.is_preview); // Promote it const result = tabs.promote_preview_to_permanent(); - expect(result).toBe(true); - expect(tabs.preview_tab_id).toBe(null); - expect(preview_tab.is_preview).toBe(false); + assert.ok(result); + assert.isNull(tabs.preview_tab_id); + assert.ok(!preview_tab.is_preview); }); test('promote_preview_to_permanent returns false if no preview tab', () => { @@ -456,19 +457,19 @@ describe('DiskfileTabs', () => { // Try to promote const result = tabs.promote_preview_to_permanent(); - 
expect(result).toBe(false); + assert.ok(!result); }); test('open_tab makes a preview tab permanent', () => { // Create a preview tab const preview_tab = tabs.preview_diskfile(TEST_DISKFILE_ID_1); - expect(tabs.preview_tab_id).toBe(preview_tab.id); + assert.strictEqual(tabs.preview_tab_id, preview_tab.id); // Make it permanent tabs.open_tab(preview_tab.id); - expect(tabs.preview_tab_id).toBe(null); + assert.isNull(tabs.preview_tab_id); }); test('open_tab does nothing for permanent tab', () => { @@ -476,13 +477,13 @@ describe('DiskfileTabs', () => { const tab = tabs.open_diskfile(TEST_DISKFILE_ID_1); // Initial state - expect(tabs.preview_tab_id).toBe(null); + assert.isNull(tabs.preview_tab_id); // Try to make it permanent again tabs.open_tab(tab.id); // State should be unchanged - expect(tabs.preview_tab_id).toBe(null); + assert.isNull(tabs.preview_tab_id); }); }); @@ -498,22 +499,22 @@ describe('DiskfileTabs', () => { // Reopen the last closed (tab2) tabs.reopen_last_closed_tab(); - expect(tabs.items.size).toBe(1); - expect(tabs.items.by_id.values().next().value?.diskfile_id).toBe(TEST_DISKFILE_ID_2); - expect(tabs.recently_closed_tabs).toHaveLength(1); + assert.strictEqual(tabs.items.size, 1); + assert.strictEqual(tabs.items.by_id.values().next().value?.diskfile_id, TEST_DISKFILE_ID_2); + assert.strictEqual(tabs.recently_closed_tabs.length, 1); }); test('reopen_last_closed_tab does nothing if no closed tabs', () => { // Initial state - expect(tabs.recently_closed_tabs).toHaveLength(0); - expect(tabs.items.size).toBe(0); + assert.strictEqual(tabs.recently_closed_tabs.length, 0); + assert.strictEqual(tabs.items.size, 0); // Try to reopen tabs.reopen_last_closed_tab(); // State should be unchanged - expect(tabs.recently_closed_tabs).toHaveLength(0); - expect(tabs.items.size).toBe(0); + assert.strictEqual(tabs.recently_closed_tabs.length, 0); + assert.strictEqual(tabs.items.size, 0); }); test('reopen_last_closed_tab restores selection state', () => { @@ -525,14 
+526,14 @@ describe('DiskfileTabs', () => { tabs.close_tab(tab.id); // Initial state after closing - expect(tabs.selected_tab_id).toBe(null); + assert.isNull(tabs.selected_tab_id); // Reopen it tabs.reopen_last_closed_tab(); // Tab should be reopened and selected - expect(tabs.selected_tab_id).not.toBe(null); - expect(tabs.items.size).toBe(1); + assert.notStrictEqual(tabs.selected_tab_id, null); + assert.strictEqual(tabs.items.size, 1); }); test('reopen_last_closed_tab maintains proper tab order', () => { @@ -543,25 +544,25 @@ describe('DiskfileTabs', () => { // Remember the original order const original_order = [...tabs.tab_order]; - expect(original_order).toEqual([tab1.id, tab2.id, tab3.id]); + assert.deepEqual(original_order, [tab1.id, tab2.id, tab3.id]); // Close the middle tab tabs.close_tab(tab2.id); // Make sure it's gone - expect(tabs.items.by_id.has(tab2.id)).toBe(false); - expect(tabs.tab_order).not.toContain(tab2.id); + assert.ok(!tabs.items.by_id.has(tab2.id)); + assert.notInclude(tabs.tab_order, tab2.id); // Verify the new order is tab1, tab3 - expect(tabs.tab_order).toEqual([tab1.id, tab3.id]); + assert.deepEqual(tabs.tab_order, [tab1.id, tab3.id]); // Reopen the tab tabs.reopen_last_closed_tab(); // Verify it was added at the end of the tab order const reopened_tab_id = tabs.selected_tab_id; - expect(tabs.tab_order).toContain(reopened_tab_id); - expect(tabs.tab_order[tabs.tab_order.length - 1]).toBe(reopened_tab_id); + assert.include(tabs.tab_order, reopened_tab_id); + assert.strictEqual(tabs.tab_order[tabs.tab_order.length - 1], reopened_tab_id); }); }); @@ -573,17 +574,17 @@ describe('DiskfileTabs', () => { const tab3 = tabs.open_diskfile(TEST_DISKFILE_ID_3); // Initial history should have tab3 (most recently opened) - expect(tabs.recent_tabs[0]!.id).toBe(tab3.id); + assert.strictEqual(tabs.recent_tabs[0]!.id, tab3.id); // Select tab1, should move to front of history tabs.select_tab(tab1.id); - expect(tabs.recent_tabs[0]!.id).toBe(tab1.id); + 
assert.strictEqual(tabs.recent_tabs[0]!.id, tab1.id); // Select tab2, should move to front of history tabs.select_tab(tab2.id); - expect(tabs.recent_tabs[0]!.id).toBe(tab2.id); - expect(tabs.recent_tabs[1]!.id).toBe(tab1.id); - expect(tabs.recent_tabs[2]!.id).toBe(tab3.id); + assert.strictEqual(tabs.recent_tabs[0]!.id, tab2.id); + assert.strictEqual(tabs.recent_tabs[1]!.id, tab1.id); + assert.strictEqual(tabs.recent_tabs[2]!.id, tab3.id); }); test('maintains history when reopening tabs', () => { @@ -595,8 +596,8 @@ describe('DiskfileTabs', () => { tabs.reopen_last_closed_tab(); // Verify the reopened tab is in history - expect(tabs.recent_tabs).toHaveLength(1); - expect(tabs.recent_tabs[0]!.id).toBe(tabs.selected_tab_id); + assert.strictEqual(tabs.recent_tabs.length, 1); + assert.strictEqual(tabs.recent_tabs[0]!.id, tabs.selected_tab_id); }); test('limits history to max size', () => { @@ -609,7 +610,7 @@ describe('DiskfileTabs', () => { } // Verify history is limited to max size - expect(tabs.recent_tabs).toHaveLength(3); + assert.strictEqual(tabs.recent_tabs.length, 3); }); test('removes closed tabs from history', () => { @@ -618,15 +619,15 @@ describe('DiskfileTabs', () => { const tab2 = tabs.open_diskfile(TEST_DISKFILE_ID_2); // Verify both tabs are in history through their ids - expect(tabs.recent_tab_ids).toContain(tab1.id); - expect(tabs.recent_tab_ids).toContain(tab2.id); + assert.include(tabs.recent_tab_ids, tab1.id); + assert.include(tabs.recent_tab_ids, tab2.id); // Close tab1 tabs.close_tab(tab1.id); // Verify tab1 is removed from history - expect(tabs.recent_tab_ids).not.toContain(tab1.id); - expect(tabs.recent_tab_ids).toContain(tab2.id); + assert.notInclude(tabs.recent_tab_ids, tab1.id); + assert.include(tabs.recent_tab_ids, tab2.id); }); test('clears history when closing all tabs', () => { @@ -635,13 +636,13 @@ describe('DiskfileTabs', () => { tabs.open_diskfile(TEST_DISKFILE_ID_2); // Verify history is not empty - 
expect(tabs.recent_tabs.length).toBeGreaterThan(0); + assert.ok(tabs.recent_tabs.length > 0); // Close all tabs tabs.close_all_tabs(); // Verify history is cleared - expect(tabs.recent_tabs).toEqual([]); + assert.deepEqual(tabs.recent_tabs, []); }); }); @@ -662,7 +663,7 @@ describe('DiskfileTabs', () => { tabs.close_tab(tab1.id); // The most recently used tab (tab3) should be selected - expect(tabs.selected_tab_id).toBe(tab3.id); + assert.strictEqual(tabs.selected_tab_id, tab3.id); }); test('falls back to next tab when no history available', () => { @@ -684,7 +685,7 @@ describe('DiskfileTabs', () => { tabs.close_tab(tab2.id); // Should fall back to the next tab (tab3) - expect(tabs.selected_tab_id).toBe(tab3.id); + assert.strictEqual(tabs.selected_tab_id, tab3.id); }); test('selecting a non-existent tab in history is handled gracefully', () => { @@ -698,7 +699,7 @@ describe('DiskfileTabs', () => { tabs.close_tab(tab2.id); // Should select tab1 since it's the only one left - expect(tabs.selected_tab_id).toBe(tab1.id); + assert.strictEqual(tabs.selected_tab_id, tab1.id); }); test('find_most_recent_tab correctly finds valid tabs', () => { @@ -714,7 +715,7 @@ describe('DiskfileTabs', () => { const result = tabs.find_most_recent_tab(tab2.id); // Should return tab1 - expect(result).toBe(tab1.id); + assert.strictEqual(result, tab1.id); }); test('tab history preserves references after tab modifications', () => { @@ -727,8 +728,8 @@ describe('DiskfileTabs', () => { tabs.select_tab(tab2.id); // Verify initial history state - expect(tabs.recent_tabs[0]!.id).toBe(tab2.id); - expect(tabs.recent_tabs[1]!.id).toBe(tab1.id); + assert.strictEqual(tabs.recent_tabs[0]!.id, tab2.id); + assert.strictEqual(tabs.recent_tabs[1]!.id, tab1.id); // Store tabs for reference before closing const tab1_diskfile_id = tab1.diskfile_id; @@ -737,8 +738,8 @@ describe('DiskfileTabs', () => { tabs.close_tab(tab2.id); // Check history - tab1 should still be accessible - 
expect(tabs.recent_tabs[0]!.id).toBe(tab1.id); - expect(tabs.recent_tabs[0]!.diskfile_id).toBe(tab1_diskfile_id); + assert.strictEqual(tabs.recent_tabs[0]!.id, tab1.id); + assert.strictEqual(tabs.recent_tabs[0]!.diskfile_id, tab1_diskfile_id); }); }); @@ -752,9 +753,9 @@ describe('DiskfileTabs', () => { const result = tabs.navigate_to_tab(tab2.id); // Should select tab2 directly - expect(result.resulting_tab_id).toBe(tab2.id); - expect(result.created_preview).toBe(false); - expect(tabs.selected_tab_id).toBe(tab2.id); + assert.strictEqual(result.resulting_tab_id, tab2.id); + assert.ok(!result.created_preview); + assert.strictEqual(tabs.selected_tab_id, tab2.id); }); test('navigate_to_tab creates preview tab for closed tab', () => { @@ -767,14 +768,14 @@ describe('DiskfileTabs', () => { const result = tabs.navigate_to_tab(tab_id); // Should create a preview tab for the same diskfile - expect(result.created_preview).toBe(true); - expect(result.resulting_tab_id).not.toBe(tab_id); // Should be a different tab id - expect(tabs.selected_tab_id).toBe(result.resulting_tab_id); - expect(tabs.preview_tab_id).toBe(result.resulting_tab_id); + assert.ok(result.created_preview); + assert.notStrictEqual(result.resulting_tab_id, tab_id); // Should be a different tab id + assert.strictEqual(tabs.selected_tab_id, result.resulting_tab_id); + assert.strictEqual(tabs.preview_tab_id, result.resulting_tab_id); // Should have the same diskfile const new_tab = tabs.items.by_id.get(result.resulting_tab_id!); - expect(new_tab?.diskfile_id).toBe(TEST_DISKFILE_ID_1); + assert.strictEqual(new_tab?.diskfile_id, TEST_DISKFILE_ID_1); }); test('navigate_to_tab creates a new preview tab for closed tab', () => { @@ -792,16 +793,19 @@ describe('DiskfileTabs', () => { const result = tabs.navigate_to_tab(closed_tab_id); // A new preview tab should be created for the closed tab's file - expect(result.created_preview).toBe(true); - expect(result.resulting_tab_id).not.toBeNull(); - 
expect(tabs.preview_tab_id).not.toBeNull(); + assert.ok(result.created_preview); + assert.ok(result.resulting_tab_id !== null); + assert.ok(tabs.preview_tab_id !== null); // The new preview tab should be different from the original one - expect(tabs.preview_tab_id).not.toBe(preview_tab_id); + assert.notStrictEqual(tabs.preview_tab_id, preview_tab_id); // But it should have the closed tab's diskfile if (tabs.preview_tab_id && tabs.items.by_id.get(tabs.preview_tab_id)) { - expect(tabs.items.by_id.get(tabs.preview_tab_id)?.diskfile_id).toBe(TEST_DISKFILE_ID_3); + assert.strictEqual( + tabs.items.by_id.get(tabs.preview_tab_id)?.diskfile_id, + TEST_DISKFILE_ID_3, + ); } }); @@ -813,8 +817,8 @@ describe('DiskfileTabs', () => { const result = tabs.navigate_to_tab(UuidWithDefault.parse(undefined)); // Should return null without changing selection - expect(result.resulting_tab_id).toBe(null); - expect(result.created_preview).toBe(false); + assert.isNull(result.resulting_tab_id); + assert.ok(!result.created_preview); }); test('closed tabs are remembered even after closing all tabs', () => { @@ -830,9 +834,9 @@ describe('DiskfileTabs', () => { const result = tabs.navigate_to_tab(tab1_id); // Should create a preview tab for the correct diskfile - expect(result.created_preview).toBe(true); + assert.ok(result.created_preview); const new_tab = tabs.items.by_id.get(result.resulting_tab_id!); - expect(new_tab?.diskfile_id).toBe(TEST_DISKFILE_ID_1); + assert.strictEqual(new_tab?.diskfile_id, TEST_DISKFILE_ID_1); }); }); @@ -846,44 +850,44 @@ describe('DiskfileTabs', () => { tabs.tab_order = [tab1.id]; // ordered_tabs should include both tabs - expect(tabs.ordered_tabs).toHaveLength(2); - expect(tabs.ordered_tabs[0]!.id).toBe(tab1.id); - expect(tabs.ordered_tabs[1]!.id).toBe(tab2.id); + assert.strictEqual(tabs.ordered_tabs.length, 2); + assert.strictEqual(tabs.ordered_tabs[0]!.id, tab1.id); + assert.strictEqual(tabs.ordered_tabs[1]!.id, tab2.id); }); test('complex tab workflow', () 
=> { // Create a preview tab const preview = tabs.preview_diskfile(TEST_DISKFILE_ID_1); - expect(tabs.preview_tab_id).toBe(preview.id); - expect(tabs.selected_tab_id).toBe(preview.id); + assert.strictEqual(tabs.preview_tab_id, preview.id); + assert.strictEqual(tabs.selected_tab_id, preview.id); // Create a permanent tab const permanent = tabs.open_diskfile(TEST_DISKFILE_ID_2); - expect(tabs.preview_tab_id).toBe(null); // Preview was repurposed - expect(tabs.selected_tab_id).toBe(permanent.id); + assert.isNull(tabs.preview_tab_id); // Preview was repurposed + assert.strictEqual(tabs.selected_tab_id, permanent.id); // Create another preview tab const preview2 = tabs.preview_diskfile(TEST_DISKFILE_ID_3); - expect(tabs.preview_tab_id).toBe(preview2.id); - expect(tabs.selected_tab_id).toBe(preview2.id); + assert.strictEqual(tabs.preview_tab_id, preview2.id); + assert.strictEqual(tabs.selected_tab_id, preview2.id); // Select the permanent tab tabs.select_tab(permanent.id); - expect(tabs.selected_tab_id).toBe(permanent.id); - expect(tabs.preview_tab_id).toBe(preview2.id); // Preview status unchanged + assert.strictEqual(tabs.selected_tab_id, permanent.id); + assert.strictEqual(tabs.preview_tab_id, preview2.id); // Preview status unchanged // Close the permanent tab tabs.close_tab(permanent.id); - expect(tabs.selected_tab_id).toBe(preview2.id); // First remaining tab selected + assert.strictEqual(tabs.selected_tab_id, preview2.id); // First remaining tab selected // Promote the preview tab tabs.promote_preview_to_permanent(); - expect(tabs.preview_tab_id).toBe(null); - expect(tabs.selected_tab_id).toBe(preview2.id); + assert.isNull(tabs.preview_tab_id); + assert.strictEqual(tabs.selected_tab_id, preview2.id); // Reopen the closed tab tabs.reopen_last_closed_tab(); - expect(tabs.items.size).toBe(2); + assert.strictEqual(tabs.items.size, 2); }); test('can handle many tabs efficiently', () => { @@ -901,7 +905,7 @@ describe('DiskfileTabs', () => { } // Verify all tabs were 
created - expect(tabs.items.size).toBe(tab_count); + assert.strictEqual(tabs.items.size, tab_count); // Time some operations to ensure they're reasonably fast const start_reorder = performance.now(); @@ -925,8 +929,8 @@ describe('DiskfileTabs', () => { // These aren't strict assertions since timing depends on the environment // Just ensure operations complete in a reasonable time - expect(tabs.items.size).toBe(0); - expect(tabs.recently_closed_tabs.length).toBe(tab_count); + assert.strictEqual(tabs.items.size, 0); + assert.strictEqual(tabs.recently_closed_tabs.length, tab_count); }); test('preview tab lifecycle with multiple operations', () => { @@ -936,27 +940,27 @@ describe('DiskfileTabs', () => { // Preview a file const preview = tabs.preview_diskfile(TEST_DISKFILE_ID_3); - expect(preview.is_preview).toBe(true); + assert.ok(preview.is_preview); // Double-click simulation - promote to permanent tabs.open_tab(preview.id); - expect(preview.is_preview).toBe(false); - expect(tabs.preview_tab_id).toBe(null); + assert.ok(!preview.is_preview); + assert.isNull(tabs.preview_tab_id); // Create a new preview const preview2 = tabs.preview_diskfile(TEST_DISKFILE_ID_4); - expect(preview2.is_preview).toBe(true); - expect(preview2.id).not.toBe(preview.id); + assert.ok(preview2.is_preview); + assert.notStrictEqual(preview2.id, preview.id); // Close the preview tabs.close_tab(preview2.id); - expect(tabs.preview_tab_id).toBe(null); + assert.isNull(tabs.preview_tab_id); // All permanent tabs should remain - expect(tabs.items.size).toBe(3); - expect(tabs.items.by_id.has(tab1.id)).toBe(true); - expect(tabs.items.by_id.has(tab2.id)).toBe(true); - expect(tabs.items.by_id.has(preview.id)).toBe(true); + assert.strictEqual(tabs.items.size, 3); + assert.ok(tabs.items.by_id.has(tab1.id)); + assert.ok(tabs.items.by_id.has(tab2.id)); + assert.ok(tabs.items.by_id.has(preview.id)); }); test('by_diskfile_id map updates correctly', () => { @@ -965,25 +969,25 @@ describe('DiskfileTabs', () => { 
const tab2 = tabs.open_diskfile(TEST_DISKFILE_ID_2); // Verify map contents - expect(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_1)).toBe(tab1); - expect(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_2)).toBe(tab2); - expect(tabs.by_diskfile_id.size).toBe(2); + assert.strictEqual(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_1), tab1); + assert.strictEqual(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_2), tab2); + assert.strictEqual(tabs.by_diskfile_id.size, 2); // Create preview that reuses a tab const preview = tabs.preview_diskfile(TEST_DISKFILE_ID_3); - expect(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_3)).toBe(preview); - expect(tabs.by_diskfile_id.size).toBe(3); + assert.strictEqual(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_3), preview); + assert.strictEqual(tabs.by_diskfile_id.size, 3); // Reuse preview for different file tabs.preview_diskfile(TEST_DISKFILE_ID_4); - expect(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_3)).toBeUndefined(); - expect(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_4)).toBe(preview); - expect(tabs.by_diskfile_id.size).toBe(3); + assert.ok(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_3) === undefined); + assert.strictEqual(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_4), preview); + assert.strictEqual(tabs.by_diskfile_id.size, 3); // Close a tab tabs.close_tab(tab1.id); - expect(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_1)).toBeUndefined(); - expect(tabs.by_diskfile_id.size).toBe(2); + assert.ok(tabs.by_diskfile_id.get(TEST_DISKFILE_ID_1) === undefined); + assert.strictEqual(tabs.by_diskfile_id.size, 2); }); }); @@ -995,14 +999,14 @@ describe('DiskfileTabs', () => { const tab3 = tabs.open_diskfile(TEST_DISKFILE_ID_3); // Initial order - expect(tabs.tab_order).toEqual([tab1.id, tab2.id, tab3.id]); + assert.deepEqual(tabs.tab_order, [tab1.id, tab2.id, tab3.id]); // Use private method through public API - reorder simulates position_tab behavior tabs.reorder_tabs(2, 0); // Move tab3 to position after tab1 (index 1) - expect(tabs.tab_order[0]).toBe(tab3.id); - 
expect(tabs.tab_order[1]).toBe(tab1.id); - expect(tabs.tab_order[2]).toBe(tab2.id); + assert.strictEqual(tabs.tab_order[0], tab3.id); + assert.strictEqual(tabs.tab_order[1], tab1.id); + assert.strictEqual(tabs.tab_order[2], tab2.id); }); test('#update_tab_history maintains correct order and size', () => { @@ -1022,19 +1026,19 @@ describe('DiskfileTabs', () => { tabs.select_tab(tab4.id); // History should only contain last 3 - expect(tabs.recent_tab_ids).toHaveLength(3); - expect(tabs.recent_tab_ids[0]!).toBe(tab4.id); - expect(tabs.recent_tab_ids[1]!).toBe(tab3.id); - expect(tabs.recent_tab_ids[2]!).toBe(tab2.id); - expect(tabs.recent_tab_ids).not.toContain(tab1.id); + assert.strictEqual(tabs.recent_tab_ids.length, 3); + assert.strictEqual(tabs.recent_tab_ids[0]!, tab4.id); + assert.strictEqual(tabs.recent_tab_ids[1]!, tab3.id); + assert.strictEqual(tabs.recent_tab_ids[2]!, tab2.id); + assert.notInclude(tabs.recent_tab_ids, tab1.id); // Select an existing tab in history tabs.select_tab(tab2.id); // Should move to front - expect(tabs.recent_tab_ids[0]!).toBe(tab2.id); - expect(tabs.recent_tab_ids[1]!).toBe(tab4.id); - expect(tabs.recent_tab_ids[2]!).toBe(tab3.id); + assert.strictEqual(tabs.recent_tab_ids[0]!, tab2.id); + assert.strictEqual(tabs.recent_tab_ids[1]!, tab4.id); + assert.strictEqual(tabs.recent_tab_ids[2]!, tab3.id); }); }); @@ -1047,22 +1051,22 @@ describe('DiskfileTabs', () => { // Every tab in tab_order should exist in items for (const tab_id of tabs.tab_order) { - expect(tabs.items.by_id.has(tab_id)).toBe(true); + assert.ok(tabs.items.by_id.has(tab_id)); } // Close a tab tabs.close_tab(tab2.id); // Check consistency again - expect(tabs.tab_order).not.toContain(tab2.id); - expect(tabs.items.by_id.has(tab2.id)).toBe(false); + assert.notInclude(tabs.tab_order, tab2.id); + assert.ok(!tabs.items.by_id.has(tab2.id)); // Reopen a tab tabs.reopen_last_closed_tab(); // Check consistency once more for (const tab_id of tabs.tab_order) { - 
expect(tabs.items.by_id.has(tab_id)).toBe(true); + assert.ok(tabs.items.by_id.has(tab_id)); } }); @@ -1072,21 +1076,21 @@ describe('DiskfileTabs', () => { const preview = tabs.preview_diskfile(TEST_DISKFILE_ID_2); // Check derived properties - expect(tabs.selected_tab).toBe(preview); - expect(tabs.preview_tab).toBe(preview); - expect(tabs.selected_diskfile_id).toBe(TEST_DISKFILE_ID_2); + assert.strictEqual(tabs.selected_tab, preview); + assert.strictEqual(tabs.preview_tab, preview); + assert.strictEqual(tabs.selected_diskfile_id, TEST_DISKFILE_ID_2); // Select different tab tabs.select_tab(tab1.id); // Check updated derived properties - expect(tabs.selected_tab).toBe(tab1); - expect(tabs.selected_diskfile_id).toBe(TEST_DISKFILE_ID_1); - expect(tabs.preview_tab).toBe(preview); // Preview unchanged + assert.strictEqual(tabs.selected_tab, tab1); + assert.strictEqual(tabs.selected_diskfile_id, TEST_DISKFILE_ID_1); + assert.strictEqual(tabs.preview_tab, preview); // Preview unchanged // Promote preview tabs.promote_preview_to_permanent(); - expect(tabs.preview_tab).toBeUndefined(); + assert.ok(tabs.preview_tab === undefined); }); }); }); diff --git a/src/test/indexed_collection.svelte.base.test.ts b/src/test/indexed_collection.svelte.base.test.ts index 78c4b4780..7dfecb091 100644 --- a/src/test/indexed_collection.svelte.base.test.ts +++ b/src/test/indexed_collection.svelte.base.test.ts @@ -1,8 +1,6 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, describe} from 'vitest'; +import {test, assert, describe} from 'vitest'; import {z} from 'zod'; import {IndexedCollection} from '$lib/indexed_collection.svelte.js'; @@ -61,19 +59,19 @@ describe('IndexedCollection - Base Functionality', () => { collection.add(item2); // Check size and content - expect(collection.size).toBe(2); + assert.strictEqual(collection.size, 2); // Use id-based comparison with by_id.values() - expect(has_item_with_id(collection.by_id.values(), item1)).toBe(true); - 
expect(has_item_with_id(collection.by_id.values(), item2)).toBe(true); + assert.ok(has_item_with_id(collection.by_id.values(), item1)); + assert.ok(has_item_with_id(collection.by_id.values(), item2)); // Test retrieval by id - expect(collection.get(item1.id)?.id).toBe(item1.id); + assert.strictEqual(collection.get(item1.id)?.id, item1.id); // Test removal - expect(collection.remove(item1.id)).toBe(true); - expect(collection.size).toBe(1); - expect(collection.get(item1.id)).toBeUndefined(); - expect(collection.get(item2.id)?.id).toBe(item2.id); + assert.ok(collection.remove(item1.id)); + assert.strictEqual(collection.size, 1); + assert.isUndefined(collection.get(item1.id)); + assert.strictEqual(collection.get(item2.id)?.id, item2.id); }); test('single index operations', () => { @@ -97,22 +95,22 @@ describe('IndexedCollection - Base Functionality', () => { collection.add(item3); // Test lookup by single index - expect(collection.by_optional('by_text', 'a1')?.id).toBe(item1.id); - expect(collection.by_optional('by_text', 'a2')?.id).toBe(item2.id); - expect(collection.by_optional('by_text', 'a3')?.id).toBe(item3.id); - expect(collection.by_optional('by_text', 'missing')).toBeUndefined(); + assert.strictEqual(collection.by_optional('by_text', 'a1')?.id, item1.id); + assert.strictEqual(collection.by_optional('by_text', 'a2')?.id, item2.id); + assert.strictEqual(collection.by_optional('by_text', 'a3')?.id, item3.id); + assert.isUndefined(collection.by_optional('by_text', 'missing')); // Test the non-optional version that throws - expect(() => collection.by('by_text', 'missing')).toThrow(); - expect(collection.by('by_text', 'a1').id).toBe(item1.id); + assert.throws(() => collection.by('by_text', 'missing')); + assert.strictEqual(collection.by('by_text', 'a1').id, item1.id); // Test query method - expect(collection.query('by_text', 'a1').id).toBe(item1.id); + assert.strictEqual(collection.query('by_text', 'a1').id, item1.id); // Test index update on removal 
collection.remove(item2.id); - expect(collection.by_optional('by_text', 'a2')).toBeUndefined(); - expect(collection.size).toBe(2); + assert.isUndefined(collection.by_optional('by_text', 'a2')); + assert.strictEqual(collection.size, 2); }); }); @@ -140,24 +138,24 @@ describe('IndexedCollection - Index Types', () => { collection.add(item4); // Test multi-index lookup - expect(collection.where('by_category', 'c1')).toHaveLength(2); + assert.strictEqual(collection.where('by_category', 'c1').length, 2); const c1_items = collection.where('by_category', 'c1'); - expect(c1_items.some((item) => item.id === item1.id)).toBe(true); - expect(c1_items.some((item) => item.id === item2.id)).toBe(true); + assert.ok(c1_items.some((item) => item.id === item1.id)); + assert.ok(c1_items.some((item) => item.id === item2.id)); - expect(collection.where('by_category', 'c2')).toHaveLength(2); + assert.strictEqual(collection.where('by_category', 'c2').length, 2); const c2_items = collection.where('by_category', 'c2'); - expect(c2_items.some((item) => item.id === item3.id)).toBe(true); - expect(c2_items.some((item) => item.id === item4.id)).toBe(true); + assert.ok(c2_items.some((item) => item.id === item3.id)); + assert.ok(c2_items.some((item) => item.id === item4.id)); // Test first/latest with limit - expect(collection.first('by_category', 'c1', 1)).toHaveLength(1); - expect(collection.latest('by_category', 'c2', 1)).toHaveLength(1); + assert.strictEqual(collection.first('by_category', 'c1', 1).length, 1); + assert.strictEqual(collection.latest('by_category', 'c2', 1).length, 1); // Test index update on removal collection.remove(item1.id); - expect(collection.where('by_category', 'c1')).toHaveLength(1); - expect(collection.where('by_category', 'c1')[0]!.id).toBe(item2.id); + assert.strictEqual(collection.where('by_category', 'c1').length, 1); + assert.strictEqual(collection.where('by_category', 'c1')[0]!.id, item2.id); }); test('derived index operations', () => { @@ -194,33 +192,33 @@ 
describe('IndexedCollection - Index Types', () => { // Check derived index const high_numbers = collection.derived_index('high_numbers'); - expect(high_numbers).toHaveLength(3); + assert.strictEqual(high_numbers.length, 3); // Compare by id instead of reference - expect(high_numbers[0]!.id).toBe(high_item.id); // Highest number first (10) - expect(high_numbers[1]!.id).toBe(medium_item.id); // Second number (8) - expect(high_numbers[2]!.id).toBe(threshold_item.id); // Third number (6) - expect(high_numbers.some((item) => item.id === low_item.id)).toBe(false); // Low number excluded (3) + assert.strictEqual(high_numbers[0]!.id, high_item.id); // Highest number first (10) + assert.strictEqual(high_numbers[1]!.id, medium_item.id); // Second number (8) + assert.strictEqual(high_numbers[2]!.id, threshold_item.id); // Third number (6) + assert.ok(!high_numbers.some((item) => item.id === low_item.id)); // Low number excluded (3) // Test direct access via get_index const high_numbers_via_index = collection.get_index('high_numbers'); - expect(high_numbers_via_index).toEqual(high_numbers); + assert.deepEqual(high_numbers_via_index, high_numbers); // Test incremental update const new_high_item = create_item('a5', 'c1', [], 9); collection.add(new_high_item); const updated_high_numbers = collection.derived_index('high_numbers'); - expect(updated_high_numbers).toHaveLength(4); - expect(updated_high_numbers[0]!.id).toBe(high_item.id); // 10 - expect(updated_high_numbers[1]!.id).toBe(new_high_item.id); // 9 - expect(updated_high_numbers[2]!.id).toBe(medium_item.id); // 8 - expect(updated_high_numbers[3]!.id).toBe(threshold_item.id); // 6 + assert.strictEqual(updated_high_numbers.length, 4); + assert.strictEqual(updated_high_numbers[0]!.id, high_item.id); // 10 + assert.strictEqual(updated_high_numbers[1]!.id, new_high_item.id); // 9 + assert.strictEqual(updated_high_numbers[2]!.id, medium_item.id); // 8 + assert.strictEqual(updated_high_numbers[3]!.id, threshold_item.id); // 6 // 
Test removal from derived index collection.remove(high_item.id); const numbers_after_removal = collection.derived_index('high_numbers'); - expect(numbers_after_removal).toHaveLength(3); - expect(numbers_after_removal[0]!.id).toBe(new_high_item.id); // Now highest number + assert.strictEqual(numbers_after_removal.length, 3); + assert.strictEqual(numbers_after_removal[0]!.id, new_high_item.id); // Now highest number }); test('function indexes', () => { @@ -261,14 +259,14 @@ describe('IndexedCollection - Index Types', () => { const range_function = collection.get_index<(range: string) => Array>('by_range'); // Test function index queries - expect(range_function('high')).toHaveLength(2); - expect(range_function('medium')).toHaveLength(2); - expect(range_function('low')).toHaveLength(2); + assert.strictEqual(range_function('high').length, 2); + assert.strictEqual(range_function('medium').length, 2); + assert.strictEqual(range_function('low').length, 2); // Test using the query method - expect(collection.query, string>('by_range', 'high')).toHaveLength(2); - expect(collection.query, string>('by_range', 'medium')).toHaveLength(2); - expect(collection.query, string>('by_range', 'low')).toHaveLength(2); + assert.strictEqual(collection.query, string>('by_range', 'high').length, 2); + assert.strictEqual(collection.query, string>('by_range', 'medium').length, 2); + assert.strictEqual(collection.query, string>('by_range', 'low').length, 2); }); }); @@ -318,20 +316,20 @@ describe('IndexedCollection - Advanced Features', () => { collection.add_many([high_number_item, mid_number_item, low_number_item, top_number_item]); // Test single index lookup - expect(collection.by_optional('by_text', 'a1')?.id).toBe(high_number_item.id); + assert.strictEqual(collection.by_optional('by_text', 'a1')?.id, high_number_item.id); // Test multi index lookup - expect(collection.where('by_category', 'c1')).toHaveLength(3); - expect( + assert.strictEqual(collection.where('by_category', 'c1').length, 
3); + assert.ok( collection.where('by_listitem', 'l1').some((item) => item.id === high_number_item.id), - ).toBe(true); + ); // Test derived index const high_numbers = collection.derived_index('recent_high_numbers'); - expect(high_numbers).toHaveLength(2); - expect(high_numbers.some((item) => item.id === high_number_item.id)).toBe(true); - expect(high_numbers.some((item) => item.id === top_number_item.id)).toBe(true); - expect(high_numbers.some((item) => item.id === mid_number_item.id)).toBe(false); // score 7 is too low + assert.strictEqual(high_numbers.length, 2); + assert.ok(high_numbers.some((item) => item.id === high_number_item.id)); + assert.ok(high_numbers.some((item) => item.id === top_number_item.id)); + assert.ok(!high_numbers.some((item) => item.id === mid_number_item.id)); // score 7 is too low }); test('complex data structures', () => { @@ -389,17 +387,17 @@ describe('IndexedCollection - Advanced Features', () => { unique_values: Set; }>('stats'); - expect(stats.count).toBe(2); - expect(stats.average).toBe(15); - expect(stats.unique_values.size).toBe(2); - expect(stats.unique_values.has('c1')).toBe(true); + assert.strictEqual(stats.count, 2); + assert.strictEqual(stats.average, 15); + assert.strictEqual(stats.unique_values.size, 2); + assert.ok(stats.unique_values.has('c1')); // Test updating the complex structure collection.add(create_item('a3', 'c1', [], 30)); - expect(stats.count).toBe(3); - expect(stats.average).toBe(20); - expect(stats.unique_values.size).toBe(2); + assert.strictEqual(stats.count, 3); + assert.strictEqual(stats.average, 20); + assert.strictEqual(stats.unique_values.size, 2); }); test('batch operations', () => { @@ -426,26 +424,26 @@ describe('IndexedCollection - Advanced Features', () => { collection.add_many(items); // Verify all items were added - expect(collection.size).toBe(5); - expect(collection.where('by_category', 'c1').length).toBe(3); - expect(collection.where('by_category', 'c2').length).toBe(2); + 
assert.strictEqual(collection.size, 5); + assert.strictEqual(collection.where('by_category', 'c1').length, 3); + assert.strictEqual(collection.where('by_category', 'c2').length, 2); // Test removing multiple items at once const ids_to_remove = [items[0]!.id, items[2]!.id, items[4]!.id]; const removed_count = collection.remove_many(ids_to_remove); - expect(removed_count).toBe(3); - expect(collection.size).toBe(2); + assert.strictEqual(removed_count, 3); + assert.strictEqual(collection.size, 2); // Verify specific items were removed - expect(collection.has(items[0]!.id)).toBe(false); - expect(collection.has(items[1]!.id)).toBe(true); - expect(collection.has(items[2]!.id)).toBe(false); - expect(collection.has(items[3]!.id)).toBe(true); - expect(collection.has(items[4]!.id)).toBe(false); + assert.ok(!collection.has(items[0]!.id)); + assert.ok(collection.has(items[1]!.id)); + assert.ok(!collection.has(items[2]!.id)); + assert.ok(collection.has(items[3]!.id)); + assert.ok(!collection.has(items[4]!.id)); // Verify indexes were properly updated - expect(collection.where('by_category', 'c1').length).toBe(1); - expect(collection.where('by_category', 'c2').length).toBe(1); + assert.strictEqual(collection.where('by_category', 'c1').length, 1); + assert.strictEqual(collection.where('by_category', 'c2').length, 1); }); }); diff --git a/src/test/indexed_collection.svelte.edge_cases.test.ts b/src/test/indexed_collection.svelte.edge_cases.test.ts index b71921379..47aad28ca 100644 --- a/src/test/indexed_collection.svelte.edge_cases.test.ts +++ b/src/test/indexed_collection.svelte.edge_cases.test.ts @@ -1,8 +1,6 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, describe, vi} from 'vitest'; +import {test, describe, vi, assert} from 'vitest'; import {z} from 'zod'; import {IndexedCollection} from '$lib/indexed_collection.svelte.js'; @@ -14,8 +12,6 @@ import { } from '$lib/indexed_collection_helpers.svelte.js'; import {create_uuid, Uuid} from 
'$lib/zod_helpers.js'; -/* eslint-disable @typescript-eslint/no-empty-function */ - // Mock item type that implements IndexedItem interface TestItem { id: Uuid; @@ -70,30 +66,30 @@ describe('IndexedCollection - Edge Cases', () => { // Test retrieving with null values const null_item = collection.by_optional('by_number_a', null); - expect(null_item).toBeDefined(); - expect(null_item!.string_a).toBe('a2'); + assert.isDefined(null_item); + assert.strictEqual(null_item.string_a, 'a2'); // Test filtering with non-existing value - expect(collection.by_optional('by_number_a', 999)).toBeUndefined(); + assert.ok(collection.by_optional('by_number_a', 999) === undefined); // Test multi-index with shared array values const tag1_items = collection.where('by_array_a', 'tag1'); - expect(tag1_items.length).toBe(2); - expect(tag1_items.map((i) => i.string_a).sort()).toEqual(['a1', 'a4']); + assert.strictEqual(tag1_items.length, 2); + assert.deepEqual(tag1_items.map((i) => i.string_a).sort(), ['a1', 'a4']); // Item with empty array should be excluded from by_array_a index - expect(collection.where('by_array_a', undefined)).toHaveLength(0); + assert.strictEqual(collection.where('by_array_a', undefined).length, 0); // Test removing an item with null value collection.remove(item2.id); - expect(collection.by_optional('by_number_a', null)).toBeUndefined(); + assert.ok(collection.by_optional('by_number_a', null) === undefined); // Add another item with null value const item5 = create_test_item('a5', null, ['tag5']); collection.add(item5); const null_item_after = collection.by_optional('by_number_a', null); - expect(null_item_after).toBeDefined(); - expect(null_item_after!.string_a).toBe('a5'); + assert.isDefined(null_item_after); + assert.strictEqual(null_item_after.string_a, 'a5'); }); test('handling duplicates in single indexes', () => { @@ -127,18 +123,18 @@ describe('IndexedCollection - Edge Cases', () => { collection.add(item2); // Second 'a' item - should overwrite item1 in the 
index // Check that the latest addition wins for duplicate keys - expect(collection.by_optional('by_prefix', 'a')?.string_a).toBe('a456'); - expect(collection.by_optional('by_prefix', 'b')?.string_a).toBe('b789'); + assert.strictEqual(collection.by_optional('by_prefix', 'a')?.string_a, 'a456'); + assert.strictEqual(collection.by_optional('by_prefix', 'b')?.string_a, 'b789'); // Test what happens when removing an item that was overwritten in the index collection.remove(item2.id); // Remove the winning item // The index should now revert to the first item with the same key - expect(collection.by_optional('by_prefix', 'a')?.string_a).toBe('a123'); + assert.strictEqual(collection.by_optional('by_prefix', 'a')?.string_a, 'a123'); // Check that removing all items with the same key clears the index entry collection.remove(item1.id); - expect(collection.by_optional('by_prefix', 'a')).toBeUndefined(); + assert.ok(collection.by_optional('by_prefix', 'a') === undefined); console_warn_spy.mockRestore(); }); @@ -193,13 +189,14 @@ describe('IndexedCollection - Edge Cases', () => { console.log(`Time to add 100 items with 3 indexes: ${end_time - start_time}ms`); // Verify all indexes were created correctly - expect(collection.size).toBe(100); - expect(Object.keys(collection.indexes).length).toBe(3); + assert.strictEqual(collection.size, 100); + assert.strictEqual(Object.keys(collection.indexes).length, 3); // Test various queries against the indexes - expect(collection.by_optional('by_string_a', 'item23')?.number_a).toBe(23); - expect(collection.where('by_array_a', 'tag5').length).toBe(10); // 10% of items have tag5 - expect(collection.derived_index('filtered_items').length).toBe( + assert.strictEqual(collection.by_optional('by_string_a', 'item23')?.number_a, 23); + assert.strictEqual(collection.where('by_array_a', 'tag5').length, 10); // 10% of items have tag5 + assert.strictEqual( + collection.derived_index('filtered_items').length, test_batch.filter((i) => i.boolean_a && 
i.number_a !== null).length, ); @@ -210,7 +207,7 @@ describe('IndexedCollection - Edge Cases', () => { const remove_end = performance.now(); console.log(`Time to remove 50 items: ${remove_end - remove_start}ms`); - expect(collection.size).toBe(50); + assert.strictEqual(collection.size, 50); }); test('error handling for invalid index type access', () => { @@ -234,13 +231,13 @@ describe('IndexedCollection - Edge Cases', () => { collection.add(create_test_item('a1', 1, ['tag1'])); // Test accessing indexes with wrong methods - expect(() => { + assert.throws(() => { collection.where('by_string_a', 'a1'); // Using multi-index method on single index - }).toThrow(); // Should throw error about index type mismatch + }); // Should throw error about index type mismatch - expect(() => { + assert.throws(() => { collection.by('by_array_a', 'tag1'); // Using single-index method on multi-index - }).toThrow(); // Should throw error about index type mismatch + }); // Should throw error about index type mismatch }); test('handling invalid queries with schema validation', () => { @@ -264,15 +261,15 @@ describe('IndexedCollection - Edge Cases', () => { collection.add(create_test_item('a3', null)); // Null value // Test valid query - expect(collection.by_optional('by_number_a', 5)?.string_a).toBe('a1'); + assert.strictEqual(collection.by_optional('by_number_a', 5)?.string_a, 'a1'); // Test queries that violate schema collection.query('by_number_a', -10); // Negative number, should log validation error - expect(console_error_spy).toHaveBeenCalled(); + assert.ok(console_error_spy.mock.calls.length > 0); console_error_spy.mockClear(); collection.query('by_number_a', null); // Null, should log validation error - expect(console_error_spy).toHaveBeenCalled(); + assert.ok(console_error_spy.mock.calls.length > 0); console_error_spy.mockRestore(); }); @@ -340,7 +337,7 @@ describe('IndexedCollection - Edge Cases', () => { // Add test items and verify custom handlers const item1 = 
create_test_item('x1'); collection.add(item1); - expect(onadd_fn).toHaveBeenCalled(); + assert.ok(onadd_fn.mock.calls.length > 0); const item2 = create_test_item('y2'); collection.add(item2); @@ -350,19 +347,19 @@ describe('IndexedCollection - Edge Cases', () => { // Search functions should work const x_results = search_fn('x'); - expect(x_results.length).toBe(1); + assert.strictEqual(x_results.length, 1); const x_result_0 = x_results[0]; - expect(x_result_0).toBeDefined(); - expect(x_result_0!.string_a).toBe('x1'); - expect(compute_fn).toHaveBeenLastCalledWith('x'); + assert.isDefined(x_result_0); + assert.strictEqual(x_result_0.string_a, 'x1'); + assert.deepEqual(compute_fn.mock.calls[compute_fn.mock.calls.length - 1], ['x']); // Test removing an item triggers onremove collection.remove(item1.id); - expect(onremove_fn).toHaveBeenCalled(); + assert.ok(onremove_fn.mock.calls.length > 0); // Search function should be updated const no_results = search_fn('x'); - expect(no_results.length).toBe(0); + assert.strictEqual(no_results.length, 0); }); test('custom complex index behaviors', () => { @@ -416,7 +413,7 @@ describe('IndexedCollection - Edge Cases', () => { for (const value of item.array_a) { stats.array_a_frequency[value]--; if (stats.array_a_frequency[value] === 0) { - delete stats.array_a_frequency[value]; // eslint-disable-line @typescript-eslint/no-dynamic-delete + delete stats.array_a_frequency[value]; } } return stats; @@ -443,11 +440,11 @@ describe('IndexedCollection - Edge Cases', () => { array_a_frequency: Record; }>('stats'); - expect(stats.count).toBe(3); - expect(stats.boolean_a_true_count).toBe(2); - expect(stats.boolean_a_false_count).toBe(1); - expect(stats.sum_number_a).toBe(60); - expect(stats.array_a_frequency).toEqual({ + assert.strictEqual(stats.count, 3); + assert.strictEqual(stats.boolean_a_true_count, 2); + assert.strictEqual(stats.boolean_a_false_count, 1); + assert.strictEqual(stats.sum_number_a, 60); + 
assert.deepEqual(stats.array_a_frequency, { tag1: 2, tag2: 2, tag3: 2, @@ -456,25 +453,25 @@ describe('IndexedCollection - Edge Cases', () => { // Test incremental update - add an item collection.add(create_test_item('a4', 40, ['tag1', 'tag4'], false)); - expect(stats.count).toBe(4); - expect(stats.boolean_a_true_count).toBe(2); - expect(stats.boolean_a_false_count).toBe(2); - expect(stats.sum_number_a).toBe(100); - expect(stats.array_a_frequency.tag1).toBe(3); - expect(stats.array_a_frequency.tag4).toBe(1); + assert.strictEqual(stats.count, 4); + assert.strictEqual(stats.boolean_a_true_count, 2); + assert.strictEqual(stats.boolean_a_false_count, 2); + assert.strictEqual(stats.sum_number_a, 100); + assert.strictEqual(stats.array_a_frequency.tag1, 3); + assert.strictEqual(stats.array_a_frequency.tag4, 1); // Test incremental update - remove an item // Store the item reference first to ensure it exists const item1_ref = collection.by_optional('by_string_a', 'a1'); - expect(item1_ref).toBeDefined(); // Make sure we found it - collection.remove(item1_ref!.id); - - expect(stats.count).toBe(3); - expect(stats.boolean_a_true_count).toBe(1); - expect(stats.boolean_a_false_count).toBe(2); - expect(stats.sum_number_a).toBe(90); - expect(stats.array_a_frequency.tag1).toBe(2); - expect(stats.array_a_frequency.tag2).toBe(1); + assert.isDefined(item1_ref); // Make sure we found it + collection.remove(item1_ref.id); + + assert.strictEqual(stats.count, 3); + assert.strictEqual(stats.boolean_a_true_count, 1); + assert.strictEqual(stats.boolean_a_false_count, 2); + assert.strictEqual(stats.sum_number_a, 90); + assert.strictEqual(stats.array_a_frequency.tag1, 2); + assert.strictEqual(stats.array_a_frequency.tag2, 1); }); test('multi-index array instance consistency', () => { @@ -503,12 +500,12 @@ describe('IndexedCollection - Edge Cases', () => { const false_items_after = collection.where('by_boolean_a', false); // Should be the same array instances - 
expect(true_items_before).toBe(true_items_after); - expect(false_items_before).toBe(false_items_after); + assert.strictEqual(true_items_before, true_items_after); + assert.strictEqual(false_items_before, false_items_after); // Arrays should have the correct content - expect(true_items_after.length).toBe(1); - expect(false_items_after.length).toBe(1); + assert.strictEqual(true_items_after.length, 1); + assert.strictEqual(false_items_after.length, 1); }); test('multi-index reactivity behavior outside reactive context', () => { @@ -534,19 +531,19 @@ describe('IndexedCollection - Edge Cases', () => { const large_items = $derived(collection.where('by_number_group', 'large')); // Initial state - expect(small_items.length).toBe(0); - expect(medium_items.length).toBe(0); - expect(large_items.length).toBe(0); + assert.strictEqual(small_items.length, 0); + assert.strictEqual(medium_items.length, 0); + assert.strictEqual(large_items.length, 0); // Add items collection.add(create_test_item('a1', 5)); - expect(small_items.length).toBe(1); + assert.strictEqual(small_items.length, 1); collection.add(create_test_item('a2', 25)); - expect(medium_items.length).toBe(1); + assert.strictEqual(medium_items.length, 1); collection.add(create_test_item('a3', 75)); - expect(large_items.length).toBe(1); + assert.strictEqual(large_items.length, 1); // Add multiple items collection.add_many([ @@ -554,9 +551,9 @@ describe('IndexedCollection - Edge Cases', () => { create_test_item('a5', 35), create_test_item('a6', 100), ]); - expect(small_items.length).toBe(2); - expect(medium_items.length).toBe(2); - expect(large_items.length).toBe(2); + assert.strictEqual(small_items.length, 2); + assert.strictEqual(medium_items.length, 2); + assert.strictEqual(large_items.length, 2); }); test('multi-index maintains same array with complex extractors', () => { @@ -592,18 +589,18 @@ describe('IndexedCollection - Edge Cases', () => { collection.add_many([item1, item2, item3]); // Verify state - 
expect(tag1_items.length).toBe(2); // item1, item3 - expect(tag2_items.length).toBe(2); // item1, item2 - expect(conditional_tag1_items.length).toBe(2); // item1, item3 (both have boolean_a true) + assert.strictEqual(tag1_items.length, 2); // item1, item3 + assert.strictEqual(tag2_items.length, 2); // item1, item2 + assert.strictEqual(conditional_tag1_items.length, 2); // item1, item3 (both have boolean_a true) // Get references again - should be same instances const tag1_items_2 = $derived(collection.where('by_tags', 'tag1')); const tag2_items_2 = $derived(collection.where('by_tags', 'tag2')); const conditional_tag1_items_2 = $derived(collection.where('by_conditional_tags', 'tag1')); - expect(tag1_items).toBe(tag1_items_2); - expect(tag2_items).toBe(tag2_items_2); - expect(conditional_tag1_items).toBe(conditional_tag1_items_2); + assert.strictEqual(tag1_items, tag1_items_2); + assert.strictEqual(tag2_items, tag2_items_2); + assert.strictEqual(conditional_tag1_items, conditional_tag1_items_2); }); test('multi-index sort functionality', () => { @@ -629,24 +626,36 @@ describe('IndexedCollection - Edge Cases', () => { const true_items = collection.where('by_boolean_sorted', true); // Verify initial sort order - expect(true_items.map((i) => i.string_a)).toEqual(['a2', 'a3', 'a1']); + assert.deepEqual( + true_items.map((i) => i.string_a), + ['a2', 'a3', 'a1'], + ); // Add new item that should be inserted in middle const item4 = create_test_item('a4', 25, [], true); collection.add(item4); // Verify array maintains sort order - expect(true_items.map((i) => i.string_a)).toEqual(['a2', 'a3', 'a4', 'a1']); + assert.deepEqual( + true_items.map((i) => i.string_a), + ['a2', 'a3', 'a4', 'a1'], + ); // Add item at beginning const item5 = create_test_item('a5', 5, [], true); collection.add(item5); - expect(true_items.map((i) => i.string_a)).toEqual(['a5', 'a2', 'a3', 'a4', 'a1']); + assert.deepEqual( + true_items.map((i) => i.string_a), + ['a5', 'a2', 'a3', 'a4', 'a1'], + ); // 
Remove middle item collection.remove(item3.id); - expect(true_items.map((i) => i.string_a)).toEqual(['a5', 'a2', 'a4', 'a1']); + assert.deepEqual( + true_items.map((i) => i.string_a), + ['a5', 'a2', 'a4', 'a1'], + ); }); test('multi-index empty bucket behavior', () => { @@ -663,16 +672,16 @@ describe('IndexedCollection - Edge Cases', () => { // Get initial reference const category_x = $derived(collection.where('by_category', 'x')); - expect(category_x.length).toBe(0); + assert.strictEqual(category_x.length, 0); // Add item to create bucket const item1 = create_test_item('x_1', 1); collection.add(item1); - expect(category_x.length).toBe(1); + assert.strictEqual(category_x.length, 1); // Get new reference - should be same instance const category_x_after = $derived(collection.where('by_category', 'x')); - expect(category_x).toBe(category_x_after); + assert.strictEqual(category_x, category_x_after); // Add more items collection.add_many([ @@ -681,14 +690,14 @@ describe('IndexedCollection - Edge Cases', () => { create_test_item('y_1', 4), ]); - expect(category_x.length).toBe(3); + assert.strictEqual(category_x.length, 3); // Remove all x items const x_ids = category_x.map((item) => item.id); collection.remove_many(x_ids); // After removal, the same array is empty - expect(category_x_after.length).toBe(0); + assert.strictEqual(category_x_after.length, 0); }); test('multi-index behavior with bucket deletion and recreation', () => { @@ -705,21 +714,21 @@ describe('IndexedCollection - Edge Cases', () => { // Get reference before adding any items const a_items_1 = $derived(collection.where('by_prefix', 'a')); - expect(a_items_1.length).toBe(0); + assert.strictEqual(a_items_1.length, 0); // Add and remove items const item1 = create_test_item('a1', 1); collection.add(item1); - expect(a_items_1.length).toBe(1); + assert.strictEqual(a_items_1.length, 1); collection.remove(item1.id); // The array is now empty - expect(a_items_1.length).toBe(0); + 
assert.strictEqual(a_items_1.length, 0); // Add items again - updates the same array const item2 = create_test_item('a2', 2); collection.add(item2); - expect(a_items_1.length).toBe(1); // both references see the update + assert.strictEqual(a_items_1.length, 1); // both references see the update }); test('multi-index with undefined extractor results', () => { @@ -747,12 +756,12 @@ describe('IndexedCollection - Edge Cases', () => { ]); const even_items = collection.where('by_even_numbers', 'even'); - expect(even_items.length).toBe(2); - expect(even_items.map((i) => i.string_a).sort()).toEqual(['a1', 'a3']); + assert.strictEqual(even_items.length, 2); + assert.deepEqual(even_items.map((i) => i.string_a).sort(), ['a1', 'a3']); // Undefined key should return empty array const undefined_items = collection.where('by_even_numbers', undefined); - expect(undefined_items.length).toBe(0); + assert.strictEqual(undefined_items.length, 0); }); test('multi-index preserves array reference consistency', () => { @@ -770,8 +779,8 @@ describe('IndexedCollection - Edge Cases', () => { // Step 1: Get initial empty array references const cat_a = $derived(collection.where('by_category', 'a')); const cat_b = $derived(collection.where('by_category', 'b')); - expect(cat_a.length).toBe(0); - expect(cat_b.length).toBe(0); + assert.strictEqual(cat_a.length, 0); + assert.strictEqual(cat_b.length, 0); // Step 2: Add items collection.add_many([ @@ -781,25 +790,25 @@ describe('IndexedCollection - Edge Cases', () => { ]); // Step 3: Verify same array instances are updated - expect(cat_a.length).toBe(2); - expect(cat_b.length).toBe(1); - expect(collection.where('by_category', 'a')).toBe(cat_a); - expect(collection.where('by_category', 'b')).toBe(cat_b); + assert.strictEqual(cat_a.length, 2); + assert.strictEqual(cat_b.length, 1); + assert.strictEqual(collection.where('by_category', 'a'), cat_a); + assert.strictEqual(collection.where('by_category', 'b'), cat_b); // Step 4: Remove some items const a_1 = 
cat_a.find((i) => i.string_a === 'a_1'); collection.remove(a_1!.id); - expect(cat_a.length).toBe(1); + assert.strictEqual(cat_a.length, 1); // Step 5: Remove all items from a category const a_2 = cat_a.find((i) => i.string_a === 'a_2'); collection.remove(a_2!.id); // Array is now empty but still exists - expect(cat_a.length).toBe(0); + assert.strictEqual(cat_a.length, 0); // Step 6: Verify we still get the same instance const cat_a_new = collection.where('by_category', 'a'); - expect(cat_a_new.length).toBe(0); + assert.strictEqual(cat_a_new.length, 0); }); test('reactive context tracking with $derived', () => { @@ -819,8 +828,8 @@ describe('IndexedCollection - Edge Cases', () => { const false_count = $derived(collection.where('by_boolean_a', false).length); // Initial state - expect(true_count).toBe(0); - expect(false_count).toBe(0); + assert.strictEqual(true_count, 0); + assert.strictEqual(false_count, 0); // Add items collection.add_many([ @@ -830,15 +839,15 @@ describe('IndexedCollection - Edge Cases', () => { ]); // Derived values should update - expect(true_count).toBe(2); - expect(false_count).toBe(1); + assert.strictEqual(true_count, 2); + assert.strictEqual(false_count, 1); // Remove an item const items = collection.where('by_boolean_a', true); collection.remove(items[0]!.id); - expect(true_count).toBe(1); - expect(false_count).toBe(1); + assert.strictEqual(true_count, 1); + assert.strictEqual(false_count, 1); }); test('multi-index with $state creates reactive arrays', () => { @@ -857,7 +866,7 @@ describe('IndexedCollection - Edge Cases', () => { const tag1_derived_length = $derived(collection.where('by_tags', 'tag1').length); // Initial state - expect(tag1_derived_length).toBe(0); + assert.strictEqual(tag1_derived_length, 0); // Add items collection.add_many([ @@ -866,13 +875,13 @@ describe('IndexedCollection - Edge Cases', () => { ]); // Derived value should update - expect(tag1_derived_length).toBe(2); + assert.strictEqual(tag1_derived_length, 2); // 
Remove an item const tag1_items = collection.where('by_tags', 'tag1'); collection.remove(tag1_items[0]!.id); // Derived value should update again - expect(tag1_derived_length).toBe(1); + assert.strictEqual(tag1_derived_length, 1); }); }); diff --git a/src/test/indexed_collection.svelte.optimization.test.ts b/src/test/indexed_collection.svelte.optimization.test.ts index a8cfc601c..c9bf1c625 100644 --- a/src/test/indexed_collection.svelte.optimization.test.ts +++ b/src/test/indexed_collection.svelte.optimization.test.ts @@ -1,8 +1,6 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, vi, describe} from 'vitest'; +import {test, assert, describe, vi} from 'vitest'; import {z} from 'zod'; import {IndexedCollection} from '$lib/indexed_collection.svelte.js'; @@ -58,8 +56,8 @@ describe('IndexedCollection - Optimization Tests', () => { }); // Verify compute was called exactly once during initialization - expect(compute_spy).toHaveBeenCalledTimes(1); - expect(collection.size).toBe(2); + assert.strictEqual(compute_spy.mock.calls.length, 1); + assert.strictEqual(collection.size, 2); }); test('incremental updates avoid recomputing entire index', () => { @@ -97,23 +95,23 @@ describe('IndexedCollection - Optimization Tests', () => { }); // Verify compute was called exactly once during initialization - expect(compute_spy).toHaveBeenCalledTimes(1); + assert.strictEqual(compute_spy.mock.calls.length, 1); // Add more items and check that compute isn't called again collection.add(create_item('string_a3', 'string_b3', [], 20)); collection.add(create_item('string_a4', 'string_b4', [], 8)); // Compute should still have been called only once - expect(compute_spy).toHaveBeenCalledTimes(1); + assert.strictEqual(compute_spy.mock.calls.length, 1); // onadd should have been called twice - once for each new item - expect(onadd_spy).toHaveBeenCalledTimes(2); + assert.strictEqual(onadd_spy.mock.calls.length, 2); // Check that the index was correctly updated const 
high_number = collection.derived_index('high_number'); - expect(high_number.length).toBe(2); - expect(high_number.some((item) => item.string_a === 'string_a1')).toBe(true); - expect(high_number.some((item) => item.string_a === 'string_a3')).toBe(true); + assert.strictEqual(high_number.length, 2); + assert.ok(high_number.some((item) => item.string_a === 'string_a1')); + assert.ok(high_number.some((item) => item.string_a === 'string_a3')); }); test('batch operations are more efficient', () => { @@ -159,7 +157,7 @@ describe('IndexedCollection - Optimization Tests', () => { const batch_time = end_time - start_time; // Verify onadd was called for each item - expect(onadd_spy).toHaveBeenCalledTimes(100); + assert.strictEqual(onadd_spy.mock.calls.length, 100); // Reset the spy for individual adds onadd_spy.mockClear(); @@ -179,7 +177,7 @@ describe('IndexedCollection - Optimization Tests', () => { const individual_time = individual_end - individual_start; // Verify onadd was called for each item - expect(onadd_spy).toHaveBeenCalledTimes(100); + assert.strictEqual(onadd_spy.mock.calls.length, 100); // This test is somewhat approximative but helps validate the efficiency // We're not making a strict assertion on performance as it can vary between environments @@ -220,10 +218,10 @@ describe('IndexedCollection - Optimization Tests', () => { const number_fn = collection.get_index<(threshold: string) => Array>('by_min_number'); // These should return different filtered subsets without storing separate copies - expect(number_fn('10').length).not.toBe(number_fn('50').length); - expect(number_fn('0').length).toBe(20); // All items - expect(number_fn('50').length).toBe(10); // Half the items - expect(number_fn('90').length).toBe(2); // Just the highest values + assert.ok(number_fn('10').length !== number_fn('50').length); + assert.strictEqual(number_fn('0').length, 20); // All items + assert.strictEqual(number_fn('50').length, 10); // Half the items + 
assert.strictEqual(number_fn('90').length, 2); // Just the highest values }); test('memory usage with large datasets', () => { @@ -252,11 +250,11 @@ describe('IndexedCollection - Optimization Tests', () => { // Verify the index contains the expected number of categories const b_index = collection.get_index>>('by_string_b'); // console.log(`b_index`, $state.snapshot(b_index)); - expect(b_index.size).toBe(10); // 10 unique categories + assert.strictEqual(b_index.size, 10); // 10 unique categories // Verify each category has the right number of items for (let i = 0; i < 10; i++) { - expect(collection.where('by_string_b', `string_b${i}`).length).toBe(100); // 1000 items / 10 categories = 100 per category + assert.strictEqual(collection.where('by_string_b', `string_b${i}`).length, 100); // 1000 items / 10 categories = 100 per category } }); }); diff --git a/src/test/indexed_collection.svelte.queries.test.ts b/src/test/indexed_collection.svelte.queries.test.ts index 6eda98b1a..1b18d6db5 100644 --- a/src/test/indexed_collection.svelte.queries.test.ts +++ b/src/test/indexed_collection.svelte.queries.test.ts @@ -1,8 +1,6 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, describe, beforeEach} from 'vitest'; +import {test, describe, beforeEach, assert} from 'vitest'; import {z} from 'zod'; import {IndexedCollection} from '$lib/indexed_collection.svelte.js'; @@ -218,93 +216,108 @@ describe('IndexedCollection - Query Capabilities', () => { test('basic query operations', () => { // Single index direct lookup - expect(collection.by_optional('by_string_a', 'a1'.toLowerCase())).toBe(items[0]); - expect(collection.by_optional('by_string_b', 'b1')).toBeDefined(); + assert.strictEqual(collection.by_optional('by_string_a', 'a1'.toLowerCase()), items[0]); + assert.ok(collection.by_optional('by_string_b', 'b1') !== undefined); // Multi index direct lookup - expect(collection.where('by_string_c', 'c1')).toHaveLength(2); - 
expect(collection.where('by_number_a', 5)).toHaveLength(2); - expect(collection.where('by_boolean_a', 'y')).toHaveLength(3); + assert.strictEqual(collection.where('by_string_c', 'c1').length, 2); + assert.strictEqual(collection.where('by_number_a', 5).length, 2); + assert.strictEqual(collection.where('by_boolean_a', 'y').length, 3); // Non-existent values - expect(collection.by_optional('by_string_a', 'nonexistent')).toBeUndefined(); - expect(collection.where('by_string_c', 'nonexistent')).toHaveLength(0); + assert.ok(collection.by_optional('by_string_a', 'nonexistent') === undefined); + assert.strictEqual(collection.where('by_string_c', 'nonexistent').length, 0); }); test('case sensitivity in queries', () => { // Case insensitive string_a lookup (extractor converts to lowercase) - expect(collection.by_optional('by_string_a', 'a1'.toLowerCase())).toBe(items[0]); - expect(collection.by_optional('by_string_a', 'A1'.toLowerCase())).toBe(items[0]); + assert.strictEqual(collection.by_optional('by_string_a', 'a1'.toLowerCase()), items[0]); + assert.strictEqual(collection.by_optional('by_string_a', 'A1'.toLowerCase()), items[0]); // Case sensitive string_b lookup (no conversion in extractor) - expect(collection.by_optional('by_string_b', 'B1')).toBeUndefined(); - expect(collection.by_optional('by_string_b', 'b1')).toBeDefined(); + assert.ok(collection.by_optional('by_string_b', 'B1') === undefined); + assert.ok(collection.by_optional('by_string_b', 'b1') !== undefined); }); test('compound queries combining indexes', () => { // Find c1 items with string_b=b1 const c1_items = collection.where('by_string_c', 'c1'); const b1_c1_items = c1_items.filter((item) => item.string_b === 'b1'); - expect(b1_c1_items).toHaveLength(1); - expect(b1_c1_items[0]!.string_a).toBe('a1'); + assert.strictEqual(b1_c1_items.length, 1); + assert.strictEqual(b1_c1_items[0]!.string_a, 'a1'); // Find boolean_a=true items with number_a=5 const boolean_a_true_items = collection.where('by_boolean_a', 
'y'); const high_value_boolean_a_true = boolean_a_true_items.filter((item) => item.number_a === 5); - expect(high_value_boolean_a_true).toHaveLength(2); - expect(high_value_boolean_a_true.map((i) => i.string_a)).toContain('a2'); - expect(high_value_boolean_a_true.map((i) => i.string_a)).toContain('b2'); + assert.strictEqual(high_value_boolean_a_true.length, 2); + assert.include( + high_value_boolean_a_true.map((i) => i.string_a), + 'a2', + ); + assert.include( + high_value_boolean_a_true.map((i) => i.string_a), + 'b2', + ); }); test('queries with array values', () => { // Query by array_a (checks if any tag matches) const tag1_items = collection.where('by_array_a', 'tag1'); - expect(tag1_items).toHaveLength(3); - expect(tag1_items.map((i) => i.string_a)).toContain('a1'); - expect(tag1_items.map((i) => i.string_a)).toContain('a2'); - expect(tag1_items.map((i) => i.string_a)).toContain('b2'); + assert.strictEqual(tag1_items.length, 3); + assert.include( + tag1_items.map((i) => i.string_a), + 'a1', + ); + assert.include( + tag1_items.map((i) => i.string_a), + 'a2', + ); + assert.include( + tag1_items.map((i) => i.string_a), + 'b2', + ); // Multiple tags intersection (using multiple queries) const tag2_items = collection.where('by_array_a', 'tag2'); const tag2_and_tag3_items = tag2_items.filter((item) => item.array_a.includes('tag3')); - expect(tag2_and_tag3_items).toHaveLength(1); - expect(tag2_and_tag3_items[0]!.string_a).toBe('a1'); + assert.strictEqual(tag2_and_tag3_items.length, 1); + assert.strictEqual(tag2_and_tag3_items[0]!.string_a, 'a1'); }); test('derived index queries', () => { // Test the recent_boolean_a_true derived index const recent_boolean_a_true = collection.derived_index('recent_boolean_a_true'); - expect(recent_boolean_a_true).toHaveLength(3); // All boolean_a=true items + assert.strictEqual(recent_boolean_a_true.length, 3); // All boolean_a=true items // Verify order (most recent first) const rbt0 = recent_boolean_a_true[0]; const rbt1 = 
recent_boolean_a_true[1]; const rbt2 = recent_boolean_a_true[2]; - expect(rbt0).toBeDefined(); - expect(rbt1).toBeDefined(); - expect(rbt2).toBeDefined(); - expect(rbt0!.string_a).toBe('b2'); // 3 days ago - expect(rbt1!.string_a).toBe('a1'); // 10 days ago - expect(rbt2!.string_a).toBe('a2'); // 20 days ago + assert.isDefined(rbt0); + assert.isDefined(rbt1); + assert.isDefined(rbt2); + assert.strictEqual(rbt0.string_a, 'b2'); // 3 days ago + assert.strictEqual(rbt1.string_a, 'a1'); // 10 days ago + assert.strictEqual(rbt2.string_a, 'a2'); // 20 days ago // Test the high_number_a derived index which should include all items with number_a >= 4 const high_number_a = collection.derived_index('high_number_a'); - expect(high_number_a).toHaveLength(4); - expect(high_number_a.map((i) => i.string_a).sort()).toEqual(['a1', 'a2', 'b1', 'b2'].sort()); + assert.strictEqual(high_number_a.length, 4); + assert.deepEqual(high_number_a.map((i) => i.string_a).sort(), ['a1', 'a2', 'b1', 'b2'].sort()); }); test('first/latest with multi-index', () => { // Get first c1 item const first_c1 = collection.first('by_string_c', 'c1', 1); - expect(first_c1).toHaveLength(1); + assert.strictEqual(first_c1.length, 1); const first_c1_item = first_c1[0]; - expect(first_c1_item).toBeDefined(); + assert.isDefined(first_c1_item); // Get latest c2 item const latest_c2 = collection.latest('by_string_c', 'c2', 1); - expect(latest_c2).toHaveLength(1); + assert.strictEqual(latest_c2.length, 1); const latest_c2_item = latest_c2[0]; - expect(latest_c2_item).toBeDefined(); + assert.isDefined(latest_c2_item); }); test('time-based queries', () => { @@ -315,15 +328,21 @@ describe('IndexedCollection - Query Capabilities', () => { const items_this_year_count = collection.values.filter( (item) => item.date_a.getFullYear() === current_year, ).length; - expect(this_year_items.length).toBe(items_this_year_count); + assert.strictEqual(this_year_items.length, items_this_year_count); // More complex date range query - 
last 7 days const now = Date.now(); const recent_items = collection.values.filter( (item) => item.date_a.getTime() > now - 1000 * 60 * 60 * 24 * 7, ); - expect(recent_items.map((i) => i.string_a)).toContain('b1'); // 5 days ago - expect(recent_items.map((i) => i.string_a)).toContain('b2'); // 3 days ago + assert.include( + recent_items.map((i) => i.string_a), + 'b1', + ); // 5 days ago + assert.include( + recent_items.map((i) => i.string_a), + 'b2', + ); // 3 days ago }); test('adding items affects derived queries correctly', () => { @@ -342,46 +361,46 @@ describe('IndexedCollection - Query Capabilities', () => { // Check that it appears at the top of the recent_boolean_a_true list const recent_boolean_a_true = collection.derived_index('recent_boolean_a_true'); - expect(recent_boolean_a_true[0]!.id).toBe(new_item.id); + assert.strictEqual(recent_boolean_a_true[0]!.id, new_item.id); // Check that it appears in high_number_a const high_number_a = collection.derived_index('high_number_a'); - expect(has_item_with_id(high_number_a, new_item)).toBe(true); + assert.ok(has_item_with_id(high_number_a, new_item)); }); test('removing items updates derived queries', () => { // Remove the most recent boolean_a=true item const item_to_remove = items[4]; // b2 (most recent boolean_a=true) - expect(item_to_remove).toBeDefined(); + assert.isDefined(item_to_remove); - collection.remove(item_to_remove!.id); + collection.remove(item_to_remove.id); // Check that recent_boolean_a_true updates correctly const recent_boolean_a_true = collection.derived_index('recent_boolean_a_true'); - expect(recent_boolean_a_true).toHaveLength(2); + assert.strictEqual(recent_boolean_a_true.length, 2); const rbt0 = recent_boolean_a_true[0]; const rbt1 = recent_boolean_a_true[1]; - expect(rbt0).toBeDefined(); - expect(rbt1).toBeDefined(); - expect(rbt0!.string_a).toBe('a1'); - expect(rbt1!.string_a).toBe('a2'); + assert.isDefined(rbt0); + assert.isDefined(rbt1); + assert.strictEqual(rbt0.string_a, 'a1'); + 
assert.strictEqual(rbt1.string_a, 'a2'); // Check that high_number_a updates correctly const high_number_a = collection.derived_index('high_number_a'); - expect(high_number_a).not.toContain(item_to_remove); - expect(high_number_a).toHaveLength(3); // Started with 4, removed 1 + assert.notInclude(high_number_a, item_to_remove); + assert.strictEqual(high_number_a.length, 3); // Started with 4, removed 1 }); test('dynamic ordering of query results', () => { // Get all items and sort by number_a (highest first) const sorted_by_number_a = collection.values.slice().sort((a, b) => b.number_a - a.number_a); - expect(sorted_by_number_a[0]!.number_a).toBe(5); + assert.strictEqual(sorted_by_number_a[0]!.number_a, 5); // Sort by creation time (newest first) const sorted_by_time = collection.values .slice() .sort((a, b) => b.date_a.getTime() - a.date_a.getTime()); - expect(sorted_by_time[0]!.string_a).toBe('b2'); // 3 days ago + assert.strictEqual(sorted_by_time[0]!.string_a, 'b2'); // 3 days ago }); }); @@ -436,33 +455,33 @@ describe('IndexedCollection - Search Patterns', () => { test('word-based search', () => { // Find items with "alpha" in string_a const alpha_items = collection.where('by_word', 'alpha'); - expect(alpha_items).toHaveLength(2); + assert.strictEqual(alpha_items.length, 2); // Find items with "beta" in string_a const beta_items = collection.where('by_word', 'beta'); - expect(beta_items).toHaveLength(2); + assert.strictEqual(beta_items.length, 2); // Find items with both "alpha" and "beta" (intersection) const alpha_beta_items = alpha_items.filter((item) => item.string_a.toLowerCase().includes('beta'), ); - expect(alpha_beta_items).toHaveLength(1); - expect(alpha_beta_items[0]!.string_a).toBe('alpha beta gamma'); + assert.strictEqual(alpha_beta_items.length, 1); + assert.strictEqual(alpha_beta_items[0]!.string_a, 'alpha beta gamma'); }); test('range-based categorization', () => { // Find high-number_a items const high_number_a = 
collection.where('by_number_a_range', 'high'); - expect(high_number_a).toHaveLength(1); - expect(high_number_a[0]!.number_a).toBe(5); + assert.strictEqual(high_number_a.length, 1); + assert.strictEqual(high_number_a[0]!.number_a, 5); // Find mid-number_a items const mid_number_a = collection.where('by_number_a_range', 'mid'); - expect(mid_number_a).toHaveLength(2); + assert.strictEqual(mid_number_a.length, 2); // Find low-number_a items const low_number_a = collection.where('by_number_a_range', 'low'); - expect(low_number_a).toHaveLength(1); - expect(low_number_a[0]!.number_a).toBe(2); + assert.strictEqual(low_number_a.length, 1); + assert.strictEqual(low_number_a[0]!.number_a, 2); }); }); diff --git a/src/test/indexed_collection.svelte.schema_validation.test.ts b/src/test/indexed_collection.svelte.schema_validation.test.ts index 0c1a4aba5..30b05f97d 100644 --- a/src/test/indexed_collection.svelte.schema_validation.test.ts +++ b/src/test/indexed_collection.svelte.schema_validation.test.ts @@ -1,8 +1,6 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, describe, vi} from 'vitest'; +import {test, assert, describe, vi} from 'vitest'; import {z} from 'zod'; import {IndexedCollection} from '$lib/indexed_collection.svelte.js'; @@ -14,8 +12,6 @@ import { } from '$lib/indexed_collection_helpers.svelte.js'; import {create_uuid, Uuid} from '$lib/zod_helpers.js'; -/* eslint-disable @typescript-eslint/no-empty-function */ - // Mock item type that implements IndexedItem interface TestItem { id: Uuid; @@ -84,11 +80,11 @@ describe('IndexedCollection - Schema Validation', () => { // Test query with valid email const query_result = collection.query('by_string_b', 'a1@zzz.software'); - expect(query_result.string_a).toBe('a1'); + assert.strictEqual(query_result.string_a, 'a1'); // Get single index and check schema validation passed const email_index = collection.single_index('by_string_b'); - expect(email_index.size).toBe(2); + 
assert.strictEqual(email_index.size, 2); }); test('multi index properly validates input and output', () => { @@ -123,19 +119,19 @@ describe('IndexedCollection - Schema Validation', () => { // Test range query validation const mid_items = collection.query, string>('by_number_range', 'mid'); - expect(mid_items.length).toBe(1); - expect(mid_items[0]!.string_a).toBe('a2'); + assert.strictEqual(mid_items.length, 1); + assert.strictEqual(mid_items[0]!.string_a, 'a2'); // Test array index const item2_matches = collection.query, string>('by_array', 'item2'); - expect(item2_matches.length).toBe(2); - expect(item2_matches.some((item) => item.string_a === 'a1')).toBe(true); - expect(item2_matches.some((item) => item.string_a === 'a3')).toBe(true); + assert.strictEqual(item2_matches.length, 2); + assert.ok(item2_matches.some((item) => item.string_a === 'a1')); + assert.ok(item2_matches.some((item) => item.string_a === 'a3')); const item3_matches = collection.query, string>('by_array', 'item3'); - expect(item3_matches.length).toBe(2); - expect(item3_matches.some((item) => item.string_a === 'a2')).toBe(true); - expect(item3_matches.some((item) => item.string_a === 'a4')).toBe(true); + assert.strictEqual(item3_matches.length, 2); + assert.ok(item3_matches.some((item) => item.string_a === 'a2')); + assert.ok(item3_matches.some((item) => item.string_a === 'a4')); // Restore console.error console_error_spy.mockRestore(); @@ -171,12 +167,12 @@ describe('IndexedCollection - Schema Validation', () => { // Check derived index correctness const flagged_adults = collection.derived_index('flagged_adults'); - expect(flagged_adults.length).toBe(1); - expect(flagged_adults[0]!.string_a).toBe('a1'); + assert.strictEqual(flagged_adults.length, 1); + assert.strictEqual(flagged_adults[0]!.string_a, 'a1'); // Add another qualifying item and verify index updates collection.add(create_item('a5', 'b5@test.com', 40, true)); - expect(collection.derived_index('flagged_adults').length).toBe(2); + 
assert.strictEqual(collection.derived_index('flagged_adults').length, 2); }); test('dynamic index validates complex query parameters', () => { @@ -235,13 +231,13 @@ describe('IndexedCollection - Schema Validation', () => { // Test number range query const young_range = search_fn({min_number: 18, max_number: 30}); - expect(young_range.length).toBe(2); - expect(young_range.map((item) => item.string_a).sort()).toEqual(['a1', 'a3']); + assert.strictEqual(young_range.length, 2); + assert.deepEqual(young_range.map((item) => item.string_a).sort(), ['a1', 'a3']); // Test flag with specific array values const flagged_with_item1 = search_fn({only_flagged: true, array_values: ['item1']}); - expect(flagged_with_item1.length).toBe(2); - expect(flagged_with_item1.map((item) => item.string_a).sort()).toEqual(['a1', 'a4']); + assert.strictEqual(flagged_with_item1.length, 2); + assert.deepEqual(flagged_with_item1.map((item) => item.string_a).sort(), ['a1', 'a4']); // Test items over 30 that are flagged with specific array values const high_number_with_item3 = search_fn({ @@ -249,15 +245,15 @@ describe('IndexedCollection - Schema Validation', () => { only_flagged: true, array_values: ['item3'], }); - expect(high_number_with_item3.length).toBe(2); - expect(high_number_with_item3.map((item) => item.string_a).sort()).toEqual(['a2', 'a4']); + assert.strictEqual(high_number_with_item3.length, 2); + assert.deepEqual(high_number_with_item3.map((item) => item.string_a).sort(), ['a2', 'a4']); // Test using query method const with_item5 = collection.query, ItemQuery>('item_search', { array_values: ['item5'], }); - expect(with_item5.length).toBe(1); - expect(with_item5[0]!.string_a).toBe('a4'); + assert.strictEqual(with_item5.length, 1); + assert.strictEqual(with_item5[0]!.string_a, 'a4'); }); test('schema validation errors are properly handled', () => { @@ -286,16 +282,20 @@ describe('IndexedCollection - Schema Validation', () => { // Try querying with invalid email format 
collection.query('by_string_b', 'not-an-email'); - expect(console_error_spy).toHaveBeenCalledWith( - expect.stringContaining('Query validation failed for index by_string_b'), - expect.anything(), + assert.ok( + console_error_spy.mock.calls.some( + ([msg]) => + typeof msg === 'string' && msg.includes('Query validation failed for index by_string_b'), + ), ); // Try querying with out-of-range number collection.query('by_number', 5); - expect(console_error_spy).toHaveBeenCalledWith( - expect.stringContaining('Query validation failed for index by_number'), - expect.anything(), + assert.ok( + console_error_spy.mock.calls.some( + ([msg]) => + typeof msg === 'string' && msg.includes('Query validation failed for index by_number'), + ), ); console_error_spy.mockRestore(); @@ -330,7 +330,7 @@ describe('IndexedCollection - Schema Validation', () => { collection.query('by_number', 5); // Verify no validation errors were logged - expect(console_error_spy).not.toHaveBeenCalled(); + assert.strictEqual(console_error_spy.mock.calls.length, 0); console_error_spy.mockRestore(); }); @@ -367,11 +367,11 @@ describe('IndexedCollection - Schema Validation', () => { collection.add(item2); // Test lookup by nested property - use by_optional instead of where for single index - expect(collection.by_optional('by_nested_option', 'x')?.string_a).toBe('a1'); - expect(collection.by_optional('by_nested_option', 'y')?.string_a).toBe('a2'); + assert.strictEqual(collection.by_optional('by_nested_option', 'x')?.string_a, 'a1'); + assert.strictEqual(collection.by_optional('by_nested_option', 'y')?.string_a, 'a2'); // Test compound key lookup - expect(collection.where('by_compound', 'a1-x').length).toBe(1); - expect(collection.where('by_compound', 'a2-y').length).toBe(1); + assert.strictEqual(collection.where('by_compound', 'a1-x').length, 1); + assert.strictEqual(collection.where('by_compound', 'a2-y').length, 1); }); }); diff --git a/src/test/list_helpers.test.ts b/src/test/list_helpers.test.ts index 
abddac52b..07dc25007 100644 --- a/src/test/list_helpers.test.ts +++ b/src/test/list_helpers.test.ts @@ -1,6 +1,4 @@ -// @slop Claude Opus 4 - -import {test, expect, describe} from 'vitest'; +import {test, describe, assert} from 'vitest'; import {reorder_list, to_reordered_list} from '$lib/list_helpers.js'; // Test constants @@ -13,99 +11,99 @@ describe('reorder_list', () => { test('moves an item forward in the array', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 1, 3); - expect(array).toEqual(['a', 'c', 'd', 'b', 'e']); + assert.deepEqual(array, ['a', 'c', 'd', 'b', 'e']); }); test('moves an item backward in the array', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 3, 1); - expect(array).toEqual(['a', 'd', 'b', 'c', 'e']); + assert.deepEqual(array, ['a', 'd', 'b', 'c', 'e']); }); test('does nothing when from_index equals to_index', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 2, 2); - expect(array).toEqual(SAMPLE_ARRAY); + assert.deepEqual(array, SAMPLE_ARRAY); }); // Edge cases test('moves first item to the end', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 0, 5); - expect(array).toEqual(['b', 'c', 'd', 'e', 'a']); + assert.deepEqual(array, ['b', 'c', 'd', 'e', 'a']); }); test('moves first item one position forward', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 0, 1); - expect(array).toEqual(['b', 'a', 'c', 'd', 'e']); + assert.deepEqual(array, ['b', 'a', 'c', 'd', 'e']); }); test('moves last item to the beginning', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 4, 0); - expect(array).toEqual(['e', 'a', 'b', 'c', 'd']); + assert.deepEqual(array, ['e', 'a', 'b', 'c', 'd']); }); test('moves last item one position backward', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 4, 3); - expect(array).toEqual(['a', 'b', 'c', 'e', 'd']); + assert.deepEqual(array, ['a', 'b', 'c', 'e', 'd']); }); test('handles single item array correctly', () => { const array = 
[...SINGLE_ITEM_ARRAY]; reorder_list(array, 0, 0); - expect(array).toEqual(SINGLE_ITEM_ARRAY); + assert.deepEqual(array, SINGLE_ITEM_ARRAY); }); test('handles empty array correctly', () => { const array = [...EMPTY_ARRAY]; reorder_list(array, 0, 0); - expect(array).toEqual(EMPTY_ARRAY); + assert.deepEqual(array, EMPTY_ARRAY); }); // Error cases - testing that array remains unchanged with invalid indices test('handles negative from_index by leaving array unchanged', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, -1, 2); - expect(array).toEqual(SAMPLE_ARRAY); + assert.deepEqual(array, SAMPLE_ARRAY); }); test('handles out of bounds from_index by leaving array unchanged', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 10, 2); - expect(array).toEqual(SAMPLE_ARRAY); + assert.deepEqual(array, SAMPLE_ARRAY); }); test('handles negative to_index by leaving array unchanged', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 2, -1); - expect(array).toEqual(SAMPLE_ARRAY); + assert.deepEqual(array, SAMPLE_ARRAY); }); test('handles out of bounds to_index by leaving array unchanged', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 2, 10); - expect(array).toEqual(SAMPLE_ARRAY); + assert.deepEqual(array, SAMPLE_ARRAY); }); test('moves item to exact length boundary', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 2, array.length); - expect(array).toEqual(['a', 'b', 'd', 'e', 'c']); + assert.deepEqual(array, ['a', 'b', 'd', 'e', 'c']); }); test('handles adjacent indices correctly when moving forward', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 1, 2); - expect(array).toEqual(['a', 'c', 'b', 'd', 'e']); + assert.deepEqual(array, ['a', 'c', 'b', 'd', 'e']); }); test('handles adjacent indices correctly when moving backward', () => { const array = [...SAMPLE_ARRAY]; reorder_list(array, 2, 1); - expect(array).toEqual(['a', 'c', 'b', 'd', 'e']); + assert.deepEqual(array, ['a', 'c', 'b', 'd', 'e']); }); }); @@ 
-114,107 +112,107 @@ describe('to_reordered_list', () => { test('creates new array with item moved forward', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 1, 3); - expect(result).toEqual(['a', 'c', 'd', 'b', 'e']); - expect(original).toEqual(SAMPLE_ARRAY); // Original unchanged + assert.deepEqual(result, ['a', 'c', 'd', 'b', 'e']); + assert.deepEqual(original, SAMPLE_ARRAY); // Original unchanged }); test('creates new array with item moved backward', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 3, 1); - expect(result).toEqual(['a', 'd', 'b', 'c', 'e']); - expect(original).toEqual(SAMPLE_ARRAY); // Original unchanged + assert.deepEqual(result, ['a', 'd', 'b', 'c', 'e']); + assert.deepEqual(original, SAMPLE_ARRAY); // Original unchanged }); test('returns original array when from_index equals to_index', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 2, 2); - expect(result).toBe(original); // Same reference, not just equal + assert.strictEqual(result, original); // Same reference, not just equal }); // Edge cases test('creates new array with first item moved to end', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 0, 5); - expect(result).toEqual(['b', 'c', 'd', 'e', 'a']); - expect(original).toEqual(SAMPLE_ARRAY); // Original unchanged + assert.deepEqual(result, ['b', 'c', 'd', 'e', 'a']); + assert.deepEqual(original, SAMPLE_ARRAY); // Original unchanged }); test('creates new array with first item moved one position forward', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 0, 1); - expect(result).toEqual(['b', 'a', 'c', 'd', 'e']); - expect(original).toEqual(SAMPLE_ARRAY); // Original unchanged + assert.deepEqual(result, ['b', 'a', 'c', 'd', 'e']); + assert.deepEqual(original, SAMPLE_ARRAY); // Original unchanged }); test('creates new array with last item moved to 
beginning', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 4, 0); - expect(result).toEqual(['e', 'a', 'b', 'c', 'd']); - expect(original).toEqual(SAMPLE_ARRAY); // Original unchanged + assert.deepEqual(result, ['e', 'a', 'b', 'c', 'd']); + assert.deepEqual(original, SAMPLE_ARRAY); // Original unchanged }); test('creates new array with last item moved one position backward', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 4, 3); - expect(result).toEqual(['a', 'b', 'c', 'e', 'd']); - expect(original).toEqual(SAMPLE_ARRAY); // Original unchanged + assert.deepEqual(result, ['a', 'b', 'c', 'e', 'd']); + assert.deepEqual(original, SAMPLE_ARRAY); // Original unchanged }); test('handles single item array correctly', () => { const original = [...SINGLE_ITEM_ARRAY]; const result = to_reordered_list(original, 0, 0); - expect(result).toBe(original); // Same reference + assert.strictEqual(result, original); // Same reference }); test('handles empty array correctly', () => { const original = [...EMPTY_ARRAY]; const result = to_reordered_list(original, 0, 0); - expect(result).toBe(original); // Same reference + assert.strictEqual(result, original); // Same reference }); // Error cases - testing that original array is returned with invalid indices test('handles negative from_index by returning original array', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, -1, 2); - expect(result).toBe(original); + assert.strictEqual(result, original); }); test('handles out of bounds from_index by returning original array', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 10, 2); - expect(result).toBe(original); + assert.strictEqual(result, original); }); test('handles negative to_index by returning original array', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 2, -1); - expect(result).toBe(original); + 
assert.strictEqual(result, original); }); test('handles out of bounds to_index by returning original array', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 2, 10); - expect(result).toBe(original); + assert.strictEqual(result, original); }); test('creates new array with item moved to exact length boundary', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 2, original.length); - expect(result).toEqual(['a', 'b', 'd', 'e', 'c']); - expect(original).toEqual(SAMPLE_ARRAY); // Original unchanged + assert.deepEqual(result, ['a', 'b', 'd', 'e', 'c']); + assert.deepEqual(original, SAMPLE_ARRAY); // Original unchanged }); test('creates new array with adjacent indices correctly when moving forward', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 1, 2); - expect(result).toEqual(['a', 'c', 'b', 'd', 'e']); - expect(original).toEqual(SAMPLE_ARRAY); // Original unchanged + assert.deepEqual(result, ['a', 'c', 'b', 'd', 'e']); + assert.deepEqual(original, SAMPLE_ARRAY); // Original unchanged }); test('creates new array with adjacent indices correctly when moving backward', () => { const original = [...SAMPLE_ARRAY]; const result = to_reordered_list(original, 2, 1); - expect(result).toEqual(['a', 'c', 'b', 'd', 'e']); - expect(original).toEqual(SAMPLE_ARRAY); // Original unchanged + assert.deepEqual(result, ['a', 'c', 'b', 'd', 'e']); + assert.deepEqual(original, SAMPLE_ARRAY); // Original unchanged }); }); diff --git a/src/test/ollama.svelte.test.ts b/src/test/ollama.svelte.test.ts index 5d65c9622..1d9c768ce 100644 --- a/src/test/ollama.svelte.test.ts +++ b/src/test/ollama.svelte.test.ts @@ -1,14 +1,12 @@ -// @slop claude_sonnet_4 - // @vitest-environment jsdom -import {test, expect, describe} from 'vitest'; +import {test, describe, assert} from 'vitest'; import {Ollama} from '$lib/ollama.svelte.js'; import {Frontend} from '$lib/frontend.svelte.js'; import config 
from '$lib/config.js'; import {OLLAMA_URL} from '$lib/ollama_helpers.js'; -import {create_action_event} from '$lib/action_event.js'; +import {create_action_event} from '@fuzdev/fuz_app/actions/action_event.js'; describe('Ollama', () => { const create_test_app = () => { @@ -23,18 +21,18 @@ describe('Ollama', () => { const app = create_test_app(); const ollama = new Ollama({app}); - expect(ollama.host).toBe(OLLAMA_URL); - expect(ollama.list_status).toBe('initial'); - expect(ollama.available).toBe(false); - expect(ollama.models.length).toBeTypeOf('number'); + assert.strictEqual(ollama.host, OLLAMA_URL); + assert.strictEqual(ollama.list_status, 'initial'); + assert.ok(!ollama.available); + assert.typeOf(ollama.models.length, 'number'); }); test('should track pending and completed actions', () => { const app = create_test_app(); const ollama = new Ollama({app}); - expect(ollama.pending_actions).toHaveLength(0); - expect(ollama.completed_actions).toHaveLength(0); + assert.strictEqual(ollama.pending_actions.length, 0); + assert.strictEqual(ollama.completed_actions.length, 0); // Add a pending action app.actions.add_from_json({ @@ -55,8 +53,8 @@ describe('Ollama', () => { }, }); - expect(ollama.pending_actions).toHaveLength(1); - expect(ollama.completed_actions).toHaveLength(0); + assert.strictEqual(ollama.pending_actions.length, 1); + assert.strictEqual(ollama.completed_actions.length, 0); // Add a completed action app.actions.add_from_json({ @@ -77,8 +75,8 @@ describe('Ollama', () => { }, }); - expect(ollama.pending_actions).toHaveLength(1); - expect(ollama.completed_actions).toHaveLength(1); + assert.strictEqual(ollama.pending_actions.length, 1); + assert.strictEqual(ollama.completed_actions.length, 1); }); test('should derive models from app.models', () => { @@ -92,11 +90,11 @@ describe('Ollama', () => { const ollama = new Ollama({app}); - expect(ollama.models).toHaveLength(2); - expect(ollama.models.length).toBe(2); - 
expect(ollama.model_names).toContain('llama3.2:1b'); - expect(ollama.model_names).toContain('gemma3:1b'); - expect(ollama.model_names).not.toContain('gpt-4'); + assert.strictEqual(ollama.models.length, 2); + assert.strictEqual(ollama.models.length, 2); + assert.include(ollama.model_names, 'llama3.2:1b'); + assert.include(ollama.model_names, 'gemma3:1b'); + assert.notInclude(ollama.model_names, 'gpt-4'); }); test('should update derived state correctly', () => { @@ -110,8 +108,8 @@ describe('Ollama', () => { const ollama = new Ollama({app}); ollama.list_status = 'success'; - expect(ollama.available).toBe(true); - expect(ollama.models.length).toBe(2); + assert.ok(ollama.available); + assert.strictEqual(ollama.models.length, 2); }); test('should clear model details', () => { @@ -129,15 +127,15 @@ describe('Ollama', () => { const ollama = new Ollama({app}); const model = app.models.find_by_name('test_model'); - expect(model).toBeDefined(); - expect(model!.ollama_show_response_loaded).toBe(true); - expect(model!.ollama_show_response).toEqual({license: 'MIT'}); + assert.isDefined(model); + assert.ok(model.ollama_show_response_loaded); + assert.deepEqual(model.ollama_show_response, {license: 'MIT'}); - ollama.clear_model_details(model!); + ollama.clear_model_details(model); - expect(model!.ollama_show_response).toBeUndefined(); - expect(model!.ollama_show_response_loaded).toBe(false); - expect(model!.ollama_show_response_error).toBeUndefined(); + assert.ok(model.ollama_show_response === undefined); + assert.ok(!model.ollama_show_response_loaded); + assert.ok(model.ollama_show_response_error === undefined); }); test('should handle model_by_name map', () => { @@ -151,22 +149,22 @@ describe('Ollama', () => { const ollama = new Ollama({app}); - expect(ollama.model_by_name.size).toBe(2); - expect(ollama.model_by_name.get('test1')?.name).toBe('test1'); - expect(ollama.model_by_name.get('test2')?.name).toBe('test2'); - expect(ollama.model_by_name.has('other')).toBe(false); + 
assert.strictEqual(ollama.model_by_name.size, 2); + assert.strictEqual(ollama.model_by_name.get('test1')?.name, 'test1'); + assert.strictEqual(ollama.model_by_name.get('test2')?.name, 'test2'); + assert.ok(!ollama.model_by_name.has('other')); }); test('should initialize ps state correctly', () => { const app = create_test_app(); const ollama = new Ollama({app}); - expect(ollama.ps_response).toBeNull(); - expect(ollama.ps_status).toBe('initial'); - expect(ollama.ps_error).toBeNull(); - expect(ollama.ps_polling_enabled).toBe(false); - expect(ollama.running_models).toEqual([]); - expect(ollama.running_model_names.size).toBe(0); + assert.isNull(ollama.ps_response); + assert.strictEqual(ollama.ps_status, 'initial'); + assert.isNull(ollama.ps_error); + assert.ok(!ollama.ps_polling_enabled); + assert.deepEqual(ollama.running_models, []); + assert.strictEqual(ollama.running_model_names.size, 0); }); test('should derive running models from ps response', () => { @@ -197,15 +195,15 @@ describe('Ollama', () => { ], }; - expect(ollama.running_models).toHaveLength(2); - expect(ollama.running_models[0]!.name).toBe('llama3.2:1b'); - expect(ollama.running_models[0]!.size_vram).toBe(1024 * 1024 * 1024); - expect(ollama.running_models[1]!.name).toBe('gemma:2b'); - expect(ollama.running_models[1]!.size_vram).toBe(2 * 1024 * 1024 * 1024); + assert.strictEqual(ollama.running_models.length, 2); + assert.strictEqual(ollama.running_models[0]!.name, 'llama3.2:1b'); + assert.strictEqual(ollama.running_models[0]!.size_vram, 1024 * 1024 * 1024); + assert.strictEqual(ollama.running_models[1]!.name, 'gemma:2b'); + assert.strictEqual(ollama.running_models[1]!.size_vram, 2 * 1024 * 1024 * 1024); - expect(ollama.running_model_names.has('llama3.2:1b')).toBe(true); - expect(ollama.running_model_names.has('gemma:2b')).toBe(true); - expect(ollama.running_model_names.has('other')).toBe(false); + assert.ok(ollama.running_model_names.has('llama3.2:1b')); + 
assert.ok(ollama.running_model_names.has('gemma:2b')); + assert.ok(!ollama.running_model_names.has('other')); }); test('should handle ps polling state', () => { @@ -214,19 +212,19 @@ describe('Ollama', () => { // Start polling ollama.start_ps_polling(); - expect(ollama.ps_polling_enabled).toBe(true); + assert.ok(ollama.ps_polling_enabled); // Starting again should be safe ollama.start_ps_polling(); - expect(ollama.ps_polling_enabled).toBe(true); + assert.ok(ollama.ps_polling_enabled); // Stop polling ollama.stop_ps_polling(); - expect(ollama.ps_polling_enabled).toBe(false); + assert.ok(!ollama.ps_polling_enabled); // Stopping again should be safe ollama.stop_ps_polling(); - expect(ollama.ps_polling_enabled).toBe(false); + assert.ok(!ollama.ps_polling_enabled); }); test('should filter ollama actions correctly', () => { @@ -288,10 +286,19 @@ describe('Ollama', () => { }, }); - expect(ollama.actions).toHaveLength(2); - expect(ollama.actions.map((a) => a.method)).toContain('ollama_pull'); - expect(ollama.actions.map((a) => a.method)).toContain('ollama_list'); - expect(ollama.actions.map((a) => a.method)).not.toContain('completion_create'); + assert.strictEqual(ollama.actions.length, 2); + assert.include( + ollama.actions.map((a) => a.method), + 'ollama_pull', + ); + assert.include( + ollama.actions.map((a) => a.method), + 'ollama_list', + ); + assert.notInclude( + ollama.actions.map((a) => a.method), + 'completion_create', + ); }); test('should filter read operations when show_read_actions is false', () => { @@ -336,13 +343,13 @@ describe('Ollama', () => { }); // With show_read_actions = false (default) - expect(ollama.show_read_actions).toBe(false); - expect(ollama.filtered_actions).toHaveLength(1); - expect(ollama.filtered_actions[0]!.method).toBe('ollama_pull'); + assert.ok(!ollama.show_read_actions); + assert.strictEqual(ollama.filtered_actions.length, 1); + assert.strictEqual(ollama.filtered_actions[0]!.method, 'ollama_pull'); // With show_read_actions = true 
ollama.show_read_actions = true; - expect(ollama.filtered_actions).toHaveLength(2); + assert.strictEqual(ollama.filtered_actions.length, 2); }); test('should handle action progress tracking', () => { @@ -368,8 +375,8 @@ describe('Ollama', () => { }, }); - expect(ollama.pending_actions).toHaveLength(1); - expect(ollama.pending_actions[0]!.action_event_data?.progress).toEqual({ + assert.strictEqual(ollama.pending_actions.length, 1); + assert.deepEqual(ollama.pending_actions[0]!.action_event_data?.progress, { status: 'downloading', completed: 50, total: 100, @@ -394,12 +401,12 @@ describe('Ollama', () => { // Update progress through the action event action_event.update_progress({status: 'downloading', completed: 75, total: 100}); - expect(action.action_event_data?.progress).toEqual({ + assert.deepEqual(action.action_event_data?.progress, { status: 'downloading', completed: 75, total: 100, }); - expect(ollama.pending_actions[0]!.action_event_data?.progress).toEqual({ + assert.deepEqual(ollama.pending_actions[0]!.action_event_data?.progress, { status: 'downloading', completed: 75, total: 100, @@ -410,10 +417,10 @@ describe('Ollama', () => { const app = create_test_app(); const ollama = new Ollama({app}); - expect(ollama.actions).toHaveLength(0); - expect(ollama.pending_actions).toHaveLength(0); - expect(ollama.completed_actions).toHaveLength(0); - expect(ollama.filtered_actions).toHaveLength(0); + assert.strictEqual(ollama.actions.length, 0); + assert.strictEqual(ollama.pending_actions.length, 0); + assert.strictEqual(ollama.completed_actions.length, 0); + assert.strictEqual(ollama.filtered_actions.length, 0); }); test('should handle failed actions', () => { @@ -438,9 +445,9 @@ describe('Ollama', () => { }, }); - expect(ollama.pending_actions).toHaveLength(0); - expect(ollama.completed_actions).toHaveLength(1); - expect(ollama.completed_actions[0]!.action_event_data?.step).toBe('failed'); + assert.strictEqual(ollama.pending_actions.length, 0); + 
assert.strictEqual(ollama.completed_actions.length, 1); + assert.strictEqual(ollama.completed_actions[0]!.action_event_data?.step, 'failed'); }); test('should only include ollama provider models', () => { @@ -453,9 +460,9 @@ describe('Ollama', () => { const ollama = new Ollama({app}); - expect(ollama.models).toHaveLength(1); - expect(ollama.models[0]!.name).toBe('ollama_model'); - expect(ollama.models[0]!.provider_name).toBe('ollama'); + assert.strictEqual(ollama.models.length, 1); + assert.strictEqual(ollama.models[0]!.name, 'ollama_model'); + assert.strictEqual(ollama.models[0]!.provider_name, 'ollama'); }); test('should handle ps response with empty models array', () => { @@ -464,7 +471,7 @@ describe('Ollama', () => { ollama.ps_response = {models: []}; - expect(ollama.running_models).toHaveLength(0); - expect(ollama.running_model_names.size).toBe(0); + assert.strictEqual(ollama.running_models.length, 0); + assert.strictEqual(ollama.running_model_names.size, 0); }); }); diff --git a/src/test/part.svelte.base.test.ts b/src/test/part.svelte.base.test.ts index d00e94722..5f90bf3c3 100644 --- a/src/test/part.svelte.base.test.ts +++ b/src/test/part.svelte.base.test.ts @@ -1,16 +1,15 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, describe, beforeEach} from 'vitest'; +import {test, assert, describe, beforeEach} from 'vitest'; import {Part, TextPart, DiskfilePart} from '$lib/part.svelte.js'; import {create_uuid, get_datetime_now} from '$lib/zod_helpers.js'; import {DiskfileDirectoryPath, DiskfilePath} from '$lib/diskfile_types.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; import {estimate_token_count} from '$lib/helpers.js'; +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; + // Test suite variables let app: Frontend; @@ -43,34 +42,34 @@ describe('Part base class functionality', () => { for (const part of [text_part, diskfile_part]) { 
part.add_attribute({key: 'test-attr', value: 'test-value'}); - expect(part.attributes).toHaveLength(1); + assert.strictEqual(part.attributes.length, 1); let first_attr = part.attributes[0]; if (!first_attr) throw new Error('Expected first attribute'); - expect(first_attr.key).toBe('test-attr'); - expect(first_attr.value).toBe('test-value'); + assert.strictEqual(first_attr.key, 'test-attr'); + assert.strictEqual(first_attr.value, 'test-value'); const attr_id = first_attr.id; const updated = part.update_attribute(attr_id, {value: 'updated-value'}); - expect(updated).toBe(true); + assert.ok(updated); first_attr = part.attributes[0]; if (!first_attr) throw new Error('Expected attribute after update'); - expect(first_attr.key).toBe('test-attr'); - expect(first_attr.value).toBe('updated-value'); + assert.strictEqual(first_attr.key, 'test-attr'); + assert.strictEqual(first_attr.value, 'updated-value'); part.update_attribute(attr_id, {key: 'updated-key', value: 'updated-value-2'}); first_attr = part.attributes[0]; if (!first_attr) throw new Error('Expected attribute after second update'); - expect(first_attr.key).toBe('updated-key'); - expect(first_attr.value).toBe('updated-value-2'); + assert.strictEqual(first_attr.key, 'updated-key'); + assert.strictEqual(first_attr.value, 'updated-value-2'); part.remove_attribute(attr_id); - expect(part.attributes).toHaveLength(0); + assert.strictEqual(part.attributes.length, 0); const non_existent_update = part.update_attribute(create_uuid(), { value: 'test', }); - expect(non_existent_update).toBe(false); + assert.ok(!non_existent_update); } }); @@ -82,14 +81,14 @@ describe('Part base class functionality', () => { }); // Test initial derivations - expect(text_part.length).toBe(TEST_CONTENT.BASIC.length); - expect(text_part.token_count).toBe(estimate_token_count(TEST_CONTENT.BASIC)); + assert.strictEqual(text_part.length, TEST_CONTENT.BASIC.length); + assert.strictEqual(text_part.token_count, estimate_token_count(TEST_CONTENT.BASIC)); 
// Test derivations after content change text_part.content = TEST_CONTENT.SECONDARY; - expect(text_part.length).toBe(TEST_CONTENT.SECONDARY.length); - expect(text_part.token_count).toBe(estimate_token_count(TEST_CONTENT.SECONDARY)); + assert.strictEqual(text_part.length, TEST_CONTENT.SECONDARY.length); + assert.strictEqual(text_part.token_count, estimate_token_count(TEST_CONTENT.SECONDARY)); }); }); @@ -107,15 +106,15 @@ describe('Part factory method', () => { name: 'Diskfile Part', }); - expect(text_part).toBeInstanceOf(TextPart); - expect(text_part.type).toBe('text'); - expect(text_part.name).toBe('Text Part'); - expect(text_part.content).toBe(TEST_CONTENT.BASIC); + assert.instanceOf(text_part, TextPart); + assert.strictEqual(text_part.type, 'text'); + assert.strictEqual(text_part.name, 'Text Part'); + assert.strictEqual(text_part.content, TEST_CONTENT.BASIC); - expect(diskfile_part).toBeInstanceOf(DiskfilePart); - expect(diskfile_part.type).toBe('diskfile'); - expect(diskfile_part.name).toBe('Diskfile Part'); - expect(diskfile_part.path).toBe(TEST_PATH); + assert.instanceOf(diskfile_part, DiskfilePart); + assert.strictEqual(diskfile_part.type, 'diskfile'); + assert.strictEqual(diskfile_part.name, 'Diskfile Part'); + assert.strictEqual(diskfile_part.path, TEST_PATH); }); test('Part.create throws error for unknown part type', () => { @@ -123,7 +122,7 @@ describe('Part factory method', () => { type: 'unknown' as const, }; - expect(() => Part.create(app, invalid_json as any)).toThrow('Unreachable case: unknown'); + assert.throws(() => Part.create(app, invalid_json as any), /Unreachable case: unknown/); }); test('Part.create throws error for missing type field', () => { @@ -131,8 +130,9 @@ describe('Part factory method', () => { name: 'Test', }; - expect(() => Part.create(app, invalid_json as any)).toThrow( - 'Missing required "type" field in part JSON', + assert.throws( + () => Part.create(app, invalid_json as any), + /Missing required "type" field in part JSON/, ); 
}); }); @@ -145,16 +145,16 @@ describe('TextPart specific behavior', () => { content: TEST_CONTENT.BASIC, }); - expect(part.type).toBe('text'); - expect(part.content).toBe(TEST_CONTENT.BASIC); + assert.strictEqual(part.type, 'text'); + assert.strictEqual(part.content, TEST_CONTENT.BASIC); // Test update method part.content = TEST_CONTENT.SECONDARY; - expect(part.content).toBe(TEST_CONTENT.SECONDARY); + assert.strictEqual(part.content, TEST_CONTENT.SECONDARY); // Test direct property assignment part.content = TEST_CONTENT.EMPTY; - expect(part.content).toBe(TEST_CONTENT.EMPTY); + assert.strictEqual(part.content, TEST_CONTENT.EMPTY); }); test('TextPart serialization and deserialization', () => { @@ -187,22 +187,22 @@ describe('TextPart specific behavior', () => { const restored = app.cell_registry.instantiate('TextPart', json); // Verify all properties were preserved - expect(restored.id).toBe(test_id); - expect(restored.created).toBe(test_date); - expect(restored.content).toBe(TEST_CONTENT.BASIC); - expect(restored.name).toBe('Test part'); - expect(restored.has_xml_tag).toBe(true); - expect(restored.xml_tag_name).toBe('test'); - expect(restored.start).toBe(5); - expect(restored.end).toBe(15); - expect(restored.enabled).toBe(false); - expect(restored.title).toBe('Test Title'); - expect(restored.summary).toBe('Test Summary'); - expect(restored.attributes).toHaveLength(1); + assert.strictEqual(restored.id, test_id); + assert.strictEqual(restored.created, test_date); + assert.strictEqual(restored.content, TEST_CONTENT.BASIC); + assert.strictEqual(restored.name, 'Test part'); + assert.ok(restored.has_xml_tag); + assert.strictEqual(restored.xml_tag_name, 'test'); + assert.strictEqual(restored.start, 5); + assert.strictEqual(restored.end, 15); + assert.ok(!restored.enabled); + assert.strictEqual(restored.title, 'Test Title'); + assert.strictEqual(restored.summary, 'Test Summary'); + assert.strictEqual(restored.attributes.length, 1); const restored_attr = 
restored.attributes[0]; if (!restored_attr) throw new Error('Expected restored attribute'); - expect(restored_attr.key).toBe('class'); - expect(restored_attr.value).toBe('highlight'); + assert.strictEqual(restored_attr.key, 'class'); + assert.strictEqual(restored_attr.value, 'highlight'); }); test('TextPart cloning creates independent copy', () => { @@ -217,21 +217,21 @@ describe('TextPart specific behavior', () => { const clone = original.clone(); // Verify initial state is the same except id - expect(clone.id).not.toBe(original.id); - expect(clone.content).toBe(original.content); - expect(clone.name).toBe(original.name); + assert.ok(clone.id !== original.id); + assert.strictEqual(clone.content, original.content); + assert.strictEqual(clone.name, original.name); // Modify clone clone.content = TEST_CONTENT.SECONDARY; clone.name = 'Modified'; // Verify original remains unchanged - expect(original.content).toBe(TEST_CONTENT.BASIC); - expect(original.name).toBe('Original'); + assert.strictEqual(original.content, TEST_CONTENT.BASIC); + assert.strictEqual(original.name, 'Original'); // Verify clone has new values - expect(clone.content).toBe(TEST_CONTENT.SECONDARY); - expect(clone.name).toBe('Modified'); + assert.strictEqual(clone.content, TEST_CONTENT.SECONDARY); + assert.strictEqual(clone.name, 'Modified'); }); }); @@ -251,17 +251,17 @@ describe('DiskfilePart specific behavior', () => { }); // Test basic properties - expect(part.type).toBe('diskfile'); - expect(part.path).toBe(TEST_PATH); - expect(part.diskfile).toEqual(diskfile); - expect(part.content).toBe(TEST_CONTENT.BASIC); + assert.strictEqual(part.type, 'diskfile'); + assert.strictEqual(part.path, TEST_PATH); + assert.deepEqual(part.diskfile, diskfile); + assert.strictEqual(part.content, TEST_CONTENT.BASIC); // Update content through part part.content = TEST_CONTENT.SECONDARY; // Verify both part and diskfile were updated - expect(part.content).toBe(TEST_CONTENT.SECONDARY); - 
expect(part.diskfile?.content).toBe(TEST_CONTENT.SECONDARY); + assert.strictEqual(part.content, TEST_CONTENT.SECONDARY); + assert.strictEqual(part.diskfile?.content, TEST_CONTENT.SECONDARY); }); test('DiskfilePart handles null path properly', () => { @@ -270,9 +270,9 @@ describe('DiskfilePart specific behavior', () => { path: null, }); - expect(part.path).toBeNull(); - expect(part.diskfile).toBeNull(); - expect(part.content).toBeUndefined(); + assert.isNull(part.path); + assert.isNull(part.diskfile); + assert.isUndefined(part.content); }); test('DiskfilePart handles changing path', () => { @@ -298,14 +298,14 @@ describe('DiskfilePart specific behavior', () => { path: path1, }); - expect(part.path).toBe(path1); - expect(part.content).toBe('File 1 content'); + assert.strictEqual(part.path, path1); + assert.strictEqual(part.content, 'File 1 content'); // Change path to reference second file part.path = path2; - expect(part.path).toBe(path2); - expect(part.content).toBe('File 2 content'); + assert.strictEqual(part.path, path2); + assert.strictEqual(part.content, 'File 2 content'); }); }); @@ -325,11 +325,11 @@ describe('Common part behavior across types', () => { end: 20, }); - expect(text_part.start).toBe(5); - expect(text_part.end).toBe(10); + assert.strictEqual(text_part.start, 5); + assert.strictEqual(text_part.end, 10); - expect(diskfile_part.start).toBe(15); - expect(diskfile_part.end).toBe(20); + assert.strictEqual(diskfile_part.start, 15); + assert.strictEqual(diskfile_part.end, 20); text_part.start = 6; text_part.end = 11; @@ -337,11 +337,11 @@ describe('Common part behavior across types', () => { diskfile_part.start = 16; diskfile_part.end = 21; - expect(text_part.start).toBe(6); - expect(text_part.end).toBe(11); + assert.strictEqual(text_part.start, 6); + assert.strictEqual(text_part.end, 11); - expect(diskfile_part.start).toBe(16); - expect(diskfile_part.end).toBe(21); + assert.strictEqual(diskfile_part.start, 16); + assert.strictEqual(diskfile_part.end, 
21); }); test('XML tag properties work across part types', () => { @@ -357,30 +357,30 @@ describe('Common part behavior across types', () => { xml_tag_name: 'file-tag', }); - expect(text_part.has_xml_tag).toBe(true); - expect(text_part.xml_tag_name).toBe('text-tag'); + assert.ok(text_part.has_xml_tag); + assert.strictEqual(text_part.xml_tag_name, 'text-tag'); - expect(diskfile_part.has_xml_tag).toBe(true); - expect(diskfile_part.xml_tag_name).toBe('file-tag'); + assert.ok(diskfile_part.has_xml_tag); + assert.strictEqual(diskfile_part.xml_tag_name, 'file-tag'); text_part.has_xml_tag = false; text_part.xml_tag_name = ''; diskfile_part.xml_tag_name = 'updated-file-tag'; - expect(text_part.has_xml_tag).toBe(false); - expect(text_part.xml_tag_name).toBe(''); + assert.ok(!text_part.has_xml_tag); + assert.strictEqual(text_part.xml_tag_name, ''); - expect(diskfile_part.has_xml_tag).toBe(true); - expect(diskfile_part.xml_tag_name).toBe('updated-file-tag'); + assert.ok(diskfile_part.has_xml_tag); + assert.strictEqual(diskfile_part.xml_tag_name, 'updated-file-tag'); }); test('has_xml_tag defaults correctly for each part type', () => { const text_part = app.cell_registry.instantiate('TextPart'); const diskfile_part = app.cell_registry.instantiate('DiskfilePart'); - expect(text_part.has_xml_tag).toBe(false); - expect(diskfile_part.has_xml_tag).toBe(true); + assert.ok(!text_part.has_xml_tag); + assert.ok(diskfile_part.has_xml_tag); const custom_text_part = app.cell_registry.instantiate('TextPart', { has_xml_tag: true, @@ -389,7 +389,7 @@ describe('Common part behavior across types', () => { has_xml_tag: false, }); - expect(custom_text_part.has_xml_tag).toBe(true); - expect(custom_diskfile_part.has_xml_tag).toBe(false); + assert.ok(custom_text_part.has_xml_tag); + assert.ok(!custom_diskfile_part.has_xml_tag); }); }); diff --git a/src/test/part.svelte.diskfile.test.ts b/src/test/part.svelte.diskfile.test.ts index 19e9534d5..3957b17fc 100644 --- 
a/src/test/part.svelte.diskfile.test.ts +++ b/src/test/part.svelte.diskfile.test.ts @@ -1,14 +1,13 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, describe, beforeEach} from 'vitest'; +import {test, describe, beforeEach, assert} from 'vitest'; import {create_uuid, get_datetime_now} from '$lib/zod_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; import {DiskfilePath, SerializableDisknode} from '$lib/diskfile_types.js'; import type {Diskfile} from '$lib/diskfile.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; const TEST_DIR = SerializableDisknode.shape.source_dir.parse('/test/'); @@ -87,15 +86,15 @@ describe('DiskfilePart initialization', () => { path, }); - expect(part.type).toBe('diskfile'); - expect(part.path).toBe(path); - expect(part.name).toBe(''); - expect(part.enabled).toBe(true); - expect(part.has_xml_tag).toBe(true); - expect(part.xml_tag_name).toBe(''); - expect(part.attributes).toEqual([]); - expect(part.start).toBeNull(); - expect(part.end).toBeNull(); + assert.strictEqual(part.type, 'diskfile'); + assert.strictEqual(part.path, path); + assert.strictEqual(part.name, ''); + assert.ok(part.enabled); + assert.ok(part.has_xml_tag); + assert.strictEqual(part.xml_tag_name, ''); + assert.deepEqual(part.attributes, []); + assert.isNull(part.start); + assert.isNull(part.end); }); test('initializes from json with complete properties', () => { @@ -119,22 +118,22 @@ describe('DiskfilePart initialization', () => { attributes: [{id: create_uuid(), key: 'format', value: 'json'}], }); - expect(part.id).toBe(test_id); - expect(part.created).toBe(test_date); - expect(part.path).toBe(test_path); - expect(part.name).toBe('Config file'); - expect(part.has_xml_tag).toBe(true); - expect(part.xml_tag_name).toBe('config'); - expect(part.title).toBe('Configuration'); - expect(part.summary).toBe('System configuration file'); - 
expect(part.start).toBe(5); - expect(part.end).toBe(20); - expect(part.enabled).toBe(false); - expect(part.attributes).toHaveLength(1); + assert.strictEqual(part.id, test_id); + assert.strictEqual(part.created, test_date); + assert.strictEqual(part.path, test_path); + assert.strictEqual(part.name, 'Config file'); + assert.ok(part.has_xml_tag); + assert.strictEqual(part.xml_tag_name, 'config'); + assert.strictEqual(part.title, 'Configuration'); + assert.strictEqual(part.summary, 'System configuration file'); + assert.strictEqual(part.start, 5); + assert.strictEqual(part.end, 20); + assert.ok(!part.enabled); + assert.strictEqual(part.attributes.length, 1); const first_attr = part.attributes[0]; if (!first_attr) throw new Error('Expected first attribute'); - expect(first_attr.key).toBe('format'); - expect(first_attr.value).toBe('json'); + assert.strictEqual(first_attr.key, 'format'); + assert.strictEqual(first_attr.value, 'json'); }); test('initializes with null path', () => { @@ -143,9 +142,9 @@ describe('DiskfilePart initialization', () => { path: null, }); - expect(part.path).toBeNull(); - expect(part.diskfile).toBeNull(); - expect(part.content).toBeUndefined(); + assert.isNull(part.path); + assert.isNull(part.diskfile); + assert.ok(part.content === undefined); }); }); @@ -159,8 +158,8 @@ describe('DiskfilePart content access', () => { path, }); - expect(part.content).toBe(content); - expect(part.diskfile).toEqual(test_diskfiles.get(path)); + assert.strictEqual(part.content, content); + assert.deepEqual(part.diskfile, test_diskfiles.get(path)); }); test('content setter updates diskfile content', () => { @@ -174,15 +173,15 @@ describe('DiskfilePart content access', () => { }); // Verify initial state - expect(part.content).toBe(initial_content); + assert.strictEqual(part.content, initial_content); // Update content part.content = updated_content; // Verify diskfile was updated - get it fresh from zzz const diskfile = app.diskfiles.get_by_path(path); - 
expect(diskfile?.content).toBe(updated_content); - expect(part.content).toBe(updated_content); + assert.strictEqual(diskfile?.content, updated_content); + assert.strictEqual(part.content, updated_content); }); test('assigning part content updates diskfile content', () => { @@ -196,15 +195,15 @@ describe('DiskfilePart content access', () => { }); // Verify initial state - expect(part.content).toBe(initial_content); + assert.strictEqual(part.content, initial_content); // Update content using assignment part.content = updated_content; // Verify diskfile was updated - get it fresh from zzz const diskfile = app.diskfiles.get_by_path(path); - expect(diskfile?.content).toBe(updated_content); - expect(part.content).toBe(updated_content); + assert.strictEqual(diskfile?.content, updated_content); + assert.strictEqual(part.content, updated_content); }); test('content is undefined when diskfile not found', () => { @@ -215,8 +214,8 @@ describe('DiskfilePart content access', () => { path, }); - expect(part.diskfile).toBeUndefined(); - expect(part.content).toBeUndefined(); + assert.ok(part.diskfile === undefined); + assert.ok(part.content === undefined); }); test('setting content to null logs error in development', () => { @@ -242,11 +241,11 @@ describe('DiskfilePart content access', () => { console.error = original_console_error; // Verify error was logged - expect(error_called).toBe(true); + assert.ok(error_called); // Verify diskfile content was not changed const diskfile = test_diskfiles.get(path); - expect(diskfile?.content).toBe(TEST_CONTENT.BASIC); + assert.strictEqual(diskfile?.content, TEST_CONTENT.BASIC); }); }); @@ -262,15 +261,15 @@ describe('DiskfilePart reactive properties', () => { }); // Verify initial state - expect(part.content).toBe(initial_content); - expect(part.length).toBe(initial_content.length); + assert.strictEqual(part.content, initial_content); + assert.strictEqual(part.length, initial_content.length); // Update diskfile content directly 
part.diskfile!.content = updated_content; // Verify derived properties update - expect(part.content).toBe(updated_content); - expect(part.length).toBe(updated_content.length); + assert.strictEqual(part.content, updated_content); + assert.strictEqual(part.length, updated_content.length); }); test('derived properties update when path changes', () => { @@ -283,14 +282,14 @@ describe('DiskfilePart reactive properties', () => { }); // Verify initial state - expect(part.content).toBe(TEST_CONTENT.BASIC); + assert.strictEqual(part.content, TEST_CONTENT.BASIC); // Change path part.path = path2; // Verify derived properties update - expect(part.content).toBe(TEST_CONTENT.CONFIG); - expect(part.diskfile).toEqual(test_diskfiles.get(path2)); + assert.strictEqual(part.content, TEST_CONTENT.CONFIG); + assert.deepEqual(part.diskfile, test_diskfiles.get(path2)); }); }); @@ -312,15 +311,15 @@ describe('DiskfilePart serialization', () => { const json = part.to_json(); - expect(json.id).toBe(test_id); - expect(json.type).toBe('diskfile'); - expect(json.created).toBe(created); - expect(json.path).toBe(path); - expect(json.name).toBe('Test file'); - expect(json.start).toBe(10); - expect(json.end).toBe(20); - expect(json.has_xml_tag).toBe(true); - expect(json.enabled).toBe(true); + assert.strictEqual(json.id, test_id); + assert.strictEqual(json.type, 'diskfile'); + assert.strictEqual(json.created, created); + assert.strictEqual(json.path, path); + assert.strictEqual(json.name, 'Test file'); + assert.strictEqual(json.start, 10); + assert.strictEqual(json.end, 20); + assert.ok(json.has_xml_tag); + assert.ok(json.enabled); }); test('clone creates independent copy with same path', () => { @@ -336,18 +335,18 @@ describe('DiskfilePart serialization', () => { const clone = original.clone(); // Verify they have same initial values except id - expect(clone.id).not.toBe(original.id); - expect(clone.path).toBe(original_path); - expect(clone.name).toBe('Original name'); + 
assert.notStrictEqual(clone.id, original.id); + assert.strictEqual(clone.path, original_path); + assert.strictEqual(clone.name, 'Original name'); // Verify they're independent objects clone.path = modified_path; clone.name = 'Modified name'; - expect(original.path).toBe(original_path); - expect(original.name).toBe('Original name'); - expect(clone.path).toBe(modified_path); - expect(clone.name).toBe('Modified name'); + assert.strictEqual(original.path, original_path); + assert.strictEqual(original.name, 'Original name'); + assert.strictEqual(clone.path, modified_path); + assert.strictEqual(clone.name, 'Modified name'); }); }); @@ -360,9 +359,9 @@ describe('DiskfilePart edge cases', () => { path, }); - expect(part.path).toBe(path); - expect(part.content).toBe(TEST_CONTENT.BASIC); - expect(part.diskfile).toEqual(test_diskfiles.get(path)); + assert.strictEqual(part.path, path); + assert.strictEqual(part.content, TEST_CONTENT.BASIC); + assert.deepEqual(part.diskfile, test_diskfiles.get(path)); }); test('handles empty content', () => { @@ -375,9 +374,9 @@ describe('DiskfilePart edge cases', () => { path, }); - expect(part.content).toBe(''); - expect(part.length).toBe(0); - expect(part.token_count).toBe(0); + assert.strictEqual(part.content, ''); + assert.strictEqual(part.length, 0); + assert.strictEqual(part.token_count, 0); }); test('handles binary file content', () => { @@ -391,8 +390,8 @@ describe('DiskfilePart edge cases', () => { path, }); - expect(part.content).toBe(binary_content); - expect(part.length).toBe(binary_content.length); + assert.strictEqual(part.content, binary_content); + assert.strictEqual(part.length, binary_content.length); }); test('handles changing from null path to valid path', () => { @@ -402,18 +401,18 @@ describe('DiskfilePart edge cases', () => { }); // Verify initial state - expect(part.path).toBeNull(); - expect(part.diskfile).toBeNull(); - expect(part.content).toBeUndefined(); + assert.isNull(part.path); + assert.isNull(part.diskfile); + 
assert.ok(part.content === undefined); // Set to valid path const path = TEST_PATHS.BASIC; part.path = path; // Verify properties updated - expect(part.path).toBe(path); - expect(part.diskfile?.id).toBe(test_diskfiles.get(path)?.id); - expect(part.content).toBe(TEST_CONTENT.BASIC); + assert.strictEqual(part.path, path); + assert.strictEqual((part as any).diskfile?.id, test_diskfiles.get(path)?.id); + assert.strictEqual(part.content, TEST_CONTENT.BASIC); }); test('handles changing from valid path to null path', () => { @@ -424,16 +423,16 @@ describe('DiskfilePart edge cases', () => { }); // Verify initial state - expect(part.path).toBe(path); - expect(part.diskfile?.id).toBe(test_diskfiles.get(path)?.id); + assert.strictEqual(part.path, path); + assert.strictEqual((part as any).diskfile?.id, test_diskfiles.get(path)?.id); // Set to null path part.path = null; // Verify properties updated - expect(part.path).toBeNull(); - expect(part.diskfile).toBeNull(); - expect(part.content).toBeUndefined(); + assert.isNull(part.path); + assert.isNull(part.diskfile); + assert.ok(part.content === undefined); }); }); @@ -446,29 +445,29 @@ describe('DiskfilePart attribute management', () => { // Add attribute part.add_attribute({key: 'mime-type', value: 'text/plain'}); - expect(part.attributes).toHaveLength(1); + assert.strictEqual(part.attributes.length, 1); let first_attr = part.attributes[0]; if (!first_attr) throw new Error('Expected first attribute'); - expect(first_attr.key).toBe('mime-type'); - expect(first_attr.value).toBe('text/plain'); + assert.strictEqual(first_attr.key, 'mime-type'); + assert.strictEqual(first_attr.value, 'text/plain'); const attr_id = first_attr.id; // Update attribute const updated = part.update_attribute(attr_id, {value: 'application/text'}); - expect(updated).toBe(true); + assert.ok(updated); first_attr = part.attributes[0]; if (!first_attr) throw new Error('Expected attribute after update'); - expect(first_attr.key).toBe('mime-type'); - 
expect(first_attr.value).toBe('application/text'); + assert.strictEqual(first_attr.key, 'mime-type'); + assert.strictEqual(first_attr.value, 'application/text'); // Remove attribute part.remove_attribute(attr_id); - expect(part.attributes).toHaveLength(0); + assert.strictEqual(part.attributes.length, 0); // Attempting to update non-existent attribute returns false const fake_update = part.update_attribute(create_uuid(), {key: 'test', value: 'test'}); - expect(fake_update).toBe(false); + assert.ok(!fake_update); }); test('updates attribute key and value together', () => { @@ -484,11 +483,11 @@ describe('DiskfilePart attribute management', () => { // Update both key and value const updated = part.update_attribute(attr_id, {key: 'data-type', value: 'important'}); - expect(updated).toBe(true); + assert.ok(updated); const updated_attr = part.attributes[0]; if (!updated_attr) throw new Error('Expected attribute after update'); - expect(updated_attr.key).toBe('data-type'); - expect(updated_attr.value).toBe('important'); + assert.strictEqual(updated_attr.key, 'data-type'); + assert.strictEqual(updated_attr.value, 'important'); }); test('attributes are preserved when serializing to JSON', () => { @@ -502,22 +501,22 @@ describe('DiskfilePart attribute management', () => { const json = part.to_json(); - expect(json.attributes).toHaveLength(2); + assert.strictEqual(json.attributes.length, 2); const json_attr0 = json.attributes[0]; const json_attr1 = json.attributes[1]; if (!json_attr0 || !json_attr1) throw new Error('Expected both attributes in JSON'); - expect(json_attr0.key).toBe('data-test'); - expect(json_attr1.key).toBe('class'); + assert.strictEqual(json_attr0.key, 'data-test'); + assert.strictEqual(json_attr1.key, 'class'); // Verify they're properly restored const new_part = app.cell_registry.instantiate('DiskfilePart', json); - expect(new_part.attributes).toHaveLength(2); + assert.strictEqual(new_part.attributes.length, 2); const new_attr0 = new_part.attributes[0]; 
const new_attr1 = new_part.attributes[1]; if (!new_attr0 || !new_attr1) throw new Error('Expected both attributes in restored part'); - expect(new_attr0.key).toBe('data-test'); - expect(new_attr1.key).toBe('class'); + assert.strictEqual(new_attr0.key, 'data-test'); + assert.strictEqual(new_attr1.key, 'class'); }); }); @@ -530,8 +529,8 @@ describe('DiskfilePart position markers', () => { end: 25, }); - expect(part.start).toBe(10); - expect(part.end).toBe(25); + assert.strictEqual(part.start, 10); + assert.strictEqual(part.end, 25); }); test('start and end positions can be updated', () => { @@ -541,14 +540,14 @@ describe('DiskfilePart position markers', () => { }); // Initial values are null - expect(part.start).toBeNull(); - expect(part.end).toBeNull(); + assert.isNull(part.start); + assert.isNull(part.end); // Update positions part.start = 5; part.end = 15; - expect(part.start).toBe(5); - expect(part.end).toBe(15); + assert.strictEqual(part.start, 5); + assert.strictEqual(part.end, 15); }); }); diff --git a/src/test/part.svelte.text.test.ts b/src/test/part.svelte.text.test.ts index bcabbcb17..cb43b8088 100644 --- a/src/test/part.svelte.text.test.ts +++ b/src/test/part.svelte.text.test.ts @@ -1,13 +1,12 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, describe, beforeEach} from 'vitest'; +import {test, describe, beforeEach, assert} from 'vitest'; import {estimate_token_count} from '$lib/helpers.js'; import {create_uuid, get_datetime_now} from '$lib/zod_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Test suite variables let app: Frontend; @@ -40,17 +39,17 @@ describe('TextPart initialization', () => { test('creates with default values when no options provided', () => { const part = app.cell_registry.instantiate('TextPart'); - expect(part.type).toBe('text'); - 
expect(part.content).toBe(TEST_CONTENT.EMPTY); - expect(part.length).toBe(TEST_CONTENT.EMPTY.length); - expect(part.token_count).toBe(0); - expect(part.name).toBe(''); - expect(part.enabled).toBe(true); - expect(part.has_xml_tag).toBe(false); - expect(part.xml_tag_name).toBe(''); - expect(part.attributes).toEqual([]); - expect(part.start).toBeNull(); - expect(part.end).toBeNull(); + assert.strictEqual(part.type, 'text'); + assert.strictEqual(part.content, TEST_CONTENT.EMPTY); + assert.strictEqual(part.length, TEST_CONTENT.EMPTY.length); + assert.strictEqual(part.token_count, 0); + assert.strictEqual(part.name, ''); + assert.ok(part.enabled); + assert.ok(!part.has_xml_tag); + assert.strictEqual(part.xml_tag_name, ''); + assert.deepEqual(part.attributes, []); + assert.isNull(part.start); + assert.isNull(part.end); }); test('initializes with direct content property', () => { @@ -60,9 +59,9 @@ describe('TextPart initialization', () => { content, }); - expect(part.content).toBe(content); - expect(part.length).toBe(content.length); - expect(part.token_count).toBe(estimate_token_count(content)); + assert.strictEqual(part.content, content); + assert.strictEqual(part.length, content.length); + assert.strictEqual(part.token_count, estimate_token_count(content)); }); test('initializes from json with complete properties', () => { @@ -85,22 +84,22 @@ describe('TextPart initialization', () => { attributes: [{id: create_uuid(), key: 'attr1', value: 'value1'}], }); - expect(part.id).toBe(test_id); - expect(part.created).toBe(test_date); - expect(part.content).toBe('Json content'); - expect(part.name).toBe('Test name'); - expect(part.has_xml_tag).toBe(true); - expect(part.xml_tag_name).toBe('test-element'); - expect(part.title).toBe('Test Title'); - expect(part.summary).toBe('Test summary text'); - expect(part.start).toBe(5); - expect(part.end).toBe(20); - expect(part.enabled).toBe(false); - expect(part.attributes).toHaveLength(1); + assert.strictEqual(part.id, test_id); + 
assert.strictEqual(part.created, test_date); + assert.strictEqual(part.content, 'Json content'); + assert.strictEqual(part.name, 'Test name'); + assert.ok(part.has_xml_tag); + assert.strictEqual(part.xml_tag_name, 'test-element'); + assert.strictEqual(part.title, 'Test Title'); + assert.strictEqual(part.summary, 'Test summary text'); + assert.strictEqual(part.start, 5); + assert.strictEqual(part.end, 20); + assert.ok(!part.enabled); + assert.strictEqual(part.attributes.length, 1); const first_attr = part.attributes[0]; if (!first_attr) throw new Error('Expected first attribute'); - expect(first_attr.key).toBe('attr1'); - expect(first_attr.value).toBe('value1'); + assert.strictEqual(first_attr.key, 'attr1'); + assert.strictEqual(first_attr.value, 'value1'); }); }); @@ -112,18 +111,18 @@ describe('TextPart reactive properties', () => { }); // Verify initial state - expect(part.content).toBe(TEST_CONTENT.INITIAL); - expect(part.length).toBe(TEST_CONTENT.INITIAL.length); + assert.strictEqual(part.content, TEST_CONTENT.INITIAL); + assert.strictEqual(part.length, TEST_CONTENT.INITIAL.length); const initial_token_count = part.token_count; // Change content part.content = TEST_CONTENT.NEW_CONTENT; // Verify derived properties update automatically - expect(part.content).toBe(TEST_CONTENT.NEW_CONTENT); - expect(part.length).toBe(TEST_CONTENT.NEW_CONTENT.length); - expect(part.token_count).not.toBe(initial_token_count); - expect(part.token_count).toEqual(estimate_token_count(TEST_CONTENT.NEW_CONTENT)); + assert.strictEqual(part.content, TEST_CONTENT.NEW_CONTENT); + assert.strictEqual(part.length, TEST_CONTENT.NEW_CONTENT.length); + assert.notStrictEqual(part.token_count, initial_token_count); + assert.deepEqual(part.token_count, estimate_token_count(TEST_CONTENT.NEW_CONTENT)); }); test('length is zero when content is empty', () => { @@ -132,14 +131,14 @@ describe('TextPart reactive properties', () => { content: TEST_CONTENT.EMPTY, }); - 
expect(part.content).toBe(TEST_CONTENT.EMPTY); - expect(part.length).toBe(TEST_CONTENT.EMPTY.length); + assert.strictEqual(part.content, TEST_CONTENT.EMPTY); + assert.strictEqual(part.length, TEST_CONTENT.EMPTY.length); part.content = TEST_CONTENT.SOMETHING; - expect(part.length).toBe(TEST_CONTENT.SOMETHING.length); + assert.strictEqual(part.length, TEST_CONTENT.SOMETHING.length); part.content = TEST_CONTENT.EMPTY; - expect(part.length).toBe(TEST_CONTENT.EMPTY.length); + assert.strictEqual(part.length, TEST_CONTENT.EMPTY.length); }); }); @@ -160,15 +159,15 @@ describe('TextPart serialization', () => { const json = part.to_json(); - expect(json.id).toBe(test_id); - expect(json.type).toBe('text'); - expect(json.created).toBe(created); - expect(json.content).toBe('Test content'); - expect(json.name).toBe('Test part'); - expect(json.start).toBe(10); - expect(json.end).toBe(20); - expect(json.has_xml_tag).toBe(false); - expect(json.enabled).toBe(true); + assert.strictEqual(json.id, test_id); + assert.strictEqual(json.type, 'text'); + assert.strictEqual(json.created, created); + assert.strictEqual(json.content, 'Test content'); + assert.strictEqual(json.name, 'Test part'); + assert.strictEqual(json.start, 10); + assert.strictEqual(json.end, 20); + assert.ok(!json.has_xml_tag); + assert.ok(json.enabled); }); test('clone creates independent copy with same values', () => { @@ -190,18 +189,18 @@ describe('TextPart serialization', () => { const clone = original.clone(); // Verify they have same initial values except id - expect(clone.id).not.toBe(original.id); - expect(clone.content).toBe(ORIGINAL.CONTENT); - expect(clone.name).toBe(ORIGINAL.NAME); + assert.notStrictEqual(clone.id, original.id); + assert.strictEqual(clone.content, ORIGINAL.CONTENT); + assert.strictEqual(clone.name, ORIGINAL.NAME); // Verify they're independent objects clone.content = MODIFIED.CONTENT; clone.name = MODIFIED.NAME; - expect(original.content).toBe(ORIGINAL.CONTENT); - 
expect(original.name).toBe(ORIGINAL.NAME); - expect(clone.content).toBe(MODIFIED.CONTENT); - expect(clone.name).toBe(MODIFIED.NAME); + assert.strictEqual(original.content, ORIGINAL.CONTENT); + assert.strictEqual(original.name, ORIGINAL.NAME); + assert.strictEqual(clone.content, MODIFIED.CONTENT); + assert.strictEqual(clone.name, MODIFIED.NAME); }); }); @@ -213,13 +212,13 @@ describe('TextPart content modification', () => { }); // Initial state - expect(part.content).toBe(TEST_CONTENT.INITIAL); + assert.strictEqual(part.content, TEST_CONTENT.INITIAL); // Update content using assignment part.content = TEST_CONTENT.NEW_CONTENT; // Verify content was updated - expect(part.content).toBe(TEST_CONTENT.NEW_CONTENT); + assert.strictEqual(part.content, TEST_CONTENT.NEW_CONTENT); }); test('content setter directly updates content', () => { @@ -229,13 +228,13 @@ describe('TextPart content modification', () => { }); // Initial state - expect(part.content).toBe(TEST_CONTENT.INITIAL); + assert.strictEqual(part.content, TEST_CONTENT.INITIAL); // Update content using setter part.content = TEST_CONTENT.NEW_CONTENT; // Verify content was updated - expect(part.content).toBe(TEST_CONTENT.NEW_CONTENT); + assert.strictEqual(part.content, TEST_CONTENT.NEW_CONTENT); }); }); @@ -246,9 +245,9 @@ describe('TextPart content edge cases', () => { content: TEST_CONTENT.LONG, }); - expect(part.content).toBe(TEST_CONTENT.LONG); - expect(part.length).toBe(TEST_CONTENT.LONG.length); - expect(part.token_count).toBeGreaterThan(0); + assert.strictEqual(part.content, TEST_CONTENT.LONG); + assert.strictEqual(part.length, TEST_CONTENT.LONG.length); + assert.ok(part.token_count! 
> 0); }); test('handles unicode characters correctly', () => { @@ -257,9 +256,9 @@ describe('TextPart content edge cases', () => { content: TEST_CONTENT.UNICODE, }); - expect(part.content).toBe(TEST_CONTENT.UNICODE); - expect(part.length).toBe(TEST_CONTENT.UNICODE.length); - expect(part.token_count).toEqual(estimate_token_count(TEST_CONTENT.UNICODE)); + assert.strictEqual(part.content, TEST_CONTENT.UNICODE); + assert.strictEqual(part.length, TEST_CONTENT.UNICODE.length); + assert.deepEqual(part.token_count, estimate_token_count(TEST_CONTENT.UNICODE)); }); test('handles special characters correctly', () => { @@ -268,9 +267,9 @@ describe('TextPart content edge cases', () => { content: TEST_CONTENT.SPECIAL_CHARS, }); - expect(part.content).toBe(TEST_CONTENT.SPECIAL_CHARS); - expect(part.length).toBe(TEST_CONTENT.SPECIAL_CHARS.length); - expect(part.token_count).toEqual(estimate_token_count(TEST_CONTENT.SPECIAL_CHARS)); + assert.strictEqual(part.content, TEST_CONTENT.SPECIAL_CHARS); + assert.strictEqual(part.length, TEST_CONTENT.SPECIAL_CHARS.length); + assert.deepEqual(part.token_count, estimate_token_count(TEST_CONTENT.SPECIAL_CHARS)); }); test('handles code and markup content correctly', () => { @@ -279,9 +278,9 @@ describe('TextPart content edge cases', () => { content: TEST_CONTENT.CODE, }); - expect(part.content).toBe(TEST_CONTENT.CODE); - expect(part.length).toBe(TEST_CONTENT.CODE.length); - expect(part.token_count).toEqual(estimate_token_count(TEST_CONTENT.CODE)); + assert.strictEqual(part.content, TEST_CONTENT.CODE); + assert.strictEqual(part.length, TEST_CONTENT.CODE.length); + assert.deepEqual(part.token_count, estimate_token_count(TEST_CONTENT.CODE)); }); }); @@ -294,29 +293,29 @@ describe('TextPart attribute management', () => { // Add attribute part.add_attribute({key: 'class', value: 'highlight'}); - expect(part.attributes).toHaveLength(1); + assert.strictEqual(part.attributes.length, 1); let first_attr = part.attributes[0]; if (!first_attr) throw new 
Error('Expected first attribute'); - expect(first_attr.key).toBe('class'); - expect(first_attr.value).toBe('highlight'); + assert.strictEqual(first_attr.key, 'class'); + assert.strictEqual(first_attr.value, 'highlight'); const attr_id = first_attr.id; // Update attribute const updated = part.update_attribute(attr_id, {value: 'special-highlight'}); - expect(updated).toBe(true); + assert.ok(updated); first_attr = part.attributes[0]; if (!first_attr) throw new Error('Expected attribute after update'); - expect(first_attr.key).toBe('class'); - expect(first_attr.value).toBe('special-highlight'); + assert.strictEqual(first_attr.key, 'class'); + assert.strictEqual(first_attr.value, 'special-highlight'); // Remove attribute part.remove_attribute(attr_id); - expect(part.attributes).toHaveLength(0); + assert.strictEqual(part.attributes.length, 0); // Attempting to update non-existent attribute returns false const fake_update = part.update_attribute(create_uuid(), {key: 'test', value: 'test'}); - expect(fake_update).toBe(false); + assert.ok(!fake_update); }); test('updates attribute key and value together', () => { @@ -329,11 +328,11 @@ describe('TextPart attribute management', () => { // Update both key and value const updated = part.update_attribute(attr_id, {key: 'data-type', value: 'important'}); - expect(updated).toBe(true); + assert.ok(updated); const updated_attr = part.attributes[0]; if (!updated_attr) throw new Error('Expected attribute after update'); - expect(updated_attr.key).toBe('data-type'); - expect(updated_attr.value).toBe('important'); + assert.strictEqual(updated_attr.key, 'data-type'); + assert.strictEqual(updated_attr.value, 'important'); }); test('attributes are preserved when serializing to JSON', () => { @@ -347,22 +346,22 @@ describe('TextPart attribute management', () => { const json = part.to_json(); - expect(json.attributes).toHaveLength(2); + assert.strictEqual(json.attributes.length, 2); const json_attr0 = json.attributes[0]; const json_attr1 = 
json.attributes[1]; if (!json_attr0 || !json_attr1) throw new Error('Expected both attributes in JSON'); - expect(json_attr0.key).toBe('data-test'); - expect(json_attr1.key).toBe('class'); + assert.strictEqual(json_attr0.key, 'data-test'); + assert.strictEqual(json_attr1.key, 'class'); // Verify they're properly restored const new_part = app.cell_registry.instantiate('TextPart', json); - expect(new_part.attributes).toHaveLength(2); + assert.strictEqual(new_part.attributes.length, 2); const new_attr0 = new_part.attributes[0]; const new_attr1 = new_part.attributes[1]; if (!new_attr0 || !new_attr1) throw new Error('Expected both attributes in restored part'); - expect(new_attr0.key).toBe('data-test'); - expect(new_attr1.key).toBe('class'); + assert.strictEqual(new_attr0.key, 'data-test'); + assert.strictEqual(new_attr1.key, 'class'); }); }); @@ -379,7 +378,7 @@ describe('TextPart instance management', () => { // Verify it's in the registry const retrieved_part = app.parts.items.by_id.get(part.id); - expect(retrieved_part).toBe(part); + assert.strictEqual(retrieved_part, part); }); test('part is removed from registry when requested', () => { @@ -392,13 +391,13 @@ describe('TextPart instance management', () => { app.parts.items.add(part); // Verify it's in the registry - expect(app.parts.items.by_id.get(part.id)).toBe(part); + assert.strictEqual(app.parts.items.by_id.get(part.id), part); // Remove from registry app.parts.items.remove(part.id); // Verify it's gone - expect(app.parts.items.by_id.get(part.id)).toBeUndefined(); + assert.ok(app.parts.items.by_id.get(part.id) === undefined); }); }); @@ -411,8 +410,8 @@ describe('TextPart start and end position markers', () => { end: 25, }); - expect(part.start).toBe(10); - expect(part.end).toBe(25); + assert.strictEqual(part.start, 10); + assert.strictEqual(part.end, 25); }); test('start and end positions can be updated', () => { @@ -422,15 +421,15 @@ describe('TextPart start and end position markers', () => { }); // Initial 
values are null - expect(part.start).toBeNull(); - expect(part.end).toBeNull(); + assert.isNull(part.start); + assert.isNull(part.end); // Update positions part.start = 5; part.end = 15; - expect(part.start).toBe(5); - expect(part.end).toBe(15); + assert.strictEqual(part.start, 5); + assert.strictEqual(part.end, 15); }); test('positions are preserved when serializing and deserializing', () => { @@ -448,7 +447,7 @@ describe('TextPart start and end position markers', () => { const new_part = app.cell_registry.instantiate('TextPart', json); // Verify positions were preserved - expect(new_part.start).toBe(8); - expect(new_part.end).toBe(30); + assert.strictEqual(new_part.start, 8); + assert.strictEqual(new_part.end, 30); }); }); diff --git a/src/test/poller.svelte.test.ts b/src/test/poller.svelte.test.ts index f2cae906e..1f80eab7d 100644 --- a/src/test/poller.svelte.test.ts +++ b/src/test/poller.svelte.test.ts @@ -1,8 +1,6 @@ -// @slop claude_sonnet_4 - // @vitest-environment jsdom -import {test, expect, describe, vi, beforeEach, afterEach} from 'vitest'; +import {test, describe, vi, beforeEach, afterEach, assert} from 'vitest'; import {Poller} from '$lib/poller.svelte.js'; @@ -20,7 +18,7 @@ describe('Poller', () => { const poll_fn = vi.fn(); const poller = new Poller({poll_fn}); - expect(poller.active).toBe(false); + assert.ok(!poller.active); }); test('should start polling with immediate execution by default', () => { @@ -29,12 +27,12 @@ describe('Poller', () => { poller.start(); - expect(poller.active).toBe(true); - expect(poll_fn).toHaveBeenCalledTimes(1); + assert.ok(poller.active); + assert.strictEqual(poll_fn.mock.calls.length, 1); // Advance time to trigger interval vi.advanceTimersByTime(Poller.DEFAULT_INTERVAL); - expect(poll_fn).toHaveBeenCalledTimes(2); + assert.strictEqual(poll_fn.mock.calls.length, 2); }); test('should start polling without immediate execution when configured', () => { @@ -43,12 +41,12 @@ describe('Poller', () => { poller.start(); - 
expect(poller.active).toBe(true); - expect(poll_fn).not.toHaveBeenCalled(); + assert.ok(poller.active); + assert.strictEqual(poll_fn.mock.calls.length, 0); // Advance time to trigger interval vi.advanceTimersByTime(Poller.DEFAULT_INTERVAL); - expect(poll_fn).toHaveBeenCalledTimes(1); + assert.strictEqual(poll_fn.mock.calls.length, 1); }); test('should use custom interval', () => { @@ -59,15 +57,15 @@ describe('Poller', () => { // Advance by less than interval vi.advanceTimersByTime(4_000); - expect(poll_fn).not.toHaveBeenCalled(); + assert.strictEqual(poll_fn.mock.calls.length, 0); // Advance to interval vi.advanceTimersByTime(1_000); - expect(poll_fn).toHaveBeenCalledTimes(1); + assert.strictEqual(poll_fn.mock.calls.length, 1); // Advance by another interval vi.advanceTimersByTime(5_000); - expect(poll_fn).toHaveBeenCalledTimes(2); + assert.strictEqual(poll_fn.mock.calls.length, 2); }); test('should stop polling', () => { @@ -75,15 +73,15 @@ describe('Poller', () => { const poller = new Poller({poll_fn}); poller.start(); - expect(poller.active).toBe(true); - expect(poll_fn).toHaveBeenCalledTimes(1); + assert.ok(poller.active); + assert.strictEqual(poll_fn.mock.calls.length, 1); poller.stop(); - expect(poller.active).toBe(false); + assert.ok(!poller.active); // Should not poll after stopping vi.advanceTimersByTime(Poller.DEFAULT_INTERVAL); - expect(poll_fn).toHaveBeenCalledTimes(1); + assert.strictEqual(poll_fn.mock.calls.length, 1); }); test('should handle multiple starts safely', () => { @@ -94,12 +92,12 @@ describe('Poller', () => { poller.start(); poller.start(); - expect(poller.active).toBe(true); - expect(poll_fn).toHaveBeenCalledTimes(1); + assert.ok(poller.active); + assert.strictEqual(poll_fn.mock.calls.length, 1); // Should only have one interval running vi.advanceTimersByTime(Poller.DEFAULT_INTERVAL); - expect(poll_fn).toHaveBeenCalledTimes(2); + assert.strictEqual(poll_fn.mock.calls.length, 2); }); test('should handle multiple stops safely', () => { @@ 
-111,7 +109,7 @@ describe('Poller', () => { poller.stop(); poller.stop(); - expect(poller.active).toBe(false); + assert.ok(!poller.active); }); test('should handle async poll functions', () => { @@ -120,11 +118,11 @@ describe('Poller', () => { poller.start(); - expect(poll_fn).toHaveBeenCalledTimes(1); - expect(poller.active).toBe(true); + assert.strictEqual(poll_fn.mock.calls.length, 1); + assert.ok(poller.active); vi.advanceTimersByTime(Poller.DEFAULT_INTERVAL); - expect(poll_fn).toHaveBeenCalledTimes(2); + assert.strictEqual(poll_fn.mock.calls.length, 2); }); test('should handle poll function errors gracefully', () => { @@ -138,16 +136,13 @@ describe('Poller', () => { poller.start(); - expect(poll_fn).toHaveBeenCalledTimes(1); - expect(console_error_spy).toHaveBeenCalledWith( - '[poller] poll function error:', - expect.any(Error), - ); - expect(poller.active).toBe(true); + assert.strictEqual(poll_fn.mock.calls.length, 1); + assert.ok(console_error_spy.mock.calls.length > 0); + assert.ok(poller.active); // Should continue polling despite error vi.advanceTimersByTime(Poller.DEFAULT_INTERVAL); - expect(poll_fn).toHaveBeenCalledTimes(2); + assert.strictEqual(poll_fn.mock.calls.length, 2); }); test('should handle async poll function errors gracefully', () => { @@ -156,12 +151,12 @@ describe('Poller', () => { poller.start(); - expect(poll_fn).toHaveBeenCalledTimes(1); - expect(poller.active).toBe(true); + assert.strictEqual(poll_fn.mock.calls.length, 1); + assert.ok(poller.active); // Should continue polling despite async error (error handling is async) vi.advanceTimersByTime(Poller.DEFAULT_INTERVAL); - expect(poll_fn).toHaveBeenCalledTimes(2); + assert.strictEqual(poll_fn.mock.calls.length, 2); }); test('should set interval and restart if active', () => { @@ -169,18 +164,18 @@ describe('Poller', () => { const poller = new Poller({poll_fn, interval: Poller.DEFAULT_INTERVAL, immediate: false}); poller.start(); - expect(poller.active).toBe(true); + 
assert.ok(poller.active); // Change interval while active poller.set_interval(5_000); - expect(poller.active).toBe(true); + assert.ok(poller.active); // Should use new interval vi.advanceTimersByTime(5_000); - expect(poll_fn).toHaveBeenCalledTimes(1); + assert.strictEqual(poll_fn.mock.calls.length, 1); vi.advanceTimersByTime(5_000); - expect(poll_fn).toHaveBeenCalledTimes(2); + assert.strictEqual(poll_fn.mock.calls.length, 2); }); test('should set interval without restarting if inactive', () => { @@ -189,12 +184,12 @@ describe('Poller', () => { // Set interval while inactive poller.set_interval(5_000); - expect(poller.active).toBe(false); + assert.ok(!poller.active); // Start and verify new interval is used poller.start(); vi.advanceTimersByTime(5_000); - expect(poll_fn).toHaveBeenCalledTimes(2); // immediate + first interval + assert.strictEqual(poll_fn.mock.calls.length, 2); // immediate + first interval }); test('should be no-op when setting same interval', () => { @@ -208,8 +203,8 @@ describe('Poller', () => { poller.set_interval(Poller.DEFAULT_INTERVAL); // Should not have restarted - expect(poll_fn.mock.calls.length).toBe(initial_call_count); - expect(poller.active).toBe(true); + assert.strictEqual(poll_fn.mock.calls.length, initial_call_count); + assert.ok(poller.active); }); test('should dispose and stop polling', () => { @@ -217,14 +212,14 @@ describe('Poller', () => { const poller = new Poller({poll_fn}); poller.start(); - expect(poller.active).toBe(true); + assert.ok(poller.active); poller.dispose(); - expect(poller.active).toBe(false); + assert.ok(!poller.active); // Should not poll after disposal vi.advanceTimersByTime(Poller.DEFAULT_INTERVAL); - expect(poll_fn).toHaveBeenCalledTimes(1); + assert.strictEqual(poll_fn.mock.calls.length, 1); }); test('should handle restart scenario', () => { @@ -233,16 +228,16 @@ describe('Poller', () => { // Start, stop, start cycle poller.start(); - expect(poller.active).toBe(true); + assert.ok(poller.active); 
poller.stop(); - expect(poller.active).toBe(false); + assert.ok(!poller.active); poller.start(); - expect(poller.active).toBe(true); + assert.ok(poller.active); // Verify polling works after restart vi.advanceTimersByTime(Poller.DEFAULT_INTERVAL); - expect(poll_fn).toHaveBeenCalledTimes(1); + assert.strictEqual(poll_fn.mock.calls.length, 1); }); }); diff --git a/src/test/popover.svelte.test.ts b/src/test/popover.svelte.test.ts deleted file mode 100644 index 8bae96a58..000000000 --- a/src/test/popover.svelte.test.ts +++ /dev/null @@ -1,1103 +0,0 @@ -// @slop Claude Sonnet 4 - -// @vitest-environment jsdom - -import {describe, test, expect, vi, beforeEach, afterEach} from 'vitest'; - -import {Popover} from '$lib/popover.svelte.js'; -import type {Position} from '$lib/position_helpers.js'; - -// Helper functions for testing -const create_elements = (): { - container: HTMLElement; - trigger: HTMLElement; - content: HTMLElement; - body: HTMLElement; -} => { - const container = document.createElement('div'); - container.classList.add('container'); - - const trigger = document.createElement('button'); - trigger.textContent = 'Trigger Button'; - container.appendChild(trigger); - - const content = document.createElement('div'); - content.textContent = 'Popover Content'; - container.appendChild(content); - - document.body.appendChild(container); - - return {container, trigger, content, body: document.body}; -}; - -const create_mock_event = (type: string, target?: HTMLElement): Event => { - const event = new Event(type, {bubbles: true, cancelable: true}); - if (target) { - Object.defineProperty(event, 'target', {value: target}); - } - return event; -}; - -// Helper for checking style values that handles browser normalization -const check_style = (element: HTMLElement, prop: string, expected: string): void => { - const value = element.style.getPropertyValue(prop); - // Handle empty string vs 'auto' case - if (expected === 'auto' && value === '') { - return; - } - // Handle '0' 
vs '0px' case - if (expected === '0' && value === '0px') { - return; - } - expect(value).toBe(expected); -}; - -describe('Popover', () => { - // Define shared variables - let elements: ReturnType; - let popover: Popover; - let cleanup_actions: Array<() => void>; - - beforeEach(() => { - elements = create_elements(); - popover = new Popover(); - cleanup_actions = []; - }); - - afterEach(() => { - // Clean up all actions registered during the test - for (const cleanup of cleanup_actions) { - cleanup(); - } - - // Clean up DOM after each test - if (elements.body.contains(elements.container)) { - elements.body.removeChild(elements.container); - } - }); - - // Helper to register attachments for automatic cleanup - const register_attachment = (cleanup: (() => void) | void): void => { - if (cleanup) { - cleanup_actions.push(cleanup); - } - }; - - describe('constructor', () => { - test('creates with default values', () => { - expect(popover.visible).toBe(false); - expect(popover.position).toBe('bottom'); - expect(popover.align).toBe('center'); - expect(popover.offset).toBe('0'); - expect(popover.disable_outside_click).toBe(false); - expect(popover.popover_class).toBe(''); - }); - - test('accepts custom parameters', () => { - const onshow = vi.fn(); - const onhide = vi.fn(); - - popover = new Popover({ - position: 'top', - align: 'start', - offset: '16px', - disable_outside_click: true, - popover_class: 'test-class', - onshow, - onhide, - }); - - expect(popover.position).toBe('top'); - expect(popover.align).toBe('start'); - expect(popover.offset).toBe('16px'); - expect(popover.disable_outside_click).toBe(true); - expect(popover.popover_class).toBe('test-class'); - }); - }); - - describe('visibility methods', () => { - test('show() makes popover visible and calls onshow callback', () => { - const onshow = vi.fn(); - popover = new Popover({onshow}); - - expect(popover.visible).toBe(false); - - popover.show(); - - expect(popover.visible).toBe(true); - 
expect(onshow).toHaveBeenCalledTimes(1); - - // Showing when already visible should not call onshow again - popover.show(); - expect(onshow).toHaveBeenCalledTimes(1); - }); - - test('hide() hides popover and calls onhide callback', () => { - const onhide = vi.fn(); - popover = new Popover({onhide}); - - // Set visible manually first - popover.visible = true; - - popover.hide(); - - expect(popover.visible).toBe(false); - expect(onhide).toHaveBeenCalledTimes(1); - - // Hiding when already hidden should not call onhide again - popover.hide(); - expect(onhide).toHaveBeenCalledTimes(1); - }); - - test('toggle() toggles visibility state', () => { - const onshow = vi.fn(); - const onhide = vi.fn(); - popover = new Popover({onshow, onhide}); - - // Initially hidden - expect(popover.visible).toBe(false); - - // First toggle should show - popover.toggle(); - expect(popover.visible).toBe(true); - expect(onshow).toHaveBeenCalledTimes(1); - expect(onhide).not.toHaveBeenCalled(); - - // Second toggle should hide - popover.toggle(); - expect(popover.visible).toBe(false); - expect(onshow).toHaveBeenCalledTimes(1); - expect(onhide).toHaveBeenCalledTimes(1); - }); - }); - - describe('update()', () => { - test('changes configuration completely', () => { - popover = new Popover({ - position: 'left', - align: 'end', - popover_class: 'old-class', - }); - - const new_onshow = vi.fn(); - const new_onhide = vi.fn(); - - // Update with new parameters - popover.update({ - position: 'right', - align: 'start', - offset: '20px', - disable_outside_click: true, - popover_class: 'new-class', - onshow: new_onshow, - onhide: new_onhide, - }); - - expect(popover.position).toBe('right'); - expect(popover.align).toBe('start'); - expect(popover.offset).toBe('20px'); - expect(popover.disable_outside_click).toBe(true); - expect(popover.popover_class).toBe('new-class'); - - // Test the new callbacks work - popover.show(); - expect(new_onshow).toHaveBeenCalled(); - - popover.hide(); - 
expect(new_onhide).toHaveBeenCalled(); - }); - - test('handles partial updates correctly', () => { - popover = new Popover({ - position: 'left', - align: 'end', - offset: '10px', - }); - - // Update only some parameters - popover.update({ - position: 'right', - // Align should remain 'end' - // Offset should remain '10px' - }); - - expect(popover.position).toBe('right'); - expect(popover.align).toBe('end'); - expect(popover.offset).toBe('10px'); - }); - }); - - describe('actions', () => { - describe('trigger attachment', () => { - test('attaches click handler to show/hide popover', () => { - const {trigger} = elements; - - // Set up trigger attachment - register_attachment(popover.trigger()(trigger)); - - // Initial state - expect(popover.visible).toBe(false); - - // Simulate click to show - trigger.click(); - expect(popover.visible).toBe(true); - - // Simulate another click to hide - trigger.click(); - expect(popover.visible).toBe(false); - }); - - test('accepts parameters', () => { - const {trigger} = elements; - - // Set up trigger attachment with params - register_attachment( - popover.trigger({ - position: 'right', - align: 'start', - })(trigger), - ); - - // Check params were applied - expect(popover.position).toBe('right'); - expect(popover.align).toBe('start'); - }); - - test('sets proper aria attributes', () => { - const {trigger, content} = elements; - - // Set up attachments - register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Check for aria-expanded on trigger - expect(trigger.getAttribute('aria-expanded')).toBe('false'); - - // Show popover - popover.show(); - - // aria-expanded should update - expect(trigger.getAttribute('aria-expanded')).toBe('true'); - - // Hide popover - popover.hide(); - - // aria-expanded should update back - expect(trigger.getAttribute('aria-expanded')).toBe('false'); - }); - }); - - describe('content attachment', () => { - test('applies position styles and classes', () => 
{ - const {content} = elements; - - // Set up content attachment - register_attachment( - popover.content({ - position: 'bottom', - align: 'start', - offset: '15px', - popover_class: 'test-popover', - })(content), - ); - - // Check styles were applied - expect(content.style.position).toBe('absolute'); - expect(content.style.zIndex).toBe('10'); - expect(content.classList.contains('test-popover')).toBe(true); - - // Initial state - content shouldn't be visible, but we don't check display style - // since we might want to allow animations - expect(popover.visible).toBe(false); - - // Make visible - popover.show(); - expect(popover.visible).toBe(true); - }); - - test('updates styles when parameters change', () => { - const {content} = elements; - - // Set up content attachment - register_attachment( - popover.content({ - position: 'bottom', - align: 'start', - popover_class: 'test-popover', - })(content), - ); - - // Since attachments are reactive, updating the popover instance should trigger re-evaluation - popover.update({ - position: 'right', - align: 'center', - popover_class: 'updated-class', - }); - - // Check class was updated - expect(content.classList.contains('test-popover')).toBe(false); - expect(content.classList.contains('updated-class')).toBe(true); - }); - }); - - describe('container attachment', () => { - test('registers container element for positioning', () => { - const {container, trigger, content} = elements; - - // Set up all attachments - register_attachment(popover.container(container)); - register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Show popover - popover.show(); - - // Basic check that content is visible - expect(content.style.display).not.toBe('none'); - }); - }); - }); - - describe('positioning', () => { - test.each([ - {position: 'left', align: 'start'}, - {position: 'left', align: 'center'}, - {position: 'left', align: 'end'}, - {position: 'right', align: 'start'}, - {position: 
'right', align: 'center'}, - {position: 'right', align: 'end'}, - {position: 'top', align: 'start'}, - {position: 'top', align: 'center'}, - {position: 'top', align: 'end'}, - {position: 'bottom', align: 'start'}, - {position: 'bottom', align: 'center'}, - {position: 'bottom', align: 'end'}, - ] as const)('applies correct styles for %s/%s', ({position, align}) => { - const {content} = elements; - - // Apply position and alignment - register_attachment(popover.content({position, align})(content)); - - // Show popover - popover.show(); - - // Ensure some key styles are set based on position and alignment - if (position === 'left' || position === 'right') { - if (align === 'center') { - expect(content.style.transform).toMatch(/translateY/); - } else { - // For start/end alignment, one of top/bottom should be set - const has_position = content.style.top || content.style.bottom; - expect(has_position).toBeTruthy(); - } - // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition - } else if (position === 'top' || position === 'bottom') { - if (align === 'center') { - expect(content.style.transform).toMatch(/translateX/); - } else { - // For start/end alignment, one of left/right should be set - const has_position = content.style.left || content.style.right; - expect(has_position).toBeTruthy(); - } - } - }); - - describe('detailed positioning', () => { - test('verifies left position styles with various alignments', () => { - const {content} = elements; - - // Test left + start - register_attachment(popover.content({position: 'left', align: 'start'})(content)); - check_style(content, 'right', '100%'); - check_style(content, 'left', 'auto'); - check_style(content, 'top', '0'); - check_style(content, 'bottom', 'auto'); - check_style(content, 'transform', ''); - check_style(content, 'transform-origin', 'right'); - cleanup_actions.pop()?.(); - - // Test left + center - register_attachment(popover.content({position: 'left', align: 'center'})(content)); - 
check_style(content, 'right', '100%'); - check_style(content, 'left', 'auto'); - check_style(content, 'top', '50%'); - check_style(content, 'bottom', 'auto'); - check_style(content, 'transform', 'translateY(-50%)'); - check_style(content, 'transform-origin', 'right'); - cleanup_actions.pop()?.(); - - // Test left + end - register_attachment(popover.content({position: 'left', align: 'end'})(content)); - check_style(content, 'right', '100%'); - check_style(content, 'left', 'auto'); - check_style(content, 'top', 'auto'); - check_style(content, 'bottom', '0'); - check_style(content, 'transform', ''); - check_style(content, 'transform-origin', 'right'); - cleanup_actions.pop()?.(); - - // Test left with offset - register_attachment( - popover.content({position: 'left', align: 'start', offset: '10px'})(content), - ); - check_style(content, 'right', 'calc(100% + 10px)'); - }); - - test('verifies right position styles with various alignments', () => { - const {content} = elements; - - // Test right + start - register_attachment(popover.content({position: 'right', align: 'start'})(content)); - check_style(content, 'left', '100%'); - check_style(content, 'right', 'auto'); - check_style(content, 'top', '0'); - check_style(content, 'bottom', 'auto'); - check_style(content, 'transform', ''); - check_style(content, 'transform-origin', 'left'); - cleanup_actions.pop()?.(); - - // Test right + center - register_attachment(popover.content({position: 'right', align: 'center'})(content)); - check_style(content, 'left', '100%'); - check_style(content, 'right', 'auto'); - check_style(content, 'top', '50%'); - check_style(content, 'bottom', 'auto'); - check_style(content, 'transform', 'translateY(-50%)'); - check_style(content, 'transform-origin', 'left'); - cleanup_actions.pop()?.(); - - // Test right + end - register_attachment(popover.content({position: 'right', align: 'end'})(content)); - check_style(content, 'left', '100%'); - check_style(content, 'right', 'auto'); - 
check_style(content, 'top', 'auto'); - check_style(content, 'bottom', '0'); - check_style(content, 'transform', ''); - check_style(content, 'transform-origin', 'left'); - cleanup_actions.pop()?.(); - - // Test right with offset - register_attachment( - popover.content({position: 'right', align: 'start', offset: '10px'})(content), - ); - check_style(content, 'left', 'calc(100% + 10px)'); - }); - - test('verifies top position styles with various alignments', () => { - const {content} = elements; - - // Test top + start - register_attachment(popover.content({position: 'top', align: 'start'})(content)); - check_style(content, 'bottom', '100%'); - check_style(content, 'top', 'auto'); - check_style(content, 'left', '0'); - check_style(content, 'right', 'auto'); - check_style(content, 'transform', ''); - check_style(content, 'transform-origin', 'bottom'); - cleanup_actions.pop()?.(); - - // Test top + center - register_attachment(popover.content({position: 'top', align: 'center'})(content)); - check_style(content, 'bottom', '100%'); - check_style(content, 'top', 'auto'); - check_style(content, 'left', '50%'); - check_style(content, 'right', 'auto'); - check_style(content, 'transform', 'translateX(-50%)'); - check_style(content, 'transform-origin', 'bottom'); - cleanup_actions.pop()?.(); - - // Test top + end - register_attachment(popover.content({position: 'top', align: 'end'})(content)); - check_style(content, 'bottom', '100%'); - check_style(content, 'top', 'auto'); - check_style(content, 'left', 'auto'); - check_style(content, 'right', '0'); - check_style(content, 'transform', ''); - check_style(content, 'transform-origin', 'bottom'); - cleanup_actions.pop()?.(); - - // Test top with offset - register_attachment( - popover.content({position: 'top', align: 'start', offset: '10px'})(content), - ); - check_style(content, 'bottom', 'calc(100% + 10px)'); - }); - - test('verifies bottom position styles with various alignments', () => { - const {content} = elements; - - // 
Test bottom + start - register_attachment(popover.content({position: 'bottom', align: 'start'})(content)); - check_style(content, 'top', '100%'); - check_style(content, 'bottom', 'auto'); - check_style(content, 'left', '0'); - check_style(content, 'right', 'auto'); - check_style(content, 'transform', ''); - check_style(content, 'transform-origin', 'top'); - cleanup_actions.pop()?.(); - - // Test bottom + center - register_attachment(popover.content({position: 'bottom', align: 'center'})(content)); - check_style(content, 'top', '100%'); - check_style(content, 'bottom', 'auto'); - check_style(content, 'left', '50%'); - check_style(content, 'right', 'auto'); - check_style(content, 'transform', 'translateX(-50%)'); - check_style(content, 'transform-origin', 'top'); - cleanup_actions.pop()?.(); - - // Test bottom + end - register_attachment(popover.content({position: 'bottom', align: 'end'})(content)); - check_style(content, 'top', '100%'); - check_style(content, 'bottom', 'auto'); - check_style(content, 'left', 'auto'); - check_style(content, 'right', '0'); - check_style(content, 'transform', ''); - check_style(content, 'transform-origin', 'top'); - cleanup_actions.pop()?.(); - - // Test bottom with offset - register_attachment( - popover.content({position: 'bottom', align: 'start', offset: '10px'})(content), - ); - check_style(content, 'top', 'calc(100% + 10px)'); - }); - - test('verifies center position styles', () => { - const {content} = elements; - - register_attachment(popover.content({position: 'center'})(content)); - check_style(content, 'top', '50%'); - check_style(content, 'left', '50%'); - check_style(content, 'transform', 'translate(-50%, -50%)'); - check_style(content, 'transform-origin', 'center'); - cleanup_actions.pop()?.(); - - // Center ignores alignment and offset - register_attachment( - popover.content({position: 'center', align: 'start', offset: '10px'})(content), - ); - check_style(content, 'top', '50%'); - check_style(content, 'left', '50%'); - 
check_style(content, 'transform', 'translate(-50%, -50%)'); - }); - - test('verifies overlay position styles', () => { - const {content} = elements; - - register_attachment(popover.content({position: 'overlay'})(content)); - check_style(content, 'top', '0'); - check_style(content, 'left', '0'); - check_style(content, 'width', '100%'); - check_style(content, 'height', '100%'); - check_style(content, 'transform-origin', 'center'); - cleanup_actions.pop()?.(); - - // Overlay ignores alignment and offset - register_attachment( - popover.content({position: 'overlay', align: 'end', offset: '10px'})(content), - ); - check_style(content, 'top', '0'); - check_style(content, 'left', '0'); - check_style(content, 'width', '100%'); - check_style(content, 'height', '100%'); - }); - - test('updating position and offset dynamically updates styles', () => { - const {content} = elements; - popover = new Popover({ - position: 'bottom', - align: 'center', - offset: '0', - }); - - // Initial setup - register_attachment(popover.content()(content)); - check_style(content, 'top', '100%'); - check_style(content, 'left', '50%'); - - // With attachments being reactive, we need to recreate them to pick up new params - cleanup_actions.pop()?.(); - - // Update position - popover.update({position: 'right'}); - register_attachment(popover.content()(content)); - check_style(content, 'left', '100%'); - check_style(content, 'top', '50%'); - cleanup_actions.pop()?.(); - - // Update alignment - popover.update({align: 'start'}); - register_attachment(popover.content()(content)); - check_style(content, 'top', '0'); - cleanup_actions.pop()?.(); - - // Update offset - popover.update({offset: '15px'}); - register_attachment(popover.content()(content)); - check_style(content, 'left', 'calc(100% + 15px)'); - cleanup_actions.pop()?.(); - - // Multiple updates at once - popover.update({position: 'top', align: 'end', offset: '5px'}); - register_attachment(popover.content()(content)); - check_style(content, 
'bottom', 'calc(100% + 5px)'); - check_style(content, 'right', '0'); - check_style(content, 'top', 'auto'); - check_style(content, 'left', 'auto'); - }); - - test('z-index is always applied', () => { - const {content} = elements; - - // Test each position type to ensure z-index is always applied - const positions: Array = ['left', 'right', 'top', 'bottom', 'center', 'overlay']; - - for (const position of positions) { - register_attachment(popover.content({position})(content)); - expect(content.style.zIndex).toBe('10'); - cleanup_actions.pop()?.(); - } - }); - - test('transform-origin is set correctly for each position', () => { - const {content} = elements; - - const position_origins = [ - {position: 'left', expected: 'right'}, - {position: 'right', expected: 'left'}, - {position: 'top', expected: 'bottom'}, - {position: 'bottom', expected: 'top'}, - {position: 'center', expected: 'center'}, - {position: 'overlay', expected: 'center'}, - ]; - - for (const {position, expected} of position_origins) { - register_attachment(popover.content({position: position as Position})(content)); - expect(content.style.getPropertyValue('transform-origin')).toBe(expected); - cleanup_actions.pop()?.(); - } - }); - }); - }); - - describe('interaction', () => { - test('clicking outside hides popover when disable_outside_click is false', () => { - const {trigger, content, body} = elements; - const onhide = vi.fn(); - popover = new Popover({onhide}); - - // Set up attachments - register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Show the popover - popover.show(); - expect(popover.visible).toBe(true); - - // Simulate click outside - const click_event = create_mock_event('click', body); - document.dispatchEvent(click_event); - - // Popover should be hidden - expect(popover.visible).toBe(false); - expect(onhide).toHaveBeenCalled(); - }); - - test('clicking outside does not hide when disable_outside_click is true', () => { - const 
{trigger, content, body} = elements; - const onhide = vi.fn(); - popover = new Popover({ - disable_outside_click: true, - onhide, - }); - - // Set up attachments - register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Show the popover - popover.show(); - expect(popover.visible).toBe(true); - - // Simulate click outside - const click_event = create_mock_event('click', body); - document.dispatchEvent(click_event); - - // Popover should still be visible - expect(popover.visible).toBe(true); - expect(onhide).not.toHaveBeenCalled(); - }); - - test('clicking on trigger or content does not hide popover', () => { - const {trigger, content} = elements; - const onhide = vi.fn(); - popover = new Popover({onhide}); - - // Set up attachments - register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Show the popover - popover.show(); - expect(popover.visible).toBe(true); - - // Simulate click on content (this is intercepted earlier in the event chain) - const content_click = create_mock_event('click', content); - document.dispatchEvent(content_click); - - // Should still be visible - expect(popover.visible).toBe(true); - expect(onhide).not.toHaveBeenCalled(); - - // Simulate click on trigger - const trigger_click = create_mock_event('click', trigger); - document.dispatchEvent(trigger_click); - - // Should still be visible (actual trigger handling is tested separately) - expect(popover.visible).toBe(true); - expect(onhide).not.toHaveBeenCalled(); - }); - }); - - describe('edge cases', () => { - test('nested elements within trigger or content', () => { - const {trigger, content} = elements; - - // Create nested elements - const inner_trigger = document.createElement('span'); - trigger.appendChild(inner_trigger); - - const inner_content = document.createElement('span'); - content.appendChild(inner_content); - - // Set up attachments - 
register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Show popover - popover.show(); - - // Now we'll test if a click on the inner trigger toggles the popover - // First note current visibility - expect(popover.visible).toBe(true); - - // Click the trigger element directly - trigger.click(); - - // The popover should toggle to hidden - expect(popover.visible).toBe(false); - - // Show it again for the next test - trigger.click(); - expect(popover.visible).toBe(true); - - // Now test if outside clicks work - click on document body (not content or trigger) - const body_click = create_mock_event('click', document.body); - document.dispatchEvent(body_click); - - // This should close the popover - expect(popover.visible).toBe(false); - }); - - test('changing disable_outside_click dynamically', () => { - const {trigger, content, body} = elements; - - // Set up attachments - register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Show popover - - popover.show(); - - // Initially outside clicks should hide - const outside_click1 = create_mock_event('click', body); - document.dispatchEvent(outside_click1); - expect(popover.visible).toBe(false); - - // Update to disable outside clicks - popover.update({disable_outside_click: true}); - - // Show again - popover.show(); - - // Now outside clicks should not hide - const outside_click2 = create_mock_event('click', body); - document.dispatchEvent(outside_click2); - expect(popover.visible).toBe(true); - - // Change back to allowing outside clicks - popover.update({disable_outside_click: false}); - - // Outside clicks should hide again - const outside_click3 = create_mock_event('click', body); - document.dispatchEvent(outside_click3); - expect(popover.visible).toBe(false); - }); - - test('changing class dynamically updates DOM', () => { - const {content} = elements; - popover = new Popover({ - popover_class: 'initial-class', - }); 
- - // Set up content attachment - register_attachment(popover.content()(content)); - - // Initial class should be applied - expect(content.classList.contains('initial-class')).toBe(true); - - // Update class - popover.update({popover_class: 'updated-class'}); - - // Old class should be removed, new class added - expect(content.classList.contains('initial-class')).toBe(false); - expect(content.classList.contains('updated-class')).toBe(true); - }); - - test('cleanup removes event listeners', () => { - const {trigger, content} = elements; - - // Set up attachments - const trigger_cleanup = popover.trigger()(trigger); - const content_cleanup = popover.content()(content); - - // Show popover to set up document click listener - popover.show(); - - // Clean up attachments - trigger_cleanup?.(); - content_cleanup?.(); - - // After cleanup, clicking trigger should do nothing - trigger.click(); - expect(popover.visible).toBe(true); // Still true because cleanup removed click handler - }); - - test('multiple popovers work independently', () => { - // Create two sets of elements - const elements1 = create_elements(); - const elements2 = create_elements(); - document.body.appendChild(elements2.container); - - const popover1 = new Popover(); - const popover2 = new Popover(); - - // Set up attachments for both popovers - register_attachment(popover1.trigger()(elements1.trigger)); - register_attachment(popover1.content()(elements1.content)); - - register_attachment(popover2.trigger()(elements2.trigger)); - register_attachment(popover2.content()(elements2.content)); - - // Show first popover - elements1.trigger.click(); - expect(popover1.visible).toBe(true); - expect(popover2.visible).toBe(false); - - // Show second popover - elements2.trigger.click(); - expect(popover1.visible).toBe(true); - expect(popover2.visible).toBe(true); - - // Hide first popover - elements1.trigger.click(); - expect(popover1.visible).toBe(false); - expect(popover2.visible).toBe(true); - - // Clean up - 
document.body.removeChild(elements2.container); - }); - }); - - describe('real-world scenarios and robustness', () => { - test('popover survives DOM manipulation', () => { - const {trigger, content} = elements; - const onhide = vi.fn(); - popover = new Popover({onhide}); - - register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Show popover - trigger.click(); - expect(popover.visible).toBe(true); - - // Add/remove siblings (common in dynamic UIs) - const sibling = document.createElement('div'); - elements.container.appendChild(sibling); - elements.container.removeChild(sibling); - - // Popover should still be functional - expect(popover.visible).toBe(true); - trigger.click(); - expect(popover.visible).toBe(false); - expect(onhide).toHaveBeenCalled(); - }); - - test('handles rapid show/hide cycles', () => { - const {trigger} = elements; - const onshow = vi.fn(); - const onhide = vi.fn(); - popover = new Popover({onshow, onhide}); - - register_attachment(popover.trigger()(trigger)); - - // Rapid clicking should be handled gracefully - for (let i = 0; i < 10; i++) { - trigger.click(); - } - - // Should end up hidden (started hidden, 10 clicks = even number) - expect(popover.visible).toBe(false); - expect(onshow).toHaveBeenCalledTimes(5); - expect(onhide).toHaveBeenCalledTimes(5); - }); - - test('preserves state during attachment recreation', () => { - const {trigger} = elements; - popover = new Popover({position: 'top', align: 'start'}); - - // Set up initial attachment - let cleanup = popover.trigger()(trigger); - register_attachment(cleanup); - - // Show popover - trigger.click(); - expect(popover.visible).toBe(true); - - // Recreate attachment (simulates reactive updates) - cleanup?.(); - cleanup = popover.trigger()(trigger); - register_attachment(cleanup); - - // State should be preserved - expect(popover.visible).toBe(true); - expect(popover.position).toBe('top'); - expect(popover.align).toBe('start'); - }); - 
- test('handles element removal gracefully', () => { - const {trigger} = elements; - - // Should not throw when elements are missing - expect(() => { - popover.show(); - popover.hide(); - popover.toggle(); - }).not.toThrow(); - - // Should handle element removal during operation - register_attachment(popover.trigger()(trigger)); - popover.show(); - - // Should not crash when trigger is removed - trigger.remove(); - expect(() => popover.hide()).not.toThrow(); - }); - - test('ARIA attributes maintained correctly', () => { - const {trigger, content} = elements; - register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Initial ARIA state - expect(trigger.getAttribute('aria-expanded')).toBe('false'); - expect(content.getAttribute('role')).toBe('dialog'); - - // Show and verify ARIA - popover.show(); - expect(trigger.getAttribute('aria-expanded')).toBe('true'); - expect(trigger.getAttribute('aria-controls')).toBeTruthy(); - expect(content.id).toBeTruthy(); - expect(trigger.getAttribute('aria-controls')).toBe(content.id); - - // Hide and verify ARIA persists - popover.hide(); - expect(trigger.getAttribute('aria-expanded')).toBe('false'); - expect(trigger.getAttribute('aria-controls')).toBe(content.id); - }); - - test('callbacks fire in correct order', () => { - const events: Array = []; - const onshow = () => events.push('show'); - const onhide = () => events.push('hide'); - popover = new Popover({onshow, onhide}); - - // Test multiple show/hide cycles - popover.show(); - popover.hide(); - popover.show(); - popover.hide(); - - expect(events).toEqual(['show', 'hide', 'show', 'hide']); - }); - - test('nested element click detection', () => { - const {trigger, content} = elements; - register_attachment(popover.trigger()(trigger)); - register_attachment(popover.content()(content)); - - // Create deeply nested structure - const level1 = document.createElement('div'); - const level2 = document.createElement('span'); - const 
level3 = document.createElement('em'); - level1.appendChild(level2); - level2.appendChild(level3); - content.appendChild(level1); - - popover.show(); - expect(popover.visible).toBe(true); - - // Click on deeply nested element should not close popover - const nested_click = create_mock_event('click', level3); - document.dispatchEvent(nested_click); - expect(popover.visible).toBe(true); - - // Click outside should close popover - const outside_click = create_mock_event('click', document.body); - document.dispatchEvent(outside_click); - expect(popover.visible).toBe(false); - }); - - test('multiple popovers independence', () => { - const popovers: Array = []; - const triggers: Array = []; - - // Create multiple popovers - for (let i = 0; i < 3; i++) { - const popover_instance = new Popover(); - const trigger = document.createElement('button'); - trigger.textContent = `Trigger ${i}`; - document.body.appendChild(trigger); - - register_attachment(popover_instance.trigger()(trigger)); - popovers.push(popover_instance); - triggers.push(trigger); - } - - // Show all popovers - for (const popover_instance of popovers) { - popover_instance.show(); - expect(popover_instance.visible).toBe(true); - } - - // Hide them one by one and verify others remain visible - for (let i = 0; i < popovers.length; i++) { - popovers[i]!.hide(); - expect(popovers[i]!.visible).toBe(false); - - // Check remaining popovers are still visible - for (let j = i + 1; j < popovers.length; j++) { - expect(popovers[j]!.visible).toBe(true); - } - } - - // Clean up - for (const trigger of triggers) { - trigger.remove(); - } - }); - }); -}); diff --git a/src/test/position_helpers.test.ts b/src/test/position_helpers.test.ts deleted file mode 100644 index 69a90caba..000000000 --- a/src/test/position_helpers.test.ts +++ /dev/null @@ -1,225 +0,0 @@ -// @slop Claude Sonnet 3.7 - -import {test, expect} from 'vitest'; - -import {generate_position_styles, type Position, type Alignment} from '$lib/position_helpers.js'; 
- -// Helper to check common properties that should be on all position styles -const check_common_styles = (styles: Record) => { - expect(styles.position).toBe('absolute'); - expect(styles['z-index']).toBe('10'); -}; - -// Helper to check expected style value, considering browser normalization -const check_style_value = (styles: Record, prop: string, expected: string) => { - if ( - (expected === 'auto' && styles[prop] === '') || - (expected === '0' && styles[prop] === '0px') - ) { - return true; - } - expect(styles[prop]).toBe(expected); - return true; // Added return statement to fix type error -}; - -test('generate_position_styles - left position with different alignments', () => { - // Left + Start - let styles = generate_position_styles('left', 'start'); - check_common_styles(styles); - check_style_value(styles, 'right', '100%'); - check_style_value(styles, 'left', 'auto'); - check_style_value(styles, 'top', '0'); - check_style_value(styles, 'bottom', 'auto'); - expect(styles['transform-origin']).toBe('right'); - expect(styles.transform || '').not.toContain('translate'); - - // Left + Center - styles = generate_position_styles('left', 'center'); - check_common_styles(styles); - check_style_value(styles, 'right', '100%'); - check_style_value(styles, 'left', 'auto'); - check_style_value(styles, 'top', '50%'); - expect(styles.transform).toBe('translateY(-50%)'); - - // Left + End - styles = generate_position_styles('left', 'end'); - check_common_styles(styles); - check_style_value(styles, 'right', '100%'); - check_style_value(styles, 'left', 'auto'); - check_style_value(styles, 'bottom', '0'); - check_style_value(styles, 'top', 'auto'); -}); - -test('generate_position_styles - right position with different alignments', () => { - // Right + Start - let styles = generate_position_styles('right', 'start'); - check_common_styles(styles); - check_style_value(styles, 'left', '100%'); - check_style_value(styles, 'right', 'auto'); - check_style_value(styles, 'top', '0'); 
- check_style_value(styles, 'bottom', 'auto'); - expect(styles['transform-origin']).toBe('left'); - - // Right + Center - styles = generate_position_styles('right', 'center'); - check_common_styles(styles); - check_style_value(styles, 'left', '100%'); - check_style_value(styles, 'right', 'auto'); - check_style_value(styles, 'top', '50%'); - expect(styles.transform).toBe('translateY(-50%)'); - - // Right + End - styles = generate_position_styles('right', 'end'); - check_common_styles(styles); - check_style_value(styles, 'left', '100%'); - check_style_value(styles, 'right', 'auto'); - check_style_value(styles, 'bottom', '0'); - check_style_value(styles, 'top', 'auto'); -}); - -test('generate_position_styles - top position with different alignments', () => { - // Top + Start - let styles = generate_position_styles('top', 'start'); - check_common_styles(styles); - check_style_value(styles, 'bottom', '100%'); - check_style_value(styles, 'top', 'auto'); - check_style_value(styles, 'left', '0'); - check_style_value(styles, 'right', 'auto'); - expect(styles['transform-origin']).toBe('bottom'); - - // Top + Center - styles = generate_position_styles('top', 'center'); - check_common_styles(styles); - check_style_value(styles, 'bottom', '100%'); - check_style_value(styles, 'top', 'auto'); - check_style_value(styles, 'left', '50%'); - expect(styles.transform).toBe('translateX(-50%)'); - - // Top + End - styles = generate_position_styles('top', 'end'); - check_common_styles(styles); - check_style_value(styles, 'bottom', '100%'); - check_style_value(styles, 'top', 'auto'); - check_style_value(styles, 'left', 'auto'); - check_style_value(styles, 'right', '0'); -}); - -test('generate_position_styles - bottom position with different alignments', () => { - // Bottom + Start - let styles = generate_position_styles('bottom', 'start'); - check_common_styles(styles); - check_style_value(styles, 'top', '100%'); - check_style_value(styles, 'bottom', 'auto'); - check_style_value(styles, 
'left', '0'); - check_style_value(styles, 'right', 'auto'); - expect(styles['transform-origin']).toBe('top'); - - // Bottom + Center - styles = generate_position_styles('bottom', 'center'); - check_common_styles(styles); - check_style_value(styles, 'top', '100%'); - check_style_value(styles, 'bottom', 'auto'); - check_style_value(styles, 'left', '50%'); - expect(styles.transform).toBe('translateX(-50%)'); - - // Bottom + End - styles = generate_position_styles('bottom', 'end'); - check_common_styles(styles); - check_style_value(styles, 'top', '100%'); - check_style_value(styles, 'bottom', 'auto'); - check_style_value(styles, 'left', 'auto'); - check_style_value(styles, 'right', '0'); -}); - -test('generate_position_styles - with offsets', () => { - // Test left with offset - let styles = generate_position_styles('left', 'start', '10px'); - expect(styles.right).toBe('calc(100% + 10px)'); - - // Test right with offset - styles = generate_position_styles('right', 'start', '10px'); - expect(styles.left).toBe('calc(100% + 10px)'); - - // Test top with offset - styles = generate_position_styles('top', 'start', '10px'); - expect(styles.bottom).toBe('calc(100% + 10px)'); - - // Test bottom with offset - styles = generate_position_styles('bottom', 'start', '10px'); - expect(styles.top).toBe('calc(100% + 10px)'); - - // Test with different offset values - styles = generate_position_styles('left', 'start', '5rem'); - expect(styles.right).toBe('calc(100% + 5rem)'); - - // Test with negative offset - styles = generate_position_styles('left', 'start', '-8px'); - expect(styles.right).toBe('calc(100% + -8px)'); -}); - -test('generate_position_styles - center position', () => { - const styles = generate_position_styles('center'); - check_common_styles(styles); - check_style_value(styles, 'top', '50%'); - check_style_value(styles, 'left', '50%'); - expect(styles.transform).toBe('translate(-50%, -50%)'); - expect(styles['transform-origin']).toBe('center'); - - // Center ignores 
alignment and offset - const styles_with_params = generate_position_styles('center', 'start', '10px'); - check_style_value(styles_with_params, 'top', '50%'); - check_style_value(styles_with_params, 'left', '50%'); - expect(styles_with_params.transform).toBe('translate(-50%, -50%)'); -}); - -test('generate_position_styles - overlay position', () => { - const styles = generate_position_styles('overlay'); - check_common_styles(styles); - check_style_value(styles, 'top', '0'); - check_style_value(styles, 'left', '0'); - expect(styles.width).toBe('100%'); - expect(styles.height).toBe('100%'); - expect(styles['transform-origin']).toBe('center'); - - // Overlay ignores alignment and offset - const styles_with_params = generate_position_styles('overlay', 'start', '10px'); - check_style_value(styles_with_params, 'top', '0'); - check_style_value(styles_with_params, 'left', '0'); - expect(styles_with_params.width).toBe('100%'); - expect(styles_with_params.height).toBe('100%'); -}); - -test('generate_position_styles - default parameters', () => { - // No parameters (uses defaults) - const styles = generate_position_styles(); - check_common_styles(styles); - check_style_value(styles, 'top', '50%'); - check_style_value(styles, 'left', '50%'); - expect(styles.transform).toBe('translate(-50%, -50%)'); - expect(styles['transform-origin']).toBe('center'); -}); - -test('generate_position_styles - throws on invalid position', () => { - // @ts-expect-error - Testing invalid position - expect(() => generate_position_styles('invalid')).toThrow(); -}); - -// Test all possible combinations systematically -test('generate_position_styles - all position/alignment combinations work', () => { - const positions: Array = ['left', 'right', 'top', 'bottom', 'center', 'overlay']; - const alignments: Array = ['start', 'center', 'end']; - const offsets = ['0', '10px']; - - for (const position of positions) { - for (const align of alignments) { - for (const offset of offsets) { - expect(() => { - const 
styles = generate_position_styles(position, align, offset); - // Basic validation that we got a style object back - expect(typeof styles).toBe('object'); - expect(styles.position).toBe('absolute'); - }).not.toThrow(); - } - } - } -}); diff --git a/src/test/prompt_helpers.test.ts b/src/test/prompt_helpers.test.ts index 9cec68d2c..fed23c8c6 100644 --- a/src/test/prompt_helpers.test.ts +++ b/src/test/prompt_helpers.test.ts @@ -1,6 +1,4 @@ -// @slop Claude Opus 4 - -import {test, expect} from 'vitest'; +import {test, assert} from 'vitest'; import {format_prompt_content} from '$lib/prompt_helpers.js'; @@ -40,7 +38,7 @@ const create_part = (partial: Partial = {}): SimplePart => { // Basic tests test('format_prompt_content - returns empty string for empty parts array', () => { const result = format_prompt_content([] as any); - expect(result).toBe(''); + assert.strictEqual(result, ''); }); test('format_prompt_content - filters out disabled parts', () => { @@ -50,14 +48,14 @@ test('format_prompt_content - filters out disabled parts', () => { ]; const result = format_prompt_content(parts as any); - expect(result).toBe('Content 2'); + assert.strictEqual(result, 'Content 2'); }); test('format_prompt_content - joins multiple enabled parts with double newlines', () => { const parts = [create_part({content: 'Content 1'}), create_part({content: 'Content 2'})]; const result = format_prompt_content(parts as any); - expect(result).toBe('Content 1\n\nContent 2'); + assert.strictEqual(result, 'Content 1\n\nContent 2'); }); // XML tag tests @@ -71,7 +69,7 @@ test('format_prompt_content - wraps content with XML tags when specified', () => ]; const result = format_prompt_content(parts as any); - expect(result).toBe('\nContent with tag\n'); + assert.strictEqual(result, '\nContent with tag\n'); }); test('format_prompt_content - uses xml_tag_name_default when no XML tag name is provided', () => { @@ -85,7 +83,7 @@ test('format_prompt_content - uses xml_tag_name_default when no XML tag name 
is ]; const result = format_prompt_content(parts as any); - expect(result).toBe('\nContent with default tag\n'); + assert.strictEqual(result, '\nContent with default tag\n'); }); // Test with different part types @@ -106,7 +104,10 @@ test('format_prompt_content - uses different part types as defaults', () => { ]; const result = format_prompt_content(parts as any); - expect(result).toBe('\nFile content\n\n\n\nSequence content\n'); + assert.strictEqual( + result, + '\nFile content\n\n\n\nSequence content\n', + ); }); test('format_prompt_content - uses different default XML tag names for different part types', () => { @@ -132,7 +133,8 @@ test('format_prompt_content - uses different default XML tag names for different ]; const result = format_prompt_content(parts as any); - expect(result).toBe( + assert.strictEqual( + result, '\nFile content\n\n\n\nText content\n\n\n\nSequence content\n', ); }); @@ -149,7 +151,7 @@ test('format_prompt_content - includes attributes with key and value', () => { ]; const result = format_prompt_content(parts as any); - expect(result).toBe('
\nContent with attributes\n
'); + assert.strictEqual(result, '
\nContent with attributes\n
'); }); test('format_prompt_content - handles empty values as boolean attributes', () => { @@ -163,7 +165,7 @@ test('format_prompt_content - handles empty values as boolean attributes', () => ]; const result = format_prompt_content(parts as any); - expect(result).toBe('\nContent with boolean attribute\n'); + assert.strictEqual(result, '\nContent with boolean attribute\n'); }); test('format_prompt_content - handles explicitly empty string values', () => { @@ -180,7 +182,8 @@ test('format_prompt_content - handles explicitly empty string values', () => { ]; const result = format_prompt_content(parts as any); - expect(result).toBe( + assert.strictEqual( + result, '
\nContent with explicit empty value\n
', ); }); @@ -196,7 +199,7 @@ test('format_prompt_content - filters out attributes without keys', () => { ]; const result = format_prompt_content(parts as any); - expect(result).toBe('
\nContent with missing key\n
'); + assert.strictEqual(result, '
\nContent with missing key\n
'); }); test('format_prompt_content - handles multiple attributes with mix of empty and non-empty values', () => { @@ -216,7 +219,8 @@ test('format_prompt_content - handles multiple attributes with mix of empty and ]; const result = format_prompt_content(parts as any); - expect(result).toBe( + assert.strictEqual( + result, '', ); }); @@ -236,7 +240,7 @@ test('format_prompt_content - ignores attributes with empty keys after trimming' ]; const result = format_prompt_content(parts as any); - expect(result).toBe('
\nContent with whitespace key\n
'); + assert.strictEqual(result, '
\nContent with whitespace key\n
'); }); test('format_prompt_content - trims attribute keys before rendering', () => { @@ -253,7 +257,8 @@ test('format_prompt_content - trims attribute keys before rendering', () => { ]; const result = format_prompt_content(parts as any); - expect(result).toBe( + assert.strictEqual( + result, '
\nContent with trimmed keys\n
', ); }); @@ -274,7 +279,7 @@ test('format_prompt_content - removes attributes with empty keys but preserves o ]; const result = format_prompt_content(parts as any); - expect(result).toBe('
\nMixed attributes\n
'); + assert.strictEqual(result, '
\nMixed attributes\n
'); }); test('format_prompt_content - filters out attributes with empty keys', () => { @@ -288,7 +293,7 @@ test('format_prompt_content - filters out attributes with empty keys', () => { ]; const result = format_prompt_content(parts as any); - expect(result).toBe('
\nContent with empty key\n
'); + assert.strictEqual(result, '
\nContent with empty key\n
'); }); // Edge cases @@ -296,7 +301,7 @@ test('format_prompt_content - trims whitespace from content', () => { const parts = [create_part({content: ' Content with whitespace '})]; const result = format_prompt_content(parts as any); - expect(result).toBe('Content with whitespace'); + assert.strictEqual(result, 'Content with whitespace'); }); test('format_prompt_content - skips parts with empty content', () => { @@ -307,7 +312,7 @@ test('format_prompt_content - skips parts with empty content', () => { ]; const result = format_prompt_content(parts as any); - expect(result).toBe('Real content'); + assert.strictEqual(result, 'Real content'); }); test('format_prompt_content - trims whitespace from XML tag name', () => { @@ -320,7 +325,7 @@ test('format_prompt_content - trims whitespace from XML tag name', () => { ]; const result = format_prompt_content(parts as any); - expect(result).toBe('\nTrimmed tag name\n'); + assert.strictEqual(result, '\nTrimmed tag name\n'); }); // Test that diskfile parts get the path attribute by default @@ -336,7 +341,7 @@ test('format_prompt_content - ensures diskfile parts have path attribute', () => }); const result = format_prompt_content([diskfile_part] as any); - expect(result).toBe('\nFile content with path\n'); + assert.strictEqual(result, '\nFile content with path\n'); }); // Test for when the path attribute is combined with other attributes @@ -355,7 +360,8 @@ test('format_prompt_content - combines path attribute with other attributes for }); const result = format_prompt_content([diskfile_part] as any); - expect(result).toBe( + assert.strictEqual( + result, '\nFile with multiple attributes\n', ); }); diff --git a/src/test/reorderable.svelte.test.ts b/src/test/reorderable.svelte.test.ts index ecb2ec015..958905d21 100644 --- a/src/test/reorderable.svelte.test.ts +++ b/src/test/reorderable.svelte.test.ts @@ -1,8 +1,6 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, vi, describe, beforeEach, 
afterEach} from 'vitest'; +import {test, vi, describe, beforeEach, afterEach, assert} from 'vitest'; import { Reorderable, @@ -104,21 +102,21 @@ describe('Reorderable', () => { test('creates with default values', () => { const reorderable = new Reorderable(); - expect(reorderable).toBeInstanceOf(Reorderable); - expect(reorderable.list_node).toBeNull(); - expect(reorderable.list_params).toBeNull(); - expect(reorderable.indices.size).toBe(0); - expect(reorderable.elements.size).toBe(0); - expect(reorderable.direction).toBe('vertical'); - expect(reorderable.id).toBeTruthy(); - expect(reorderable.id).not.toBe(new Reorderable().id); - expect(reorderable.list_class).toBe('reorderable_list'); - expect(reorderable.item_class).toBe('reorderable_item'); + assert.instanceOf(reorderable, Reorderable); + assert.isNull(reorderable.list_node); + assert.isNull(reorderable.list_params); + assert.strictEqual(reorderable.indices.size, 0); + assert.strictEqual(reorderable.elements.size, 0); + assert.strictEqual(reorderable.direction, 'vertical'); + assert.ok(reorderable.id); + assert.notStrictEqual(reorderable.id, new Reorderable().id); + assert.strictEqual(reorderable.list_class, 'reorderable-list'); + assert.strictEqual(reorderable.item_class, 'reorderable-item'); }); test('creates with custom direction', () => { const reorderable = new Reorderable({direction: 'horizontal'}); - expect(reorderable.direction).toBe('horizontal'); + assert.strictEqual(reorderable.direction, 'horizontal'); }); test('creates with custom styling', () => { @@ -128,11 +126,11 @@ describe('Reorderable', () => { dragging_class: 'custom_dragging', }); - expect(reorderable.list_class).toBe('custom_list'); - expect(reorderable.item_class).toBe('custom_item'); - expect(reorderable.dragging_class).toBe('custom_dragging'); + assert.strictEqual(reorderable.list_class, 'custom_list'); + assert.strictEqual(reorderable.item_class, 'custom_item'); + assert.strictEqual(reorderable.dragging_class, 'custom_dragging'); // 
Other styles should have default values - expect(reorderable.drag_over_class).toBe('drag_over'); + assert.strictEqual(reorderable.drag_over_class, 'drag-over'); }); }); @@ -156,43 +154,43 @@ describe('Reorderable', () => { test('initializes correctly', () => { cleanup_fn = attach_list(reorderable, list, {onreorder: mock_callback}); - expect(reorderable.list_node).toBe(list); - expect(reorderable.list_params).toEqual({onreorder: mock_callback}); - expect(list.classList.contains(reorderable.list_class!)).toBe(true); - expect(list.getAttribute('role')).toBe('list'); - expect(list.dataset.reorderable_list_id).toBe(reorderable.id); + assert.strictEqual(reorderable.list_node, list); + assert.deepEqual(reorderable.list_params, {onreorder: mock_callback}); + assert.ok(list.classList.contains(reorderable.list_class!)); + assert.strictEqual(list.getAttribute('role'), 'list'); + assert.strictEqual(list.dataset.reorderable_list_id, reorderable.id); }); test('re-attachment changes callbacks', () => { const mock_callback2 = vi.fn(); const cleanup1 = attach_list(reorderable, list, {onreorder: mock_callback}); - expect(reorderable.list_params).toEqual({onreorder: mock_callback}); + assert.deepEqual(reorderable.list_params, {onreorder: mock_callback}); // Re-attach with new callback cleanup1(); cleanup_fn = attach_list(reorderable, list, {onreorder: mock_callback2}); - expect(reorderable.list_params).toEqual({onreorder: mock_callback2}); + assert.deepEqual(reorderable.list_params, {onreorder: mock_callback2}); }); test('destroy cleans up', () => { cleanup_fn = attach_list(reorderable, list, {onreorder: mock_callback}); // Before destroy - expect(reorderable.list_node).toBe(list); - expect(list.classList.contains(reorderable.list_class!)).toBe(true); + assert.strictEqual(reorderable.list_node, list); + assert.ok(list.classList.contains(reorderable.list_class!)); // Destroy cleanup_fn(); cleanup_fn = undefined; // After destroy - expect(reorderable.list_node).toBeNull(); - 
expect(reorderable.list_params).toBeNull(); - expect(list.classList.contains(reorderable.list_class!)).toBe(false); - expect(list.hasAttribute('role')).toBe(false); - expect(list.dataset.reorderable_list_id).toBeUndefined(); + assert.isNull(reorderable.list_node); + assert.isNull(reorderable.list_params); + assert.ok(!list.classList.contains(reorderable.list_class!)); + assert.ok(!list.hasAttribute('role')); + assert.ok(list.dataset.reorderable_list_id === undefined); }); }); @@ -218,11 +216,11 @@ describe('Reorderable', () => { test('initializes correctly', () => { cleanup_fn = attach_item(reorderable, item, {index: 0}); - expect(item.classList.contains(reorderable.item_class!)).toBe(true); - expect(item.getAttribute('draggable')).toBe('true'); - expect(item.getAttribute('role')).toBe('listitem'); - expect(item.dataset.reorderable_item_id).toBeDefined(); - expect(item.dataset.reorderable_list_id).toBe(reorderable.id); + assert.ok(item.classList.contains(reorderable.item_class!)); + assert.strictEqual(item.getAttribute('draggable'), 'true'); + assert.strictEqual(item.getAttribute('role'), 'listitem'); + assert.isDefined(item.dataset.reorderable_item_id); + assert.strictEqual(item.dataset.reorderable_list_id, reorderable.id); // Either in pending items or regular maps const item_id = item.dataset.reorderable_item_id as ReorderableItemId; @@ -230,7 +228,7 @@ describe('Reorderable', () => { ? 
reorderable.indices.has(item_id) : reorderable.pending_items.some((p) => p.id === item_id); - expect(is_indexed).toBe(true); + assert.ok(is_indexed); }); test('re-attachment changes index', () => { @@ -241,10 +239,10 @@ describe('Reorderable', () => { // Check initial index if (reorderable.initialized) { - expect(reorderable.indices.get(item_id)).toBe(0); + assert.strictEqual(reorderable.indices.get(item_id), 0); } else { const pending_item = reorderable.pending_items.find((p) => p.id === item_id); - expect(pending_item?.index).toBe(0); + assert.strictEqual(pending_item?.index, 0); } // Re-attach with new index @@ -256,10 +254,10 @@ describe('Reorderable', () => { // Check if index was updated in the appropriate storage if (reorderable.initialized) { - expect(reorderable.indices.get(new_item_id)).toBe(5); + assert.strictEqual(reorderable.indices.get(new_item_id), 5); } else { const pending_item = reorderable.pending_items.find((p) => p.id === new_item_id); - expect(pending_item?.index).toBe(5); + assert.strictEqual(pending_item?.index, 5); } }); @@ -269,23 +267,23 @@ describe('Reorderable', () => { const item_id = item.dataset.reorderable_item_id as ReorderableItemId; // Before destroy - expect(item.classList.contains(reorderable.item_class!)).toBe(true); + assert.ok(item.classList.contains(reorderable.item_class!)); // Destroy cleanup_fn(); cleanup_fn = undefined; // After destroy - expect(item.classList.contains(reorderable.item_class!)).toBe(false); - expect(item.hasAttribute('draggable')).toBe(false); - expect(item.hasAttribute('role')).toBe(false); - expect(item.dataset.reorderable_item_id).toBeUndefined(); - expect(item.dataset.reorderable_list_id).toBeUndefined(); + assert.ok(!item.classList.contains(reorderable.item_class!)); + assert.ok(!item.hasAttribute('draggable')); + assert.ok(!item.hasAttribute('role')); + assert.ok(item.dataset.reorderable_item_id === undefined); + assert.ok(item.dataset.reorderable_list_id === undefined); // Item should be removed 
from storage const still_pending = reorderable.pending_items.some((p) => p.id === item_id); const still_indexed = reorderable.indices.has(item_id); - expect(still_pending || still_indexed).toBe(false); + assert.ok(!(still_pending || still_indexed)); }); }); @@ -319,29 +317,29 @@ describe('Reorderable', () => { test('update_indicator applies correct classes', () => { // Update indicators reorderable.update_indicator(item_id, 'top'); - expect(item.classList.contains(reorderable.drag_over_class!)).toBe(true); - expect(item.classList.contains(reorderable.drag_over_top_class!)).toBe(true); + assert.ok(item.classList.contains(reorderable.drag_over_class!)); + assert.ok(item.classList.contains(reorderable.drag_over_top_class!)); // Change indicator reorderable.update_indicator(item_id, 'bottom'); - expect(item.classList.contains(reorderable.drag_over_top_class!)).toBe(false); - expect(item.classList.contains(reorderable.drag_over_bottom_class!)).toBe(true); + assert.ok(!item.classList.contains(reorderable.drag_over_top_class!)); + assert.ok(item.classList.contains(reorderable.drag_over_bottom_class!)); // Invalid drop reorderable.update_indicator(item_id, 'left', false); - expect(item.classList.contains(reorderable.drag_over_left_class!)).toBe(false); - expect(item.classList.contains(reorderable.invalid_drop_class!)).toBe(true); + assert.ok(!item.classList.contains(reorderable.drag_over_left_class!)); + assert.ok(item.classList.contains(reorderable.invalid_drop_class!)); }); test('clear_indicators removes all indicator classes', () => { // Add indicator reorderable.update_indicator(item_id, 'right'); - expect(item.classList.contains(reorderable.drag_over_right_class!)).toBe(true); + assert.ok(item.classList.contains(reorderable.drag_over_right_class!)); // Clear indicators reorderable.clear_indicators(); - expect(item.classList.contains(reorderable.drag_over_class!)).toBe(false); - expect(item.classList.contains(reorderable.drag_over_right_class!)).toBe(false); + 
assert.ok(!item.classList.contains(reorderable.drag_over_class!)); + assert.ok(!item.classList.contains(reorderable.drag_over_right_class!)); }); }); @@ -395,10 +393,10 @@ describe('Reorderable', () => { first_item.dispatchEvent(drag_event); // Check if drag operation was set up - expect(reorderable.source_index).toBe(0); - expect(reorderable.source_item_id).toBe(item_id); - expect(first_item.classList.contains(reorderable.dragging_class!)).toBe(true); - expect(mock_data_transfer.setData).toHaveBeenCalled(); + assert.strictEqual(reorderable.source_index, 0); + assert.strictEqual(reorderable.source_item_id, item_id); + assert.ok(first_item.classList.contains(reorderable.dragging_class!)); + assert.ok(mock_data_transfer.setData.mock.calls.length > 0); }); test('dragend resets state', () => { @@ -416,9 +414,9 @@ describe('Reorderable', () => { list.dispatchEvent(dragend_event); // Check if state was reset - expect(reorderable.source_index).toBe(-1); - expect(reorderable.source_item_id).toBeNull(); - expect(first_item.classList.contains(reorderable.dragging_class!)).toBe(false); + assert.strictEqual(reorderable.source_index, -1); + assert.isNull(reorderable.source_item_id); + assert.ok(!first_item.classList.contains(reorderable.dragging_class!)); }); }); @@ -431,10 +429,8 @@ describe('Reorderable', () => { // Initialize first reorderable const cleanup1 = attach_list(reorderable1, list, {onreorder: vi.fn()}); - // Expect no error when trying to initialize second reorderable with same list - expect(() => { - attach_list(reorderable2, list, {onreorder: vi.fn()}); - }).not.toThrow(); + // Should not throw when trying to initialize second reorderable with same list + attach_list(reorderable2, list, {onreorder: vi.fn()}); // Clean up cleanup1(); @@ -456,7 +452,7 @@ describe('Reorderable', () => { const cleanup2 = attachment2(list); // Should work without errors - expect(reorderable.list_node).toBe(list); + assert.strictEqual(reorderable.list_node, list); // Clean up if 
(cleanup2) cleanup2(); @@ -497,8 +493,8 @@ describe('Reorderable', () => { inner_item.dispatchEvent(drag_event); // Should find the outer item as the dragged item - expect(reorderable.source_item_id).toBe(outer_id); - expect(reorderable.source_index).toBe(0); + assert.strictEqual(reorderable.source_item_id, outer_id); + assert.strictEqual(reorderable.source_index, 0); // Clean up outer_action.destroy?.(); @@ -537,13 +533,13 @@ describe('Reorderable', () => { target_item.dispatchEvent(drop_event1); // onreorder should not be called for invalid target - expect(onreorder).not.toHaveBeenCalled(); + assert.strictEqual(onreorder.mock.calls.length, 0); // Directly call the onreorder function as the implementation would reorderable.list_params?.onreorder(0, 2); // Now the callback should have been called - expect(onreorder).toHaveBeenCalledWith(0, 2); + assert.deepEqual(onreorder.mock.calls[0], [0, 2]); // Clean up for (const r of action_results) r?.destroy(); @@ -578,15 +574,15 @@ describe('Reorderable', () => { const other_id = other_item.dataset.reorderable_item_id as ReorderableItemId; reorderable.update_indicator(other_id, 'bottom'); - expect(other_item.classList.contains(reorderable.drag_over_class!)).toBe(true); + assert.ok(other_item.classList.contains(reorderable.drag_over_class!)); // Now try to apply indicators to the source item reorderable.update_indicator(source_id, 'top'); // Indicators should be cleared instead - expect(source_item.classList.contains(reorderable.drag_over_class!)).toBe(false); - expect(reorderable.active_indicator_item_id).toBeNull(); - expect(reorderable.current_indicator).toBe('none'); + assert.ok(!source_item.classList.contains(reorderable.drag_over_class!)); + assert.isNull(reorderable.active_indicator_item_id); + assert.strictEqual(reorderable.current_indicator, 'none'); // Clean up for (const r of action_results) r?.destroy(); @@ -637,15 +633,15 @@ describe('Reorderable', () => { first_item1.dispatchEvent(drag_event1); // Should only 
affect first reorderable - expect(reorderable1.source_index).toBe(0); - expect(reorderable2.source_index).toBe(-1); + assert.strictEqual(reorderable1.source_index, 0); + assert.strictEqual(reorderable2.source_index, -1); // Directly call the callback instead of relying on event propagation onreorder1(0, 1); // Only first callback should be called - expect(onreorder1).toHaveBeenCalled(); - expect(onreorder2).not.toHaveBeenCalled(); + assert.ok(onreorder1.mock.calls.length > 0); + assert.strictEqual(onreorder2.mock.calls.length, 0); // Clean up for (const r of action_results1) r?.destroy(); @@ -676,24 +672,24 @@ describe('Reorderable', () => { }); // Check list class - expect(list.classList.contains('my_list')).toBe(true); + assert.ok(list.classList.contains('my_list')); // Check item class const first_item = items[0]; const second_item = items[1]; if (!first_item || !second_item) throw new Error('Expected first and second items'); - expect(first_item.classList.contains('my_item')).toBe(true); + assert.ok(first_item.classList.contains('my_item')); // Apply dragging class first_item.classList.add(reorderable.dragging_class!); - expect(first_item.classList.contains('my_dragging')).toBe(true); + assert.ok(first_item.classList.contains('my_dragging')); // Apply indicator second_item.classList.add(reorderable.drag_over_class!); second_item.classList.add(reorderable.drag_over_top_class!); - expect(second_item.classList.contains('my_drag_over')).toBe(true); - expect(second_item.classList.contains('my_drag_over_top')).toBe(true); + assert.ok(second_item.classList.contains('my_drag_over')); + assert.ok(second_item.classList.contains('my_drag_over_top')); // Clean up for (const r of action_results) r?.destroy(); @@ -713,12 +709,12 @@ describe('Reorderable', () => { }); // Check list role - expect(list.getAttribute('role')).toBe('list'); + assert.strictEqual(list.getAttribute('role'), 'list'); // Check item role const first_item = items[0]; if (!first_item) throw new 
Error('Expected first item'); - expect(first_item.getAttribute('role')).toBe('listitem'); + assert.strictEqual(first_item.getAttribute('role'), 'listitem'); // Clean up for (const r of action_results) r?.destroy(); diff --git a/src/test/request_tracker.svelte.test.ts b/src/test/request_tracker.svelte.test.ts index e4c89bc66..4442e8ae6 100644 --- a/src/test/request_tracker.svelte.test.ts +++ b/src/test/request_tracker.svelte.test.ts @@ -1,13 +1,18 @@ -// @slop Claude Opus 4 - // @vitest-environment jsdom -import {test, expect, describe, vi, beforeEach, afterEach} from 'vitest'; +import {test, describe, vi, beforeEach, afterEach, assert} from 'vitest'; +import { + JSONRPC_INTERNAL_ERROR, + JSONRPC_VERSION, + JsonrpcErrorCode, +} from '@fuzdev/fuz_app/http/jsonrpc.js'; +import { + create_jsonrpc_response, + is_jsonrpc_response, +} from '@fuzdev/fuz_app/http/jsonrpc_helpers.js'; +import {ThrownJsonrpcError} from '@fuzdev/fuz_app/http/jsonrpc_errors.js'; -import {RequestTracker} from '$lib/request_tracker.svelte.js'; -import {JSONRPC_INTERNAL_ERROR, JSONRPC_VERSION, JsonrpcErrorCode} from '$lib/jsonrpc.js'; -import {create_jsonrpc_response, is_jsonrpc_response} from '$lib/jsonrpc_helpers.js'; -import {ThrownJsonrpcError} from '$lib/jsonrpc_errors.js'; +import {RequestTracker} from '@fuzdev/fuz_app/actions/request_tracker.svelte.js'; describe('RequestTracker', () => { let warn_spy: ReturnType; @@ -37,27 +42,27 @@ describe('RequestTracker', () => { test('creates with default timeout', () => { const tracker = new RequestTracker(); - expect(tracker).toBeInstanceOf(RequestTracker); - expect(tracker.request_timeout_ms).toBe(120_000); - expect(tracker.pending_requests.size).toBe(0); + assert.instanceOf(tracker, RequestTracker); + assert.strictEqual(tracker.request_timeout_ms, 120_000); + assert.strictEqual(tracker.pending_requests.size, 0); }); test('creates with custom timeout', () => { const custom_timeout = 5000; const tracker = new RequestTracker(custom_timeout); - 
expect(tracker.request_timeout_ms).toBe(custom_timeout); - expect(tracker.pending_requests).toBeInstanceOf(Map); + assert.strictEqual(tracker.request_timeout_ms, custom_timeout); + assert.instanceOf(tracker.pending_requests, Map); }); test('handles zero or negative timeout values', () => { // Zero timeout should be allowed but would cause immediate timeouts const zero_tracker = new RequestTracker(0); - expect(zero_tracker.request_timeout_ms).toBe(0); + assert.strictEqual(zero_tracker.request_timeout_ms, 0); // Negative timeout should be allowed (though it's an edge case) const negative_tracker = new RequestTracker(-1000); - expect(negative_tracker.request_timeout_ms).toBe(-1000); + assert.strictEqual(negative_tracker.request_timeout_ms, -1000); }); }); @@ -68,21 +73,21 @@ describe('RequestTracker', () => { const deferred = tracker.track_request(id); // Should return a deferred promise with the correct interface - expect(deferred).toBeDefined(); - expect(deferred.promise).toBeInstanceOf(Promise); - expect(deferred.resolve).toBeInstanceOf(Function); - expect(deferred.reject).toBeInstanceOf(Function); + assert.isDefined(deferred); + assert.instanceOf(deferred.promise, Promise); + assert.instanceOf(deferred.resolve, Function); + assert.instanceOf(deferred.reject, Function); // Request should be stored with the correct properties - expect(tracker.pending_requests.has(id)).toBe(true); + assert.ok(tracker.pending_requests.has(id)); const request = tracker.pending_requests.get(id); - expect(request).toBeDefined(); - expect(request?.deferred).toBe(deferred); - expect(request?.status).toBe('pending'); - expect(request?.timeout).toBeDefined(); - expect(request?.created).toBeDefined(); - expect(typeof request?.created).toBe('string'); + assert.isDefined(request); + assert.strictEqual(request.deferred, deferred); + assert.strictEqual(request.status, 'pending'); + assert.isDefined(request.timeout); + assert.isDefined(request.created); + assert.strictEqual(typeof request.created, 
'string'); // Clean up tracker.cancel_request(id); @@ -96,10 +101,10 @@ describe('RequestTracker', () => { const deferred1 = tracker.track_request(id1); const deferred2 = tracker.track_request(id2); - expect(deferred1).not.toBe(deferred2); - expect(tracker.pending_requests.size).toBe(2); - expect(tracker.pending_requests.get(id1)?.deferred).toBe(deferred1); - expect(tracker.pending_requests.get(id2)?.deferred).toBe(deferred2); + assert.notStrictEqual(deferred1, deferred2); + assert.strictEqual(tracker.pending_requests.size, 2); + assert.strictEqual(tracker.pending_requests.get(id1)?.deferred, deferred1); + assert.strictEqual(tracker.pending_requests.get(id2)?.deferred, deferred2); // Add promise handlers to catch rejections const promise1 = deferred1.promise.catch(() => { @@ -128,7 +133,7 @@ describe('RequestTracker', () => { return err; // Return to ensure promise settles }); - expect(tracker.pending_requests.has(id)).toBe(true); + assert.ok(tracker.pending_requests.has(id)); // Fast-forward time to trigger timeout vi.advanceTimersByTime(1001); @@ -136,11 +141,11 @@ describe('RequestTracker', () => { await Promise.resolve(); // Allow promise microtasks to process // Request should be removed and promise rejected with timeout error - expect(tracker.pending_requests.has(id)).toBe(false); - expect(rejection_error).toBeDefined(); - expect(rejection_error).toBeInstanceOf(ThrownJsonrpcError); - expect(rejection_error.code).toBe(JSONRPC_INTERNAL_ERROR); - expect(rejection_error.message).toBe(`request timed out: ${id}`); + assert.ok(!tracker.pending_requests.has(id)); + assert.isDefined(rejection_error); + assert.instanceOf(rejection_error, ThrownJsonrpcError); + assert.strictEqual(rejection_error.code, JSONRPC_INTERNAL_ERROR); + assert.strictEqual(rejection_error.message, `request timed out: ${id}`); }); test('cleans up previous request with same id', () => { @@ -152,18 +157,18 @@ describe('RequestTracker', () => { // Track first request const deferred1 = 
tracker.track_request(id); const timeout1 = tracker.pending_requests.get(id)?.timeout; - expect(timeout1).toBeDefined(); + assert.isDefined(timeout1); // Track second request with same id const deferred2 = tracker.track_request(id); // Verify timeout was cleared for first request - expect(clear_timeout_spy).toHaveBeenCalledWith(timeout1); - expect(deferred1).not.toBe(deferred2); + assert.ok(clear_timeout_spy.mock.calls.some((call) => call[0] === timeout1)); + assert.notStrictEqual(deferred1, deferred2); // Only one request should exist - expect(tracker.pending_requests.size).toBe(1); - expect(tracker.pending_requests.get(id)?.deferred).toBe(deferred2); + assert.strictEqual(tracker.pending_requests.size, 1); + assert.strictEqual(tracker.pending_requests.get(id)?.deferred, deferred2); // Clean up tracker.cancel_request(id); @@ -207,10 +212,10 @@ describe('RequestTracker', () => { const result = await promise1; // The promise should have timed out rather than be settled directly - expect(result).toBe('timeout'); + assert.strictEqual(result, 'timeout'); // The first promise should not be directly resolved or rejected by the tracker - expect(promise1_settled).toBe(false); + assert.ok(!promise1_settled); // Cancel all requests to clean up tracker.cancel_all_requests(); @@ -231,14 +236,14 @@ describe('RequestTracker', () => { tracker.resolve_request(id, response); // Verify timeout was cleared - expect(clear_timeout_spy).toHaveBeenCalledWith(timeout); + assert.ok(clear_timeout_spy.mock.calls.some((call) => call[0] === timeout)); // Verify request status was updated before resolution - expect(tracker.pending_requests.has(id)).toBe(false); + assert.ok(!tracker.pending_requests.has(id)); // Verify promise resolves with correct value const result = await deferred.promise; - expect(result).toBe(response); + assert.strictEqual(result, response); }); test('logs warning for unknown request id', () => { @@ -249,8 +254,10 @@ describe('RequestTracker', () => { 
tracker.resolve_request(unknown_id, response); - expect(warn_spy).toHaveBeenCalledTimes(1); - expect(warn_spy).toHaveBeenCalledWith(`received response for unknown request: ${unknown_id}`); + assert.strictEqual(warn_spy.mock.calls.length, 1); + assert.deepEqual(warn_spy.mock.calls[0], [ + `received response for unknown request: ${unknown_id}`, + ]); }); test('handles various data types', async () => { @@ -269,7 +276,7 @@ describe('RequestTracker', () => { const response = create_jsonrpc_response(id, {method}); tracker.resolve_request(id, response); const result = await deferred.promise; - expect(result).toBe(response); + assert.strictEqual(result, response); }); await Promise.all(promises); @@ -296,7 +303,7 @@ describe('RequestTracker', () => { tracker.resolve_request(id, create_jsonrpc_response(id, {test: 'result'})); await promise; - expect(status_when_resolved).toBe('success'); + assert.strictEqual(status_when_resolved, 'success'); }); }); @@ -318,15 +325,14 @@ describe('RequestTracker', () => { tracker.reject_request(id, error); // Verify timeout was cleared - expect(clear_timeout_spy).toHaveBeenCalledWith(timeout); - expect(tracker.pending_requests.has(id)).toBe(false); + assert.ok(clear_timeout_spy.mock.calls.some((call) => call[0] === timeout)); + assert.ok(!tracker.pending_requests.has(id)); // Verify promise rejects with the correct error - await expect(deferred.promise).rejects.toBeInstanceOf(ThrownJsonrpcError); - const rejection_error = await deferred.promise.catch((err) => err); - expect(rejection_error.code).toBe(error.error.code); - expect(rejection_error.message).toBe(error.error.message); + assert.instanceOf(rejection_error, ThrownJsonrpcError); + assert.strictEqual(rejection_error.code, error.error.code); + assert.strictEqual(rejection_error.message, error.error.message); }); test('logs warning for unknown request id', () => { @@ -339,8 +345,10 @@ describe('RequestTracker', () => { error: {code: JsonrpcErrorCode.parse(-32000), message: 'test'}, }); 
- expect(warn_spy).toHaveBeenCalledTimes(1); - expect(warn_spy).toHaveBeenCalledWith(`received error for unknown request: ${unknown_id}`); + assert.strictEqual(warn_spy.mock.calls.length, 1); + assert.deepEqual(warn_spy.mock.calls[0], [ + `received error for unknown request: ${unknown_id}`, + ]); }); test('handles various error types', async () => { @@ -379,13 +387,12 @@ describe('RequestTracker', () => { for (const {id, error} of test_cases) { const deferred = tracker.track_request(id); tracker.reject_request(id, error); - await expect(deferred.promise).rejects.toBeInstanceOf(ThrownJsonrpcError); // eslint-disable-line no-await-in-loop - - const rejection_error = await deferred.promise.catch((err) => err); // eslint-disable-line no-await-in-loop - expect(rejection_error.code).toBe(error.error.code); - expect(rejection_error.message).toBe(error.error.message); + const rejection_error = await deferred.promise.catch((err) => err); + assert.instanceOf(rejection_error, ThrownJsonrpcError); + assert.strictEqual(rejection_error.code, error.error.code); + assert.strictEqual(rejection_error.message, error.error.message); - expect(tracker.pending_requests.has(id)).toBe(false); + assert.ok(!tracker.pending_requests.has(id)); } }); @@ -416,7 +423,7 @@ describe('RequestTracker', () => { }); await promise; - expect(status_when_rejected).toBe('failure'); + assert.strictEqual(status_when_rejected, 'failure'); }); }); @@ -439,12 +446,13 @@ describe('RequestTracker', () => { tracker.handle_message(message); // Verify resolve_request was called with correct arguments - expect(resolve_spy).toHaveBeenCalledWith(id, message); + assert.ok(resolve_spy.mock.calls.length > 0); + assert.deepEqual(resolve_spy.mock.calls[0], [id, message] as any); // Verify promise resolves with correct value const response = await deferred.promise; - expect(response).toBe(message); - expect(tracker.pending_requests.has(id)).toBe(false); + assert.strictEqual(response, message as any); + 
assert.ok(!tracker.pending_requests.has(id)); }); test('rejects request with error when message contains error', async () => { @@ -466,15 +474,15 @@ describe('RequestTracker', () => { tracker.handle_message(message); // Verify reject_request was called with correct arguments - expect(reject_spy).toHaveBeenCalledWith(id, message); + assert.ok(reject_spy.mock.calls.length > 0); + assert.deepEqual(reject_spy.mock.calls[0], [id, message] as any); // Verify promise rejects with correct error - await expect(deferred.promise).rejects.toBeInstanceOf(ThrownJsonrpcError); - const rejection_error = await deferred.promise.catch((err) => err); - expect(rejection_error.code).toBe(message.error.code); - expect(rejection_error.message).toBe(message.error.message); - expect(tracker.pending_requests.has(id)).toBe(false); + assert.instanceOf(rejection_error, ThrownJsonrpcError); + assert.strictEqual(rejection_error.code, message.error.code); + assert.strictEqual(rejection_error.message, message.error.message); + assert.ok(!tracker.pending_requests.has(id)); }); test('ignores notification messages (no id)', () => { @@ -494,11 +502,11 @@ describe('RequestTracker', () => { }); // Verify no resolve/reject was called - expect(resolve_spy).not.toHaveBeenCalled(); - expect(reject_spy).not.toHaveBeenCalled(); + assert.strictEqual(resolve_spy.mock.calls.length, 0); + assert.strictEqual(reject_spy.mock.calls.length, 0); // Original request should still be pending - expect(tracker.pending_requests.has(id)).toBe(true); + assert.ok(tracker.pending_requests.has(id)); // Clean up tracker.cancel_request(id); @@ -521,11 +529,11 @@ describe('RequestTracker', () => { }); // Verify no resolve/reject was called - expect(resolve_spy).not.toHaveBeenCalled(); - expect(reject_spy).not.toHaveBeenCalled(); + assert.strictEqual(resolve_spy.mock.calls.length, 0); + assert.strictEqual(reject_spy.mock.calls.length, 0); // Original request should still be pending - 
expect(tracker.pending_requests.has(id)).toBe(true); + assert.ok(tracker.pending_requests.has(id)); // Clean up tracker.cancel_request(id); @@ -546,11 +554,11 @@ describe('RequestTracker', () => { tracker.handle_message({}); // Verify no resolve/reject was called - expect(resolve_spy).not.toHaveBeenCalled(); - expect(reject_spy).not.toHaveBeenCalled(); + assert.strictEqual(resolve_spy.mock.calls.length, 0); + assert.strictEqual(reject_spy.mock.calls.length, 0); // Original request should still be pending - expect(tracker.pending_requests.has(id)).toBe(true); + assert.ok(tracker.pending_requests.has(id)); // Clean up tracker.cancel_request(id); @@ -574,8 +582,9 @@ describe('RequestTracker', () => { tracker.handle_message(message); // Verify resolve_request was called with correct arguments - expect(resolve_spy).toHaveBeenCalledWith(id, message); - expect(tracker.pending_requests.has(id)).toBe(false); + assert.ok(resolve_spy.mock.calls.length > 0); + assert.deepEqual(resolve_spy.mock.calls[0], [id, message] as any); + assert.ok(!tracker.pending_requests.has(id)); }); test('prioritizes error over result if both exist in the message', async () => { @@ -598,14 +607,14 @@ describe('RequestTracker', () => { tracker.handle_message(message); // Should call reject_request, not resolve_request - expect(reject_spy).toHaveBeenCalledWith(id, message); + assert.ok(reject_spy.mock.calls.length > 0); + assert.deepEqual(reject_spy.mock.calls[0], [id, message] as any); // Promise should be rejected with the error - await expect(deferred.promise).rejects.toBeInstanceOf(ThrownJsonrpcError); - const rejection_error = await deferred.promise.catch((err) => err); - expect(rejection_error.code).toBe(message.error.code); - expect(rejection_error.message).toBe(message.error.message); + assert.instanceOf(rejection_error, ThrownJsonrpcError); + assert.strictEqual(rejection_error.code, message.error.code); + assert.strictEqual(rejection_error.message, message.error.message); }); }); @@ -621,10 
+630,10 @@ describe('RequestTracker', () => { tracker.cancel_request(id); // Verify timeout was cleared - expect(clear_timeout_spy).toHaveBeenCalledWith(timeout); + assert.ok(clear_timeout_spy.mock.calls.some((call) => call[0] === timeout)); // Request should be removed - expect(tracker.pending_requests.has(id)).toBe(false); + assert.ok(!tracker.pending_requests.has(id)); }); test('does nothing for unknown request id', () => { @@ -635,7 +644,7 @@ describe('RequestTracker', () => { tracker.cancel_request(unknown_id); // Should not attempt to clear any timeout - expect(clear_timeout_spy).not.toHaveBeenCalled(); + assert.strictEqual(clear_timeout_spy.mock.calls.length, 0); }); test('handles cancel without affecting other requests', () => { @@ -649,8 +658,8 @@ describe('RequestTracker', () => { tracker.cancel_request(id1); // Only the specified request should be removed - expect(tracker.pending_requests.has(id1)).toBe(false); - expect(tracker.pending_requests.has(id2)).toBe(true); + assert.ok(!tracker.pending_requests.has(id1)); + assert.ok(tracker.pending_requests.has(id2)); // Clean up tracker.cancel_request(id2); @@ -691,14 +700,14 @@ describe('RequestTracker', () => { const result = await promise; // Result should be timeout, not a resolution or rejection - expect(result).toBe('timeout'); + assert.strictEqual(result, 'timeout'); // Request should be removed - expect(tracker.pending_requests.has(id)).toBe(false); + assert.ok(!tracker.pending_requests.has(id)); // Promise should be neither resolved nor rejected directly - expect(was_resolved).toBe(false); - expect(was_rejected).toBe(false); + assert.ok(!was_resolved); + assert.ok(!was_rejected); }); }); @@ -718,18 +727,22 @@ describe('RequestTracker', () => { const timeout2 = tracker.pending_requests.get(id2)?.timeout; // Set up promise rejection tracking - const promise1 = expect(deferred1.promise).rejects.toBeInstanceOf(ThrownJsonrpcError); - const promise2 = 
expect(deferred2.promise).rejects.toBeInstanceOf(ThrownJsonrpcError); + const promise1 = deferred1.promise.catch((err) => { + assert.instanceOf(err, ThrownJsonrpcError); + }); + const promise2 = deferred2.promise.catch((err) => { + assert.instanceOf(err, ThrownJsonrpcError); + }); // Cancel all requests tracker.cancel_all_requests(custom_reason); // Verify timeouts were cleared - expect(clear_timeout_spy).toHaveBeenCalledWith(timeout1); - expect(clear_timeout_spy).toHaveBeenCalledWith(timeout2); + assert.ok(clear_timeout_spy.mock.calls.some((call) => call[0] === timeout1)); + assert.ok(clear_timeout_spy.mock.calls.some((call) => call[0] === timeout2)); // All requests should be removed - expect(tracker.pending_requests.size).toBe(0); + assert.strictEqual(tracker.pending_requests.size, 0); // Wait for promise rejections to complete await Promise.allSettled([promise1, promise2]); @@ -740,10 +753,12 @@ describe('RequestTracker', () => { const id = 'req_1'; const deferred = tracker.track_request(id); - const promise = expect(deferred.promise).rejects.toBeInstanceOf(ThrownJsonrpcError); + const promise = deferred.promise.catch((err) => { + assert.instanceOf(err, ThrownJsonrpcError); + }); tracker.cancel_all_requests(); - expect(tracker.pending_requests.size).toBe(0); + assert.strictEqual(tracker.pending_requests.size, 0); await promise; }); @@ -755,7 +770,7 @@ describe('RequestTracker', () => { tracker.cancel_all_requests(); // Should not attempt to clear any timeouts - expect(clear_timeout_spy).not.toHaveBeenCalled(); + assert.strictEqual(clear_timeout_spy.mock.calls.length, 0); }); test('sets failure status before rejecting', async () => { @@ -781,7 +796,7 @@ describe('RequestTracker', () => { tracker.cancel_all_requests(); await promise; - expect(status_when_rejected).toBe('failure'); + assert.strictEqual(status_when_rejected, 'failure'); }); test('rejects with ThrownJsonrpcError instance when cancelling all requests', async () => { @@ -791,7 +806,9 @@ 
describe('RequestTracker', () => { const deferred = tracker.track_request(id); // Set up testing for ThrownJsonrpcError instance - const promise = expect(deferred.promise).rejects.toBeInstanceOf(ThrownJsonrpcError); + const promise = deferred.promise.catch((err) => { + assert.instanceOf(err, ThrownJsonrpcError); + }); tracker.cancel_all_requests(); @@ -813,12 +830,12 @@ describe('RequestTracker', () => { const deferred2 = tracker.track_request(id); // Should be different deferred objects - expect(deferred1).not.toBe(deferred2); - expect(tracker.pending_requests.size).toBe(1); - expect(tracker.pending_requests.get(id)?.deferred).toBe(deferred2); + assert.notStrictEqual(deferred1, deferred2); + assert.strictEqual(tracker.pending_requests.size, 1); + assert.strictEqual(tracker.pending_requests.get(id)?.deferred, deferred2); // Should have cleared the timeout from the first request - expect(clear_timeout_spy).toHaveBeenCalledWith(timeout1); + assert.ok(clear_timeout_spy.mock.calls.some((call) => call[0] === timeout1)); // Clean up tracker.cancel_request(id); @@ -835,14 +852,14 @@ describe('RequestTracker', () => { for (const {id, method} of test_cases) { const deferred = tracker.track_request(id); - expect(tracker.pending_requests.has(id)).toBe(true); + assert.ok(tracker.pending_requests.has(id)); const response = create_jsonrpc_response(id, {method}); tracker.resolve_request(id, response); - expect(tracker.pending_requests.has(id)).toBe(false); + assert.ok(!tracker.pending_requests.has(id)); - const result = await deferred.promise; // eslint-disable-line no-await-in-loop - expect(result).toBe(response); + const result = await deferred.promise; + assert.strictEqual(result, response); } }); @@ -860,13 +877,13 @@ describe('RequestTracker', () => { }); const result = await deferred.promise; - expect(result).toEqual({ + assert.deepEqual(result as any, { jsonrpc: JSONRPC_VERSION, id, method: 'test', result: null, }); - expect(tracker.pending_requests.has(id)).toBe(false); + 
assert.ok(!tracker.pending_requests.has(id)); }); test('request timeout uses correct error object', async () => { @@ -882,9 +899,9 @@ describe('RequestTracker', () => { vi.advanceTimersByTime(101); const error = await error_promise; - expect(error).toBeInstanceOf(ThrownJsonrpcError); - expect(error.code).toBe(JSONRPC_INTERNAL_ERROR); - expect(error.message).toBe(`request timed out: ${id}`); + assert.instanceOf(error, ThrownJsonrpcError); + assert.strictEqual(error.code, JSONRPC_INTERNAL_ERROR); + assert.strictEqual(error.message, `request timed out: ${id}`); }); test('handles undefined timeout when clearing timeouts', () => { @@ -903,7 +920,7 @@ describe('RequestTracker', () => { tracker.cancel_request(id); // Request should be removed - expect(tracker.pending_requests.has(id)).toBe(false); + assert.ok(!tracker.pending_requests.has(id)); // Cleanup clearTimeout(original_timeout); @@ -951,7 +968,7 @@ describe('RequestTracker', () => { // Promise should resolve with first value const result = await deferred.promise; - expect(result).toEqual({ + assert.deepEqual(result, { jsonrpc: '2.0', id, result: { @@ -960,7 +977,7 @@ describe('RequestTracker', () => { }); // Warnings should be logged for the duplicate calls - expect(warn_spy).toHaveBeenCalledTimes(2); + assert.strictEqual(warn_spy.mock.calls.length, 2); }); }); @@ -971,8 +988,8 @@ describe('RequestTracker', () => { // Track the request const deferred = tracker.track_request(id); - expect(tracker.pending_requests.has(id)).toBe(true); - expect(tracker.pending_requests.get(id)?.status).toBe('pending'); + assert.ok(tracker.pending_requests.has(id)); + assert.strictEqual(tracker.pending_requests.get(id)?.status, 'pending'); // Resolve the request const response = create_jsonrpc_response(id, {status: 'success'}); @@ -982,11 +999,11 @@ describe('RequestTracker', () => { const result = await deferred.promise; // Request should be resolved and removed - expect(result).toEqual({ + assert.deepEqual(result, { ...response, 
result: {status: 'success'}, }); - expect(tracker.pending_requests.has(id)).toBe(false); + assert.ok(!tracker.pending_requests.has(id)); }); test('handles simultaneous requests with different IDs', async () => { @@ -1000,7 +1017,7 @@ describe('RequestTracker', () => { })); // All requests should be pending - expect(tracker.pending_requests.size).toBe(ids.length); + assert.strictEqual(tracker.pending_requests.size, ids.length); // Resolve them in reverse order for (let i = ids.length - 1; i >= 0; i--) { @@ -1014,14 +1031,14 @@ describe('RequestTracker', () => { // Verify each result matches its request results.forEach((result, index) => { - expect(result.id).toBe(ids[index]); + assert.strictEqual(result.id, ids[index]); if (is_jsonrpc_response(result)) { - expect(result.result.test).toBe('result'); + assert.strictEqual(result.result.test, 'result'); } }); // All requests should be removed - expect(tracker.pending_requests.size).toBe(0); + assert.strictEqual(tracker.pending_requests.size, 0); }); test('handles a mix of resolved, rejected, and timed out requests', async () => { @@ -1051,19 +1068,19 @@ describe('RequestTracker', () => { // Set up promises to check results const resolve_promise = resolve_deferred.promise.then((result) => { - expect(result).toHaveProperty('method', 'test_method'); + assert.strictEqual((result as any).method, 'test_method'); return true; }); const reject_promise = reject_deferred.promise.catch((error) => { - expect(error).toBeInstanceOf(ThrownJsonrpcError); - expect(error.message).toBe('rejected'); + assert.instanceOf(error, ThrownJsonrpcError); + assert.strictEqual(error.message, 'rejected'); return true; }); const timeout_promise = timeout_deferred.promise.catch((error) => { - expect(error).toBeInstanceOf(ThrownJsonrpcError); - expect(error.message).toBe(`request timed out: ${timeout_id}`); + assert.instanceOf(error, ThrownJsonrpcError); + assert.strictEqual(error.message, `request timed out: ${timeout_id}`); return true; }); @@ -1071,7 
+1088,7 @@ describe('RequestTracker', () => { await Promise.allSettled([resolve_promise, reject_promise, timeout_promise]); // All requests should be removed - expect(tracker.pending_requests.size).toBe(0); + assert.strictEqual(tracker.pending_requests.size, 0); }); }); }); diff --git a/src/test/server/backend_websocket_transport.test.ts b/src/test/server/backend_websocket_transport.test.ts new file mode 100644 index 000000000..5982a86dd --- /dev/null +++ b/src/test/server/backend_websocket_transport.test.ts @@ -0,0 +1,339 @@ +import {describe, test, assert} from 'vitest'; +import {WSContext} from 'hono/ws'; + +import {BackendWebsocketTransport} from '@fuzdev/fuz_app/actions/transports_ws_backend.js'; +import {WS_CLOSE_SESSION_REVOKED} from '@fuzdev/fuz_app/actions/transports.js'; +import type {Uuid} from '../../lib/zod_helpers.js'; + +interface MockWs { + ws: WSContext; + closed: {code?: number; reason?: string} | null; + sent: Array<string | ArrayBuffer | Uint8Array>; +} + +/** Create a mock WSContext that records `send` and `close` calls. 
*/ +const create_mock_ws = (): MockWs => { + const mock: MockWs = { + ws: null!, + closed: null, + sent: [], + }; + mock.ws = new WSContext({ + send: (data) => { + mock.sent.push(data); + }, + close: (code, reason) => { + mock.closed = {code, reason}; + }, + readyState: 1, // OPEN + }); + return mock; +}; + +const ACCOUNT_A = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' as Uuid; +const ACCOUNT_B = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb' as Uuid; +const TOKEN_HASH_1 = 'hash_session_1'; +const TOKEN_HASH_2 = 'hash_session_2'; + +describe('BackendWebsocketTransport', () => { + describe('add_connection', () => { + test('returns a connection ID and makes transport ready', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + assert.ok(!transport.is_ready()); + const id = transport.add_connection(m.ws, TOKEN_HASH_1, ACCOUNT_A); + assert.ok(id); + assert.ok(transport.is_ready()); + }); + + test('accepts null token_hash for bearer token connections', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + const id = transport.add_connection(m.ws, null, ACCOUNT_A); + assert.ok(id); + assert.ok(transport.is_ready()); + }); + }); + + describe('remove_connection', () => { + test('removes the connection and makes transport not ready', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + transport.add_connection(m.ws, TOKEN_HASH_1, ACCOUNT_A); + assert.ok(transport.is_ready()); + + transport.remove_connection(m.ws); + assert.ok(!transport.is_ready()); + }); + + test('is idempotent — second call is a no-op', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + transport.add_connection(m.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.remove_connection(m.ws); + transport.remove_connection(m.ws); // should not throw + assert.ok(!transport.is_ready()); + }); + }); + + describe('close_sockets_for_session', () => { + test('closes 
matching sockets and returns count', () => { + const transport = new BackendWebsocketTransport(); + const m1 = create_mock_ws(); + const m2 = create_mock_ws(); + + transport.add_connection(m1.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.add_connection(m2.ws, TOKEN_HASH_2, ACCOUNT_A); + + const count = transport.close_sockets_for_session(TOKEN_HASH_1); + assert.strictEqual(count, 1); + assert.strictEqual(m1.closed?.code, WS_CLOSE_SESSION_REVOKED); + assert.strictEqual(m1.closed?.reason, 'Session revoked'); + assert.strictEqual(m2.closed, null); + }); + + test('closes multiple sockets with the same session', () => { + const transport = new BackendWebsocketTransport(); + const m1 = create_mock_ws(); + const m2 = create_mock_ws(); + + transport.add_connection(m1.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.add_connection(m2.ws, TOKEN_HASH_1, ACCOUNT_A); + + const count = transport.close_sockets_for_session(TOKEN_HASH_1); + assert.strictEqual(count, 2); + assert.ok(m1.closed); + assert.ok(m2.closed); + }); + + test('returns 0 when no sockets match', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + transport.add_connection(m.ws, TOKEN_HASH_1, ACCOUNT_A); + + const count = transport.close_sockets_for_session('nonexistent_hash'); + assert.strictEqual(count, 0); + assert.strictEqual(m.closed, null); + }); + + test('skips connections with null token_hash', () => { + const transport = new BackendWebsocketTransport(); + const m_bearer = create_mock_ws(); + const m_session = create_mock_ws(); + + transport.add_connection(m_bearer.ws, null, ACCOUNT_A); + transport.add_connection(m_session.ws, TOKEN_HASH_1, ACCOUNT_A); + + const count = transport.close_sockets_for_session(TOKEN_HASH_1); + assert.strictEqual(count, 1); + assert.strictEqual(m_bearer.closed, null); + assert.ok(m_session.closed); + }); + + test('cleans up tracking state after revocation', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); 
+ + transport.add_connection(m.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.close_sockets_for_session(TOKEN_HASH_1); + + assert.ok(!transport.is_ready()); + // remove_connection after revocation is safe (idempotent) + transport.remove_connection(m.ws); + assert.ok(!transport.is_ready()); + }); + }); + + describe('close_sockets_for_account', () => { + test('closes all sockets for an account across sessions', () => { + const transport = new BackendWebsocketTransport(); + const m1 = create_mock_ws(); + const m2 = create_mock_ws(); + const m3 = create_mock_ws(); + + transport.add_connection(m1.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.add_connection(m2.ws, TOKEN_HASH_2, ACCOUNT_A); + transport.add_connection(m3.ws, TOKEN_HASH_1, ACCOUNT_B); + + const count = transport.close_sockets_for_account(ACCOUNT_A); + assert.strictEqual(count, 2); + assert.ok(m1.closed); + assert.ok(m2.closed); + assert.strictEqual(m3.closed, null); + assert.ok(transport.is_ready()); // m3 still connected + }); + + test('returns 0 when no sockets match', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + transport.add_connection(m.ws, TOKEN_HASH_1, ACCOUNT_A); + + const count = transport.close_sockets_for_account(ACCOUNT_B); + assert.strictEqual(count, 0); + }); + + test('closes connections with null token_hash', () => { + const transport = new BackendWebsocketTransport(); + const m_bearer = create_mock_ws(); + const m_session = create_mock_ws(); + + transport.add_connection(m_bearer.ws, null, ACCOUNT_A); + transport.add_connection(m_session.ws, TOKEN_HASH_1, ACCOUNT_A); + + const count = transport.close_sockets_for_account(ACCOUNT_A); + assert.strictEqual(count, 2); + assert.ok(m_bearer.closed); + assert.ok(m_session.closed); + }); + }); + + describe('is_ready', () => { + test('stays ready after partial removal', () => { + const transport = new BackendWebsocketTransport(); + const m1 = create_mock_ws(); + const m2 = create_mock_ws(); + const m3 = 
create_mock_ws(); + + transport.add_connection(m1.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.add_connection(m2.ws, TOKEN_HASH_2, ACCOUNT_A); + transport.add_connection(m3.ws, TOKEN_HASH_1, ACCOUNT_B); + + transport.remove_connection(m1.ws); + assert.ok(transport.is_ready()); + + transport.remove_connection(m2.ws); + assert.ok(transport.is_ready()); + + transport.remove_connection(m3.ws); + assert.ok(!transport.is_ready()); + }); + + test('stays ready after partial revocation', () => { + const transport = new BackendWebsocketTransport(); + const m1 = create_mock_ws(); + const m2 = create_mock_ws(); + + transport.add_connection(m1.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.add_connection(m2.ws, TOKEN_HASH_2, ACCOUNT_B); + + transport.close_sockets_for_session(TOKEN_HASH_1); + assert.ok(transport.is_ready()); // m2 still connected + }); + }); + + describe('broadcast after revocation', () => { + test('send only reaches remaining connections', async () => { + const transport = new BackendWebsocketTransport(); + const m1 = create_mock_ws(); + const m2 = create_mock_ws(); + const m3 = create_mock_ws(); + + transport.add_connection(m1.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.add_connection(m2.ws, TOKEN_HASH_2, ACCOUNT_A); + transport.add_connection(m3.ws, TOKEN_HASH_1, ACCOUNT_B); + + // Revoke account A's sockets + transport.close_sockets_for_account(ACCOUNT_A); + + // Broadcast a notification — only m3 should receive it + await transport.send({jsonrpc: '2.0', method: 'test_event', params: {}}); + + assert.strictEqual(m1.sent.length, 0); + assert.strictEqual(m2.sent.length, 0); + assert.strictEqual(m3.sent.length, 1); + }); + }); + + describe('interleaved revocation', () => { + test('session revoke then account revoke does not double-count', () => { + const transport = new BackendWebsocketTransport(); + const m1 = create_mock_ws(); + const m2 = create_mock_ws(); + + // Same account, different sessions + transport.add_connection(m1.ws, TOKEN_HASH_1, ACCOUNT_A); + 
transport.add_connection(m2.ws, TOKEN_HASH_2, ACCOUNT_A); + + // Revoke session 1 — closes m1 + const session_count = transport.close_sockets_for_session(TOKEN_HASH_1); + assert.strictEqual(session_count, 1); + + // Revoke account A — only m2 remains, m1 already cleaned up + const account_count = transport.close_sockets_for_account(ACCOUNT_A); + assert.strictEqual(account_count, 1); + + assert.ok(!transport.is_ready()); + }); + + test('account revoke then session revoke returns 0', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + transport.add_connection(m.ws, TOKEN_HASH_1, ACCOUNT_A); + + transport.close_sockets_for_account(ACCOUNT_A); + const count = transport.close_sockets_for_session(TOKEN_HASH_1); + assert.strictEqual(count, 0); + }); + }); + + describe('bearer token connections', () => { + test('session revoke skips bearer, account revoke catches both', () => { + const transport = new BackendWebsocketTransport(); + const m_bearer = create_mock_ws(); + const m_session = create_mock_ws(); + + transport.add_connection(m_bearer.ws, null, ACCOUNT_A); + transport.add_connection(m_session.ws, TOKEN_HASH_1, ACCOUNT_A); + + // Session revoke only catches the session connection + const session_count = transport.close_sockets_for_session(TOKEN_HASH_1); + assert.strictEqual(session_count, 1); + assert.strictEqual(m_bearer.closed, null); + assert.ok(m_session.closed); + + // Account revoke catches the remaining bearer connection + const account_count = transport.close_sockets_for_account(ACCOUNT_A); + assert.strictEqual(account_count, 1); + assert.ok(m_bearer.closed); + }); + + test('remove_connection after bearer add is safe', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + transport.add_connection(m.ws, null, ACCOUNT_A); + transport.remove_connection(m.ws); + assert.ok(!transport.is_ready()); + }); + }); + + describe('revocation then remove_connection', () => { + 
test('remove_connection after close_sockets_for_session is safe', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + transport.add_connection(m.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.close_sockets_for_session(TOKEN_HASH_1); + + // onClose handler would call remove_connection — must not throw + transport.remove_connection(m.ws); + }); + + test('remove_connection after close_sockets_for_account is safe', () => { + const transport = new BackendWebsocketTransport(); + const m = create_mock_ws(); + + transport.add_connection(m.ws, TOKEN_HASH_1, ACCOUNT_A); + transport.close_sockets_for_account(ACCOUNT_A); + + transport.remove_connection(m.ws); + }); + }); +}); diff --git a/src/test/server/env_file_helpers.basic.test.ts b/src/test/server/env_file_helpers.basic.test.ts index 9fce7a597..a3dac5525 100644 --- a/src/test/server/env_file_helpers.basic.test.ts +++ b/src/test/server/env_file_helpers.basic.test.ts @@ -1,11 +1,8 @@ -// @slop Claude Sonnet 4.5 - -import {test, expect, describe} from 'vitest'; +import {test, describe, assert} from 'vitest'; +import {assert_rejects} from '@fuzdev/fuz_util/testing.js'; import {update_env_variable} from '$lib/server/env_file_helpers.js'; -/* eslint-disable @typescript-eslint/require-await */ - /** * Creates an in-memory file system for testing. * No module-level mocks - uses dependency injection instead. 
@@ -30,165 +27,110 @@ const create_mock_fs = (initial_files: Record = {}) => { files[path] = content; }, get_file: (path: string): string | undefined => files[path], - get_all_files: (): Record => ({...files}), }; }; -describe('update_env_variable - basic functionality', () => { - test('updates existing variable with quotes', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"\n', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"\n'); - }); - - test('updates existing variable without quotes', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY=old_value\n', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY=new_value\n'); - }); - - test('adds new variable to empty file', async () => { - const fs = create_mock_fs({ - '/test/.env': '', - }); - - await update_env_variable('NEW_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('NEW_KEY="new_value"'); - }); - - test('adds new variable to existing file with content', async () => { - const fs = create_mock_fs({ - '/test/.env': 'EXISTING_KEY="existing_value"', - }); - - await update_env_variable('NEW_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('EXISTING_KEY="existing_value"\nNEW_KEY="new_value"'); - }); - - test('creates file if it does not exist', async () => { - const fs = create_mock_fs({}); - - await update_env_variable('NEW_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - 
- expect(fs.get_file('/test/.env')).toBe('NEW_KEY="new_value"'); - }); - - test('preserves quote style for quoted variables', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"'); - }); +// null initial means no file exists (triggers ENOENT → create) +const basic_cases: Array< + [label: string, initial: string | null, key: string, value: string, expected: string] +> = [ + [ + 'updates existing variable with quotes', + 'API_KEY="old_value"\n', + 'API_KEY', + 'new_value', + 'API_KEY="new_value"\n', + ], + [ + 'updates existing variable without quotes', + 'API_KEY=old_value\n', + 'API_KEY', + 'new_value', + 'API_KEY=new_value\n', + ], + ['adds new variable to empty file', '', 'NEW_KEY', 'new_value', 'NEW_KEY="new_value"'], + [ + 'adds new variable to existing file with content', + 'EXISTING_KEY="existing_value"', + 'NEW_KEY', + 'new_value', + 'EXISTING_KEY="existing_value"\nNEW_KEY="new_value"', + ], + ['creates file if it does not exist', null, 'NEW_KEY', 'new_value', 'NEW_KEY="new_value"'], + [ + 'preserves quote style for quoted variables', + 'API_KEY="old_value"', + 'API_KEY', + 'new_value', + 'API_KEY="new_value"', + ], + [ + 'preserves quote style for unquoted variables', + 'API_KEY=old_value', + 'API_KEY', + 'new_value', + 'API_KEY=new_value', + ], +]; + +const formatting_cases: Array< + [label: string, initial: string, key: string, value: string, expected: string] +> = [ + [ + 'preserves comments above variables', + '# This is a comment\nAPI_KEY="old_value"\n# Another comment', + 'API_KEY', + 'new_value', + '# This is a comment\nAPI_KEY="new_value"\n# Another comment', + ], + [ + 'preserves empty lines', + 'API_KEY="old_value"\n\nOTHER_KEY="other_value"', + 'API_KEY', + 'new_value', + 
'API_KEY="new_value"\n\nOTHER_KEY="other_value"', + ], + [ + 'handles file with trailing newline', + 'API_KEY="old_value"\n', + 'API_KEY', + 'new_value', + 'API_KEY="new_value"\n', + ], + [ + 'handles file without trailing newline', + 'API_KEY="old_value"', + 'API_KEY', + 'new_value', + 'API_KEY="new_value"', + ], +]; - test('preserves quote style for unquoted variables', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY=old_value', - }); +describe('update_env_variable - basic functionality', () => { + test.each(basic_cases)('%s', async (_label, initial, key, value, expected) => { + const fs = create_mock_fs(initial !== null ? {'/test/.env': initial} : {}); - await update_env_variable('API_KEY', 'new_value', { + await update_env_variable(key, value, { env_file_path: '/test/.env', read_file: fs.read_file, write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe('API_KEY=new_value'); + assert.strictEqual(fs.get_file('/test/.env'), expected); }); }); describe('update_env_variable - formatting preservation', () => { - test('preserves comments above variables', async () => { - const fs = create_mock_fs({ - '/test/.env': '# This is a comment\nAPI_KEY="old_value"\n# Another comment', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe( - '# This is a comment\nAPI_KEY="new_value"\n# Another comment', - ); - }); + test.each(formatting_cases)('%s', async (_label, initial, key, value, expected) => { + const fs = create_mock_fs({'/test/.env': initial}); - test('preserves empty lines', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"\n\nOTHER_KEY="other_value"', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - 
expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"\n\nOTHER_KEY="other_value"'); - }); - - test('handles file with trailing newline', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"\n', - }); - - await update_env_variable('API_KEY', 'new_value', { + await update_env_variable(key, value, { env_file_path: '/test/.env', read_file: fs.read_file, write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"\n'); - }); - - test('handles file without trailing newline', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"'); + assert.strictEqual(fs.get_file('/test/.env'), expected); }); }); @@ -199,13 +141,14 @@ describe('update_env_variable - error handling', () => { throw new Error(error_message); }; - await expect( + const error = await assert_rejects(() => update_env_variable('API_KEY', 'new_value', { env_file_path: '/test/.env', read_file: custom_read, - write_file: async () => {}, // eslint-disable-line @typescript-eslint/no-empty-function + write_file: async () => {}, }), - ).rejects.toThrow(error_message); + ); + assert.include(error.message, error_message); }); test('propagates write file error', async () => { @@ -214,12 +157,13 @@ describe('update_env_variable - error handling', () => { throw new Error(error_message); }; - await expect( + const error = await assert_rejects(() => update_env_variable('API_KEY', 'new_value', { env_file_path: '/test/.env', read_file: async () => '', write_file: custom_write, }), - ).rejects.toThrow(error_message); + ); + assert.include(error.message, error_message); }); }); diff --git a/src/test/server/env_file_helpers.comments.test.ts b/src/test/server/env_file_helpers.comments.test.ts index c4aa61dbd..fe6ef5083 
100644 --- a/src/test/server/env_file_helpers.comments.test.ts +++ b/src/test/server/env_file_helpers.comments.test.ts @@ -1,11 +1,7 @@ -// @slop Claude Sonnet 4.5 - -import {test, expect, describe} from 'vitest'; +import {test, describe, assert} from 'vitest'; import {update_env_variable} from '$lib/server/env_file_helpers.js'; -/* eslint-disable @typescript-eslint/require-await */ - const create_mock_fs = (initial_files: Record = {}) => { const files = {...initial_files}; return { @@ -28,204 +24,119 @@ const create_mock_fs = (initial_files: Record = {}) => { }; }; -describe('update_env_variable - inline comment preservation', () => { - test('preserves inline comment after quoted value', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value" # this is important', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value" # this is important'); - }); - - test('preserves inline comment after unquoted value', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY=old_value # comment here', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY=new_value # comment here'); - }); - - test('preserves inline comment with no space before hash', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"# no space comment', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"# no space comment'); - }); - - test('preserves inline comment with multiple spaces before hash', async () => { - const fs = create_mock_fs({ - '/test/.env': 
'API_KEY="old_value" # spaced comment', - }); +const comment_cases: Array< + [label: string, initial: string, key: string, value: string, expected: string] +> = [ + [ + 'preserves inline comment after quoted value', + 'API_KEY="old_value" # this is important', + 'API_KEY', + 'new_value', + 'API_KEY="new_value" # this is important', + ], + [ + 'preserves inline comment after unquoted value', + 'API_KEY=old_value # comment here', + 'API_KEY', + 'new_value', + 'API_KEY=new_value # comment here', + ], + [ + 'preserves inline comment with no space before hash', + 'API_KEY="old_value"# no space comment', + 'API_KEY', + 'new_value', + 'API_KEY="new_value"# no space comment', + ], + [ + 'preserves inline comment with multiple spaces before hash', + 'API_KEY="old_value" # spaced comment', + 'API_KEY', + 'new_value', + 'API_KEY="new_value" # spaced comment', + ], + [ + 'does not treat hash inside quoted value as comment', + 'API_KEY="value#with#hashes" # real comment', + 'API_KEY', + 'new_value', + 'API_KEY="new_value" # real comment', + ], + [ + 'treats hash in unquoted value as start of comment', + 'API_KEY=value#notacomment', + 'API_KEY', + 'new_value', + 'API_KEY=new_value#notacomment', + ], + [ + 'handles empty inline comment', + 'API_KEY="old_value" #', + 'API_KEY', + 'new_value', + 'API_KEY="new_value" #', + ], + [ + 'preserves inline comment with special characters', + 'API_KEY="old" # TODO: update this! @important', + 'API_KEY', + 'new', + 'API_KEY="new" # TODO: update this! 
@important', + ], + [ + 'handles single quotes with inline comment', + "API_KEY='old_value' # comment", + 'API_KEY', + 'new_value', + 'API_KEY="new_value" # comment', + ], + [ + 'does not add inline comment when original has none', + 'API_KEY="old_value"', + 'API_KEY', + 'new_value', + 'API_KEY="new_value"', + ], + [ + 'preserves multiple hashes in comment', + 'API_KEY="old" # comment ## with ### hashes', + 'API_KEY', + 'new', + 'API_KEY="new" # comment ## with ### hashes', + ], + [ + 'preserves comment after escaped backslash at end of value', + 'API_KEY="test\\\\" # important comment', + 'API_KEY', + 'new', + 'API_KEY="new" # important comment', + ], + [ + 'preserves comment after single escaped backslash', + 'PATH="C:\\\\temp\\\\" # Windows path', + 'PATH', + 'D:\\\\new', + 'PATH="D:\\\\new" # Windows path', + ], + [ + 'handles escaped quote followed by more content (not a closing quote)', + 'MSG="Say \\"hello\\" please" # greeting', + 'MSG', + 'new message', + 'MSG="new message" # greeting', + ], +]; - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value" # spaced comment'); - }); - - test('does not treat hash inside quoted value as comment', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="value#with#hashes" # real comment', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value" # real comment'); - }); - - test('treats hash in unquoted value as start of comment', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY=value#notacomment', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // The #notacomment 
part should be preserved as comment - expect(fs.get_file('/test/.env')).toBe('API_KEY=new_value#notacomment'); - }); - - test('handles empty inline comment', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value" #', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value" #'); - }); - - test('preserves inline comment with special characters', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old" # TODO: update this! @important', - }); - - await update_env_variable('API_KEY', 'new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new" # TODO: update this! @important'); - }); - - test('handles single quotes with inline comment', async () => { - const fs = create_mock_fs({ - '/test/.env': "API_KEY='old_value' # comment", - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // Should preserve quotes and comment - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value" # comment'); - }); - - test('does not add inline comment when original has none', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"'); - }); - - test('preserves multiple hashes in comment', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old" # comment ## with ### hashes', - }); - - await update_env_variable('API_KEY', 'new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - 
expect(fs.get_file('/test/.env')).toBe('API_KEY="new" # comment ## with ### hashes'); - }); - - test('preserves comment after escaped backslash at end of value', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="test\\\\" # important comment', - }); - - await update_env_variable('API_KEY', 'new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // The \\\\ is two backslashes (one escaped), then closing quote, then comment - expect(fs.get_file('/test/.env')).toBe('API_KEY="new" # important comment'); - }); - - test('preserves comment after single escaped backslash', async () => { - const fs = create_mock_fs({ - '/test/.env': 'PATH="C:\\\\temp\\\\" # Windows path', - }); - - await update_env_variable('PATH', 'D:\\\\new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('PATH="D:\\\\new" # Windows path'); - }); - - test('handles escaped quote followed by more content (not a closing quote)', async () => { - const fs = create_mock_fs({ - '/test/.env': 'MSG="Say \\"hello\\" please" # greeting', - }); +describe('update_env_variable - inline comment preservation', () => { + test.each(comment_cases)('%s', async (_label, initial, key, value, expected) => { + const fs = create_mock_fs({'/test/.env': initial}); - await update_env_variable('MSG', 'new message', { + await update_env_variable(key, value, { env_file_path: '/test/.env', read_file: fs.read_file, write_file: fs.write_file, }); - // The \" are escaped quotes, final " is closing quote - expect(fs.get_file('/test/.env')).toBe('MSG="new message" # greeting'); + assert.strictEqual(fs.get_file('/test/.env'), expected); }); }); diff --git a/src/test/server/env_file_helpers.duplicates.test.ts b/src/test/server/env_file_helpers.duplicates.test.ts index 2ac55d968..00d7d691f 100644 --- a/src/test/server/env_file_helpers.duplicates.test.ts +++ 
b/src/test/server/env_file_helpers.duplicates.test.ts @@ -1,11 +1,7 @@ -// @slop Claude Sonnet 4.5 - -import {test, expect, describe} from 'vitest'; +import {test, describe, assert} from 'vitest'; import {update_env_variable} from '$lib/server/env_file_helpers.js'; -/* eslint-disable @typescript-eslint/require-await */ - const create_mock_fs = (initial_files: Record = {}) => { const files = {...initial_files}; return { @@ -28,172 +24,98 @@ const create_mock_fs = (initial_files: Record = {}) => { }; }; -describe('update_env_variable - duplicate keys (LAST wins behavior)', () => { - test('updates LAST occurrence when key appears twice', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="first_value"\nAPI_KEY="second_value"', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // First occurrence stays unchanged, second (last) is updated - expect(fs.get_file('/test/.env')).toBe('API_KEY="first_value"\nAPI_KEY="new_value"'); - }); - - test('updates LAST occurrence when key appears three times', async () => { - const fs = create_mock_fs({ - '/test/.env': 'KEY="first"\nKEY="second"\nKEY="third"', - }); - - await update_env_variable('KEY', 'updated', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - const result = fs.get_file('/test/.env'); - expect(result).toBe('KEY="first"\nKEY="second"\nKEY="updated"'); - - // Verify first two occurrences are unchanged - const lines = result?.split('\n') || []; - expect(lines[0]).toBe('KEY="first"'); - expect(lines[1]).toBe('KEY="second"'); - expect(lines[2]).toBe('KEY="updated"'); - }); - - test('matches dotenv behavior: last wins', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY=first_value\nAPI_KEY=second_value\nAPI_KEY=third_value', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: 
fs.read_file, - write_file: fs.write_file, - }); - - // dotenv would use "third_value", so we update the third occurrence - const result = fs.get_file('/test/.env'); - expect(result).toBe('API_KEY=first_value\nAPI_KEY=second_value\nAPI_KEY=new_value'); - }); - - test('updates LAST occurrence with inline comments preserved', async () => { - const fs = create_mock_fs({ - '/test/.env': 'KEY="first" # dev\nKEY="second" # prod', - }); - - await update_env_variable('KEY', 'updated', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('KEY="first" # dev\nKEY="updated" # prod'); - }); - - test('updates LAST occurrence when duplicates have different quote styles', async () => { - const fs = create_mock_fs({ - '/test/.env': 'KEY=unquoted_first\nKEY="quoted_second"', - }); - - await update_env_variable('KEY', 'new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // First stays unquoted, second (last) is updated and stays quoted - expect(fs.get_file('/test/.env')).toBe('KEY=unquoted_first\nKEY="new"'); - }); +const duplicate_cases: Array< + [label: string, initial: string, key: string, value: string, expected: string] +> = [ + [ + 'updates LAST occurrence when key appears twice', + 'API_KEY="first_value"\nAPI_KEY="second_value"', + 'API_KEY', + 'new_value', + 'API_KEY="first_value"\nAPI_KEY="new_value"', + ], + [ + 'updates LAST occurrence when key appears three times', + 'KEY="first"\nKEY="second"\nKEY="third"', + 'KEY', + 'updated', + 'KEY="first"\nKEY="second"\nKEY="updated"', + ], + [ + 'matches dotenv behavior: last wins', + 'API_KEY=first_value\nAPI_KEY=second_value\nAPI_KEY=third_value', + 'API_KEY', + 'new_value', + 'API_KEY=first_value\nAPI_KEY=second_value\nAPI_KEY=new_value', + ], + [ + 'updates LAST occurrence with inline comments preserved', + 'KEY="first" # dev\nKEY="second" # prod', + 'KEY', + 'updated', + 'KEY="first" # 
dev\nKEY="updated" # prod', + ], + [ + 'updates LAST occurrence when duplicates have different quote styles', + 'KEY=unquoted_first\nKEY="quoted_second"', + 'KEY', + 'new', + 'KEY=unquoted_first\nKEY="new"', + ], + [ + 'updates LAST occurrence when separated by other keys', + 'API_KEY="first"\nOTHER_KEY="value"\nAPI_KEY="second"', + 'API_KEY', + 'new', + 'API_KEY="first"\nOTHER_KEY="value"\nAPI_KEY="new"', + ], + [ + 'updates LAST occurrence when separated by comments', + 'API_KEY="first"\n# Comment\nAPI_KEY="second"', + 'API_KEY', + 'new', + 'API_KEY="first"\n# Comment\nAPI_KEY="new"', + ], + [ + 'updates LAST occurrence when separated by empty lines', + 'API_KEY="first"\n\nAPI_KEY="second"', + 'API_KEY', + 'new', + 'API_KEY="first"\n\nAPI_KEY="new"', + ], + [ + 'handles keys that are substrings of each other', + 'KEY="value1"\nSECRET_KEY="value2"', + 'KEY', + 'new_value', + 'KEY="new_value"\nSECRET_KEY="value2"', + ], + [ + 'handles keys that are prefixes of each other', + 'API_KEY="value1"\nAPI_KEY_SECRET="value2"', + 'API_KEY', + 'new_value', + 'API_KEY="new_value"\nAPI_KEY_SECRET="value2"', + ], + [ + 'does not match keys in comments', + '# API_KEY="commented"\nAPI_KEY="actual_value"', + 'API_KEY', + 'new_value', + '# API_KEY="commented"\nAPI_KEY="new_value"', + ], +]; - test('updates LAST occurrence when separated by other keys', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="first"\nOTHER_KEY="value"\nAPI_KEY="second"', - }); - - await update_env_variable('API_KEY', 'new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="first"\nOTHER_KEY="value"\nAPI_KEY="new"'); - }); - - test('updates LAST occurrence when separated by comments', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="first"\n# Comment\nAPI_KEY="second"', - }); - - await update_env_variable('API_KEY', 'new', { - env_file_path: '/test/.env', - read_file: 
fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="first"\n# Comment\nAPI_KEY="new"'); - }); - - test('updates LAST occurrence when separated by empty lines', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="first"\n\nAPI_KEY="second"', - }); - - await update_env_variable('API_KEY', 'new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="first"\n\nAPI_KEY="new"'); - }); - - test('handles keys that are substrings of each other', async () => { - const fs = create_mock_fs({ - '/test/.env': 'KEY="value1"\nSECRET_KEY="value2"', - }); - - await update_env_variable('KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // Should only update KEY, not SECRET_KEY - expect(fs.get_file('/test/.env')).toBe('KEY="new_value"\nSECRET_KEY="value2"'); - }); - - test('handles keys that are prefixes of each other', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="value1"\nAPI_KEY_SECRET="value2"', - }); - - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // Should only update API_KEY, not API_KEY_SECRET - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"\nAPI_KEY_SECRET="value2"'); - }); - - test('does not match keys in comments', async () => { - const fs = create_mock_fs({ - '/test/.env': '# API_KEY="commented"\nAPI_KEY="actual_value"', - }); +describe('update_env_variable - duplicate keys (LAST wins behavior)', () => { + test.each(duplicate_cases)('%s', async (_label, initial, key, value, expected) => { + const fs = create_mock_fs({'/test/.env': initial}); - await update_env_variable('API_KEY', 'new_value', { + await update_env_variable(key, value, { env_file_path: '/test/.env', read_file: fs.read_file, write_file: 
fs.write_file, }); - // Comment line should be unchanged - expect(fs.get_file('/test/.env')).toBe('# API_KEY="commented"\nAPI_KEY="new_value"'); + assert.strictEqual(fs.get_file('/test/.env'), expected); }); }); diff --git a/src/test/server/env_file_helpers.edge_cases.test.ts b/src/test/server/env_file_helpers.edge_cases.test.ts index bac58a395..e4a99d13e 100644 --- a/src/test/server/env_file_helpers.edge_cases.test.ts +++ b/src/test/server/env_file_helpers.edge_cases.test.ts @@ -1,11 +1,7 @@ -// @slop Claude Sonnet 4.5 - -import {test, expect, describe} from 'vitest'; +import {test, describe, assert} from 'vitest'; import {update_env_variable} from '$lib/server/env_file_helpers.js'; -/* eslint-disable @typescript-eslint/require-await */ - const create_mock_fs = (initial_files: Record = {}) => { const files = {...initial_files}; return { @@ -28,114 +24,77 @@ const create_mock_fs = (initial_files: Record = {}) => { }; }; -describe('update_env_variable - quote detection edge cases', () => { - test('does not add quotes when original value contains quotes but assignment does not', async () => { - const fs = create_mock_fs({ - '/test/.env': "NAME=O'Brien", - }); - - await update_env_variable('NAME', 'Smith', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // Should preserve unquoted style - expect(fs.get_file('/test/.env')).toBe('NAME=Smith'); - }); - - test('handles value with internal quotes when quoted', async () => { - const fs = create_mock_fs({ - '/test/.env': 'NAME="O\'Brien"', - }); - - await update_env_variable('NAME', 'Smith', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // Should preserve quoted style - expect(fs.get_file('/test/.env')).toBe('NAME="Smith"'); - }); - - test('handles single quote style', async () => { - const fs = create_mock_fs({ - '/test/.env': "API_KEY='old_value'", - }); - - await update_env_variable('API_KEY', 'new_value', { - 
env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // Converts to double quotes (implementation detail) - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"'); - }); - - test('handles escaped quotes in value', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="value with \\" escaped quotes"', - }); - - await update_env_variable('API_KEY', 'new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new"'); - }); - - test('handles escaped quote at end of value', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="test\\\\"', - }); - - await update_env_variable('API_KEY', 'new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new"'); - }); +const quote_detection_cases: Array< + [label: string, initial: string, key: string, value: string, expected: string] +> = [ + [ + 'does not add quotes when original value contains quotes but assignment does not', + "NAME=O'Brien", + 'NAME', + 'Smith', + 'NAME=Smith', + ], + [ + 'handles value with internal quotes when quoted', + 'NAME="O\'Brien"', + 'NAME', + 'Smith', + 'NAME="Smith"', + ], + [ + 'handles single quote style', + "API_KEY='old_value'", + 'API_KEY', + 'new_value', + 'API_KEY="new_value"', + ], + [ + 'handles escaped quotes in value', + 'API_KEY="value with \\" escaped quotes"', + 'API_KEY', + 'new', + 'API_KEY="new"', + ], + [ + 'handles escaped quote at end of value', + 'API_KEY="test\\\\"', + 'API_KEY', + 'new', + 'API_KEY="new"', + ], + [ + 'handles multiple escaped quotes in sequence', + 'API_KEY="test\\\\\\"value"', + 'API_KEY', + 'new', + 'API_KEY="new"', + ], + [ + 'handles escaped quote with inline comment', + 'API_KEY="test\\" quote" # comment', + 'API_KEY', + 'new', + 'API_KEY="new" # comment', + ], +]; - test('handles multiple 
escaped quotes in sequence', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="test\\\\\\"value"', - }); - - await update_env_variable('API_KEY', 'new', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY="new"'); - }); - - test('handles escaped quote with inline comment', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="test\\" quote" # comment', - }); +describe('update_env_variable - quote detection edge cases', () => { + test.each(quote_detection_cases)('%s', async (_label, initial, key, value, expected) => { + const fs = create_mock_fs({'/test/.env': initial}); - await update_env_variable('API_KEY', 'new', { + await update_env_variable(key, value, { env_file_path: '/test/.env', read_file: fs.read_file, write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe('API_KEY="new" # comment'); + assert.strictEqual(fs.get_file('/test/.env'), expected); }); }); describe('update_env_variable - special values', () => { test('handles empty value', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"', - }); + const fs = create_mock_fs({'/test/.env': 'API_KEY="old_value"'}); await update_env_variable('API_KEY', '', { env_file_path: '/test/.env', @@ -143,13 +102,11 @@ describe('update_env_variable - special values', () => { write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe('API_KEY=""'); + assert.strictEqual(fs.get_file('/test/.env'), 'API_KEY=""'); }); test('handles value with equals sign', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"', - }); + const fs = create_mock_fs({'/test/.env': 'API_KEY="old_value"'}); await update_env_variable('API_KEY', 'value=with=equals', { env_file_path: '/test/.env', @@ -157,13 +114,11 @@ describe('update_env_variable - special values', () => { write_file: fs.write_file, }); - 
expect(fs.get_file('/test/.env')).toBe('API_KEY="value=with=equals"'); + assert.strictEqual(fs.get_file('/test/.env'), 'API_KEY="value=with=equals"'); }); test('handles value with newlines', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"', - }); + const fs = create_mock_fs({'/test/.env': 'API_KEY="old_value"'}); await update_env_variable('API_KEY', 'value\nwith\nnewlines', { env_file_path: '/test/.env', @@ -171,13 +126,11 @@ describe('update_env_variable - special values', () => { write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe('API_KEY="value\nwith\nnewlines"'); + assert.strictEqual(fs.get_file('/test/.env'), 'API_KEY="value\nwith\nnewlines"'); }); test('handles value with backslashes (Windows paths)', async () => { - const fs = create_mock_fs({ - '/test/.env': 'PATH_KEY="old_path"', - }); + const fs = create_mock_fs({'/test/.env': 'PATH_KEY="old_path"'}); await update_env_variable('PATH_KEY', 'C:\\Users\\Admin\\Documents', { env_file_path: '/test/.env', @@ -185,13 +138,11 @@ describe('update_env_variable - special values', () => { write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe('PATH_KEY="C:\\Users\\Admin\\Documents"'); + assert.strictEqual(fs.get_file('/test/.env'), 'PATH_KEY="C:\\Users\\Admin\\Documents"'); }); test('handles value with unicode characters', async () => { - const fs = create_mock_fs({ - '/test/.env': 'UNICODE_KEY="old"', - }); + const fs = create_mock_fs({'/test/.env': 'UNICODE_KEY="old"'}); const unicode_value = '你好世界 🌍 Привет мир'; await update_env_variable('UNICODE_KEY', unicode_value, { @@ -200,13 +151,11 @@ describe('update_env_variable - special values', () => { write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe(`UNICODE_KEY="${unicode_value}"`); + assert.strictEqual(fs.get_file('/test/.env'), `UNICODE_KEY="${unicode_value}"`); }); test('handles very long values', async () => { - const fs = create_mock_fs({ - '/test/.env': 
'LONG_KEY="short"', - }); + const fs = create_mock_fs({'/test/.env': 'LONG_KEY="short"'}); const long_value = 'x'.repeat(10000); await update_env_variable('LONG_KEY', long_value, { @@ -215,13 +164,11 @@ describe('update_env_variable - special values', () => { write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe(`LONG_KEY="${long_value}"`); + assert.strictEqual(fs.get_file('/test/.env'), `LONG_KEY="${long_value}"`); }); test('handles value with JSON content', async () => { - const fs = create_mock_fs({ - '/test/.env': 'JSON_KEY="old"', - }); + const fs = create_mock_fs({'/test/.env': 'JSON_KEY="old"'}); const json_value = '{"name":"test","nested":{"key":"value"},"array":[1,2,3]}'; await update_env_variable('JSON_KEY', json_value, { @@ -230,13 +177,11 @@ describe('update_env_variable - special values', () => { write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe(`JSON_KEY="${json_value}"`); + assert.strictEqual(fs.get_file('/test/.env'), `JSON_KEY="${json_value}"`); }); test('handles value with special characters', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY="old_value"', - }); + const fs = create_mock_fs({'/test/.env': 'API_KEY="old_value"'}); await update_env_variable('API_KEY', 'value!@#$%^&*()_+-=[]{}|;:,.<>?', { env_file_path: '/test/.env', @@ -244,53 +189,47 @@ describe('update_env_variable - special values', () => { write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe('API_KEY="value!@#$%^&*()_+-=[]{}|;:,.<>?"'); + assert.strictEqual(fs.get_file('/test/.env'), 'API_KEY="value!@#$%^&*()_+-=[]{}|;:,.<>?"'); }); }); -describe('update_env_variable - whitespace handling', () => { - test('handles key with spaces around equals sign', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY = "old_value"', - }); +const whitespace_cases: Array< + [label: string, initial: string, key: string, value: string, expected: string] +> = [ + [ + 'handles key with spaces around equals 
sign', + 'API_KEY = "old_value"', + 'API_KEY', + 'new_value', + 'API_KEY="new_value"', + ], + [ + 'handles key with leading whitespace in file', + ' LEADING_SPACE="old_value"', + 'LEADING_SPACE', + 'new_value', + 'LEADING_SPACE="new_value"', + ], + [ + 'handles key with trailing whitespace before equals', + 'TRAILING_SPACE ="old_value"', + 'TRAILING_SPACE', + 'new_value', + 'TRAILING_SPACE="new_value"', + ], +]; - await update_env_variable('API_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // Normalizes to no spaces - expect(fs.get_file('/test/.env')).toBe('API_KEY="new_value"'); - }); - - test('handles key with leading whitespace in file', async () => { - const fs = create_mock_fs({ - '/test/.env': ' LEADING_SPACE="old_value"', - }); - - await update_env_variable('LEADING_SPACE', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - // Normalizes whitespace - expect(fs.get_file('/test/.env')).toBe('LEADING_SPACE="new_value"'); - }); - - test('handles key with trailing whitespace before equals', async () => { - const fs = create_mock_fs({ - '/test/.env': 'TRAILING_SPACE ="old_value"', - }); +describe('update_env_variable - whitespace handling', () => { + test.each(whitespace_cases)('%s', async (_label, initial, key, value, expected) => { + const fs = create_mock_fs({'/test/.env': initial}); - await update_env_variable('TRAILING_SPACE', 'new_value', { + await update_env_variable(key, value, { env_file_path: '/test/.env', read_file: fs.read_file, write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe('TRAILING_SPACE="new_value"'); + assert.strictEqual(fs.get_file('/test/.env'), expected); }); test('preserves exact original formatting for non-matching lines', async () => { @@ -305,90 +244,88 @@ describe('update_env_variable - whitespace handling', () => { }); const result = fs.get_file('/test/.env'); - expect(result).toBe( 
+ assert.strictEqual( + result, ' INDENT_KEY = "spaced" \nTARGET_KEY="new"\n\t\tTAB_KEY\t=\t"tabbed"\t', ); // Verify exact preservation of unchanged lines const lines = result?.split('\n') || []; - expect(lines[0]).toBe(' INDENT_KEY = "spaced" '); - expect(lines[2]).toBe('\t\tTAB_KEY\t=\t"tabbed"\t'); + assert.strictEqual(lines[0], ' INDENT_KEY = "spaced" '); + assert.strictEqual(lines[2], '\t\tTAB_KEY\t=\t"tabbed"\t'); }); }); -describe('update_env_variable - special keys', () => { - test('handles key with underscores and numbers', async () => { - const fs = create_mock_fs({ - '/test/.env': 'API_KEY_123="old_value"', - }); - - await update_env_variable('API_KEY_123', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('API_KEY_123="new_value"'); - }); - - test('handles key with dots (regex special char)', async () => { - const fs = create_mock_fs({ - '/test/.env': 'NORMAL_KEY="value1"\nSPECIAL.KEY="value2"', - }); +const special_key_cases: Array< + [label: string, initial: string, key: string, value: string, expected: string] +> = [ + [ + 'handles key with underscores and numbers', + 'API_KEY_123="old_value"', + 'API_KEY_123', + 'new_value', + 'API_KEY_123="new_value"', + ], + [ + 'handles key with dots (regex special char)', + 'NORMAL_KEY="value1"\nSPECIAL.KEY="value2"', + 'SPECIAL.KEY', + 'new_value', + 'NORMAL_KEY="value1"\nSPECIAL.KEY="new_value"', + ], + [ + 'handles empty key name', + 'VALID_KEY="value"', + '', + 'empty_key_value', + 'VALID_KEY="value"\n="empty_key_value"', + ], +]; - await update_env_variable('SPECIAL.KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('NORMAL_KEY="value1"\nSPECIAL.KEY="new_value"'); - }); - - test('handles empty key name', async () => { - const fs = create_mock_fs({ - '/test/.env': 'VALID_KEY="value"', - }); 
+describe('update_env_variable - special keys', () => { + test.each(special_key_cases)('%s', async (_label, initial, key, value, expected) => { + const fs = create_mock_fs({'/test/.env': initial}); - // Empty key edge case - await update_env_variable('', 'empty_key_value', { + await update_env_variable(key, value, { env_file_path: '/test/.env', read_file: fs.read_file, write_file: fs.write_file, }); - expect(fs.get_file('/test/.env')).toBe('VALID_KEY="value"\n="empty_key_value"'); + assert.strictEqual(fs.get_file('/test/.env'), expected); }); }); -describe('update_env_variable - file variations', () => { - test('handles file with only comments', async () => { - const fs = create_mock_fs({ - '/test/.env': '# Comment 1\n# Comment 2', - }); +const file_variation_cases: Array< + [label: string, initial: string, key: string, value: string, expected: string] +> = [ + [ + 'handles file with only comments', + '# Comment 1\n# Comment 2', + 'NEW_KEY', + 'new_value', + '# Comment 1\n# Comment 2\nNEW_KEY="new_value"', + ], + [ + 'handles file with only empty lines', + '\n\n\n', + 'NEW_KEY', + 'new_value', + '\n\n\n\nNEW_KEY="new_value"', + ], +]; - await update_env_variable('NEW_KEY', 'new_value', { - env_file_path: '/test/.env', - read_file: fs.read_file, - write_file: fs.write_file, - }); - - expect(fs.get_file('/test/.env')).toBe('# Comment 1\n# Comment 2\nNEW_KEY="new_value"'); - }); - - test('handles file with only empty lines', async () => { - const fs = create_mock_fs({ - '/test/.env': '\n\n\n', - }); +describe('update_env_variable - file variations', () => { + test.each(file_variation_cases)('%s', async (_label, initial, key, value, expected) => { + const fs = create_mock_fs({'/test/.env': initial}); - await update_env_variable('NEW_KEY', 'new_value', { + await update_env_variable(key, value, { env_file_path: '/test/.env', read_file: fs.read_file, write_file: fs.write_file, }); - // File ends with newline, so blank line separator is added - 
expect(fs.get_file('/test/.env')).toBe('\n\n\n\nNEW_KEY="new_value"'); + assert.strictEqual(fs.get_file('/test/.env'), expected); }); test('verifies path is resolved to absolute', async () => { @@ -403,8 +340,8 @@ describe('update_env_variable - file variations', () => { }); // Path should be absolute - expect(resolved_path).toBeDefined(); - expect(resolved_path?.startsWith('/')).toBe(true); - expect(resolved_path?.endsWith('relative/.env')).toBe(true); + assert.ok(resolved_path); + assert.ok(resolved_path.startsWith('/')); + assert.ok(resolved_path.endsWith('relative/.env')); }); }); diff --git a/src/test/server/routes/auth_adversarial_headers.test.ts b/src/test/server/routes/auth_adversarial_headers.test.ts new file mode 100644 index 000000000..dbd3f3f4a --- /dev/null +++ b/src/test/server/routes/auth_adversarial_headers.test.ts @@ -0,0 +1,14 @@ +import {describe_standard_adversarial_headers} from '@fuzdev/fuz_app/testing/adversarial_headers.js'; + +const TRUSTED_PROXY = '127.0.0.1'; +const DEV_ORIGIN = 'http://localhost:5173'; + +describe_standard_adversarial_headers( + 'zzz adversarial header attacks (dev origin)', + { + trusted_proxies: [TRUSTED_PROXY, '::1'], + allowed_origins: DEV_ORIGIN, + connection_ip: TRUSTED_PROXY, + }, + DEV_ORIGIN, +); diff --git a/src/test/server/routes/auth_attack_surface.gen.json.ts b/src/test/server/routes/auth_attack_surface.gen.json.ts new file mode 100644 index 000000000..5eb732184 --- /dev/null +++ b/src/test/server/routes/auth_attack_surface.gen.json.ts @@ -0,0 +1,7 @@ +import type {Gen} from '@fuzdev/gro'; + +import {create_zzz_app_surface_spec} from './auth_attack_surface_helpers.js'; + +export const gen: Gen = () => { + return JSON.stringify(create_zzz_app_surface_spec().surface); +}; diff --git a/src/test/server/routes/auth_attack_surface.json b/src/test/server/routes/auth_attack_surface.json new file mode 100644 index 000000000..63b3f56c3 --- /dev/null +++ b/src/test/server/routes/auth_attack_surface.json @@ -0,0 +1,3038 
@@ +{ + "diagnostics": [], + "middleware": [ + {"name": "host_validation", "path": "*", "error_schemas": null}, + { + "name": "origin", + "path": "/api/*", + "error_schemas": { + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + } + } + }, + {"name": "session", "path": "/api/*", "error_schemas": null}, + {"name": "request_context", "path": "/api/*", "error_schemas": null}, + { + "name": "bearer_auth", + "path": "/api/*", + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + } + ], + "routes": [ + { + "method": "GET", + "path": "/health", + "auth": {"type": "none"}, + "applicable_middleware": ["host_validation"], + "description": "Health check", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"status": {"type": "string", "const": "ok"}}, + "required": ["status"], + "additionalProperties": false + }, + "error_schemas": null + }, + { + "method": "POST", + "path": "/api/account/login", + "auth": {"type": "none"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Exchange credentials for session", + "is_mutation": true, + "transaction": true, + "rate_limit_key": "both", + "params_schema": null, + "query_schema": null, + "input_schema": { + "type": "object", + "properties": { + "username": 
{"type": "string", "minLength": 1, "maxLength": 255}, + "password": {"type": "string", "minLength": 1, "maxLength": 300, "sensitivity": "secret"} + }, + "required": ["username", "password"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": {"ok": {"type": "boolean", "const": true}}, + "required": ["ok"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string", "const": "invalid_credentials"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/account/logout", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Revoke current session and clear cookie", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"ok": {"type": "boolean", "const": true}, "username": {"type": "string"}}, + "required": 
["ok", "username"], + "additionalProperties": false + }, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/account/verify", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Check session validity", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "ok": {"type": "boolean", "const": true}, + "account": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "username": { + "type": "string", + "minLength": 3, + "maxLength": 39, + "pattern": "^[a-zA-Z][0-9a-zA-Z_-]*[0-9a-zA-Z]$" + }, + "email": { + "anyOf": [ + { + "type": "string", + "format": "email", + "pattern": "^(?!\\.)(?!.*\\.\\.)([A-Za-z0-9_'+\\-\\.]*)[A-Za-z0-9_+-]@([A-Za-z0-9][A-Za-z0-9\\-]*\\.)+[A-Za-z]{2,}$" + }, + {"type": "null"} + ] + }, + "email_verified": {"type": "boolean"}, + "created_at": {"type": "string"} + }, + "required": ["id", "username", "email", "email_verified", "created_at"], + "additionalProperties": false + } + }, + "required": ["ok", "account"], + "additionalProperties": false + }, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": 
{"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/account/sessions", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "List auth sessions for current account", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "sessions": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "account_id": {"type": "string"}, + "created_at": {"type": "string"}, + "expires_at": {"type": "string"}, + "last_seen_at": {"type": "string"} + }, + "required": ["id", "account_id", "created_at", "expires_at", "last_seen_at"], + "additionalProperties": false + } + } + }, + "required": ["sessions"], + "additionalProperties": false + }, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/account/sessions/:id/revoke", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + 
"bearer_auth" + ], + "description": "Revoke a single auth session (account-scoped)", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": { + "type": "object", + "properties": {"id": {"type": "string", "pattern": "^[0-9a-f]{64}$"}}, + "required": ["id"], + "additionalProperties": false + }, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"ok": {"type": "boolean", "const": true}, "revoked": {"type": "boolean"}}, + "required": ["ok", "revoked"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/account/sessions/revoke-all", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Revoke all auth sessions for current account", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": null, + "query_schema": 
null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"ok": {"type": "boolean", "const": true}, "count": {"type": "number"}}, + "required": ["ok", "count"], + "additionalProperties": false + }, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/account/tokens/create", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Create API token (shown once)", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": { + "type": "object", + "properties": {"name": {"type": "string"}}, + "required": ["name"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": { + "ok": {"type": "boolean", "const": true}, + "token": {"type": "string"}, + "id": {"type": "string"}, + "name": {"type": "string"} + }, + "required": ["ok", "token", "id", "name"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } 
+ } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/account/tokens", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "List API tokens for current account", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "tokens": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "account_id": {"type": "string"}, + "name": {"type": "string"}, + "expires_at": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "last_used_at": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "last_used_ip": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "created_at": {"type": "string"} + }, + "required": [ + "id", + "account_id", + "name", + "expires_at", + "last_used_at", + "last_used_ip", + "created_at" + ], + "additionalProperties": false + } + } + }, + "required": ["tokens"], + "additionalProperties": false + }, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": 
{} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/account/tokens/:id/revoke", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Revoke an API token (account-scoped)", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": { + "type": "object", + "properties": {"id": {"type": "string", "pattern": "^tok_[A-Za-z0-9_-]{12}$"}}, + "required": ["id"], + "additionalProperties": false + }, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"ok": {"type": "boolean", "const": true}, "revoked": {"type": "boolean"}}, + "required": ["ok", "revoked"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], 
+ "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/account/password", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Change password (revokes all sessions and API tokens)", + "is_mutation": true, + "transaction": true, + "rate_limit_key": "ip", + "params_schema": null, + "query_schema": null, + "input_schema": { + "type": "object", + "properties": { + "current_password": { + "type": "string", + "minLength": 1, + "maxLength": 300, + "sensitivity": "secret" + }, + "new_password": { + "type": "string", + "minLength": 12, + "maxLength": 300, + "sensitivity": "secret" + } + }, + "required": ["current_password", "new_password"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": { + "ok": {"type": "boolean", "const": true}, + "sessions_revoked": {"type": "number"}, + "tokens_revoked": {"type": "number"} + }, + "required": ["ok", "sessions_revoked", "tokens_revoked"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": 
"rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/account/status", + "auth": {"type": "none"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Current account info (unauthenticated: 401 with bootstrap status)", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"account": {"type": "object", "properties": {}, "additionalProperties": {}}}, + "required": ["account"], + "additionalProperties": {} + }, + "error_schemas": { + "401": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "authentication_required"}, + "bootstrap_available": {"type": "boolean"} + }, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/server/status", + "auth": {"type": "authenticated"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Server version and uptime", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"version": {"type": "string"}, "uptime_ms": {"type": "number"}}, + "required": ["version", "uptime_ms"], + "additionalProperties": {} + }, + 
"error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/rpc", + "auth": {"type": "none"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "JSON-RPC 2.0 endpoint — 23 methods", + "is_mutation": true, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": {}, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/rpc", + "auth": {"type": "none"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "JSON-RPC 2.0 endpoint (cacheable reads) — 23 methods", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": {}, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": 
"string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/admin/accounts", + "auth": {"type": "role", "role": "admin"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "List all accounts with their permits", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "accounts": { + "type": "array", + "items": { + "type": "object", + "properties": { + "account": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "username": { + "type": "string", + "minLength": 3, + "maxLength": 39, + "pattern": "^[a-zA-Z][0-9a-zA-Z_-]*[0-9a-zA-Z]$" + }, + "email": { + "anyOf": [ + { + "type": "string", + "format": "email", + "pattern": "^(?!\\.)(?!.*\\.\\.)([A-Za-z0-9_'+\\-\\.]*)[A-Za-z0-9_+-]@([A-Za-z0-9][A-Za-z0-9\\-]*\\.)+[A-Za-z]{2,}$" + }, + {"type": "null"} + ] + }, + "email_verified": {"type": "boolean"}, + "created_at": {"type": "string"}, + "updated_at": {"type": "string"}, + "updated_by": {"anyOf": [{"type": "string"}, {"type": "null"}]} + }, + "required": [ + "id", + "username", + "email", + "email_verified", + "created_at", + "updated_at", + "updated_by" + ], + "additionalProperties": false + }, + "actor": { + "anyOf": [ + { + "type": "object", + "properties": {"id": {"type": "string"}, "name": {"type": "string"}}, + "required": ["id", "name"], + "additionalProperties": false + }, + {"type": "null"} + ] + 
}, + "permits": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "role": {"type": "string"}, + "created_at": {"type": "string"}, + "expires_at": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "granted_by": {"anyOf": [{"type": "string"}, {"type": "null"}]} + }, + "required": ["id", "role", "created_at", "expires_at", "granted_by"], + "additionalProperties": false + } + } + }, + "required": ["account", "actor", "permits"], + "additionalProperties": false + } + }, + "grantable_roles": { + "type": "array", + "items": {"type": "string", "pattern": "^[a-z][a-z_]*[a-z]$|^[a-z]$"} + } + }, + "required": ["accounts", "grantable_roles"], + "additionalProperties": false + }, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/admin/accounts/:account_id/permits/grant", + "auth": {"type": "role", "role": "admin"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Grant a role permit to an account", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": { + "type": "object", + "properties": { + "account_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "required": ["account_id"], + "additionalProperties": 
false + }, + "query_schema": null, + "input_schema": { + "type": "object", + "properties": {"role": {"type": "string", "enum": ["keeper", "admin"]}}, + "required": ["role"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": { + "ok": {"type": "boolean", "const": true}, + "permit": { + "type": "object", + "properties": {"id": {"type": "string"}, "role": {"type": "string"}}, + "required": ["id", "role"], + "additionalProperties": false + } + }, + "required": ["ok", "permit"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": { + "error": { + "type": "string", + "enum": ["insufficient_permissions", "role_not_web_grantable"] + } + }, + "required": ["error"], + "additionalProperties": {} + }, + "404": { + "type": "object", + "properties": {"error": {"type": "string", "const": "account_not_found"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/admin/accounts/:account_id/sessions/revoke-all", + "auth": {"type": "role", "role": "admin"}, + "applicable_middleware": [ + 
"host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Revoke all sessions for an account", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": { + "type": "object", + "properties": { + "account_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "required": ["account_id"], + "additionalProperties": false + }, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"ok": {"type": "boolean", "const": true}, "count": {"type": "number"}}, + "required": ["ok", "count"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "404": { + "type": "object", + "properties": {"error": {"type": "string", "const": "account_not_found"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": 
{} + } + } + }, + { + "method": "POST", + "path": "/api/admin/accounts/:account_id/tokens/revoke-all", + "auth": {"type": "role", "role": "admin"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Revoke all API tokens for an account", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": { + "type": "object", + "properties": { + "account_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "required": ["account_id"], + "additionalProperties": false + }, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"ok": {"type": "boolean", "const": true}, "count": {"type": "number"}}, + "required": ["ok", "count"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "404": { + "type": "object", + "properties": {"error": {"type": "string", "const": "account_not_found"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + 
"properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/admin/accounts/:account_id/permits/:permit_id/revoke", + "auth": {"type": "role", "role": "admin"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Revoke a permit", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": { + "type": "object", + "properties": { + "account_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + }, + "permit_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "required": ["account_id", "permit_id"], + "additionalProperties": false + }, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "ok": {"type": "boolean", "const": true}, + "revoked": {"type": "boolean", "const": true} + }, + "required": ["ok", "revoked"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": 
"object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "404": { + "type": "object", + "properties": { + "error": {"type": "string", "enum": ["account_not_found", "permit_not_found"]} + }, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/admin/audit-log", + "auth": {"type": "role", "role": "admin"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "List audit log events with optional filters", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "seq": { + "type": "integer", + "minimum": -9007199254740991, + "maximum": 9007199254740991 + }, + "event_type": { + "type": "string", + "enum": [ + "login", + "logout", + "bootstrap", + "signup", + "password_change", + "session_revoke", + "session_revoke_all", + "token_create", + "token_revoke", + "token_revoke_all", + "permit_grant", + "permit_revoke", + "invite_create", + "invite_delete", + "app_settings_update" + ] + }, + "outcome": {"type": "string", "enum": ["success", "failure"]}, + "actor_id": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "account_id": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "target_account_id": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "ip": 
{"anyOf": [{"type": "string"}, {"type": "null"}]}, + "created_at": {"type": "string"}, + "metadata": { + "anyOf": [ + { + "type": "object", + "propertyNames": {"type": "string"}, + "additionalProperties": {} + }, + {"type": "null"} + ] + }, + "username": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "target_username": {"anyOf": [{"type": "string"}, {"type": "null"}]} + }, + "required": [ + "id", + "seq", + "event_type", + "outcome", + "actor_id", + "account_id", + "target_account_id", + "ip", + "created_at", + "metadata", + "username", + "target_username" + ], + "additionalProperties": false + } + } + }, + "required": ["events"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": {"error": {"type": "string", "const": "invalid_event_type"}}, + "required": ["error"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/admin/audit-log/permit-history", + "auth": {"type": "role", "role": "admin"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "List permit grant and revoke events with usernames", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": 
"string"}, + "seq": { + "type": "integer", + "minimum": -9007199254740991, + "maximum": 9007199254740991 + }, + "event_type": { + "type": "string", + "enum": [ + "login", + "logout", + "bootstrap", + "signup", + "password_change", + "session_revoke", + "session_revoke_all", + "token_create", + "token_revoke", + "token_revoke_all", + "permit_grant", + "permit_revoke", + "invite_create", + "invite_delete", + "app_settings_update" + ] + }, + "outcome": {"type": "string", "enum": ["success", "failure"]}, + "actor_id": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "account_id": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "target_account_id": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "ip": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "created_at": {"type": "string"}, + "metadata": { + "anyOf": [ + { + "type": "object", + "propertyNames": {"type": "string"}, + "additionalProperties": {} + }, + {"type": "null"} + ] + }, + "username": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "target_username": {"anyOf": [{"type": "string"}, {"type": "null"}]} + }, + "required": [ + "id", + "seq", + "event_type", + "outcome", + "actor_id", + "account_id", + "target_account_id", + "ip", + "created_at", + "metadata", + "username", + "target_username" + ], + "additionalProperties": false + } + } + }, + "required": ["events"], + "additionalProperties": false + }, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/admin/sessions", + "auth": 
{"type": "role", "role": "admin"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "List all active sessions across all accounts", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "sessions": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "account_id": {"type": "string"}, + "created_at": {"type": "string"}, + "expires_at": {"type": "string"}, + "last_seen_at": {"type": "string"}, + "username": {"type": "string"} + }, + "required": [ + "id", + "account_id", + "created_at", + "expires_at", + "last_seen_at", + "username" + ], + "additionalProperties": false + } + } + }, + "required": ["sessions"], + "additionalProperties": false + }, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "GET", + "path": "/api/admin/settings", + "auth": {"type": "role", "role": "admin"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Get app settings", + "is_mutation": false, + "transaction": false, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "settings": { + "type": "object", + "properties": { + 
"open_signup": {"type": "boolean"}, + "updated_at": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "updated_by": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "updated_by_username": {"anyOf": [{"type": "string"}, {"type": "null"}]} + }, + "required": ["open_signup", "updated_at", "updated_by", "updated_by_username"], + "additionalProperties": false + } + }, + "required": ["settings"], + "additionalProperties": false + }, + "error_schemas": { + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "PATCH", + "path": "/api/admin/settings", + "auth": {"type": "role", "role": "admin"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Update app settings", + "is_mutation": true, + "transaction": true, + "rate_limit_key": null, + "params_schema": null, + "query_schema": null, + "input_schema": { + "type": "object", + "properties": {"open_signup": {"type": "boolean"}}, + "required": ["open_signup"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": { + "ok": {"type": "boolean", "const": true}, + "settings": { + "type": "object", + "properties": { + "open_signup": {"type": "boolean"}, + "updated_at": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "updated_by": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "updated_by_username": {"anyOf": [{"type": "string"}, {"type": "null"}]} + }, + "required": ["open_signup", "updated_at", "updated_by", 
"updated_by_username"], + "additionalProperties": false + } + }, + "required": ["ok", "settings"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string"}}, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + }, + { + "method": "POST", + "path": "/api/account/bootstrap", + "auth": {"type": "none"}, + "applicable_middleware": [ + "host_validation", + "origin", + "session", + "request_context", + "bearer_auth" + ], + "description": "Create initial keeper account (one-shot)", + "is_mutation": true, + "transaction": false, + "rate_limit_key": "ip", + "params_schema": null, + "query_schema": null, + "input_schema": { + "type": "object", + "properties": { + "token": {"type": "string", "minLength": 1, "sensitivity": "secret"}, + "username": { + "type": "string", + "minLength": 3, + "maxLength": 39, + "pattern": "^[a-zA-Z][0-9a-zA-Z_-]*[0-9a-zA-Z]$" + }, + "password": {"type": "string", "minLength": 12, "maxLength": 300, "sensitivity": "secret"} + }, + "required": ["token", "username", "password"], + "additionalProperties": false + }, + "output_schema": 
{ + "type": "object", + "properties": {"ok": {"type": "boolean", "const": true}, "username": {"type": "string"}}, + "required": ["ok", "username"], + "additionalProperties": false + }, + "error_schemas": { + "400": { + "type": "object", + "properties": { + "error": {"type": "string"}, + "issues": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": {"type": "string"}, + "message": {"type": "string"}, + "path": { + "type": "array", + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]} + } + }, + "required": ["code", "message", "path"], + "additionalProperties": {} + } + } + }, + "required": ["error", "issues"], + "additionalProperties": {} + }, + "401": { + "type": "object", + "properties": {"error": {"type": "string", "const": "invalid_token"}}, + "required": ["error"], + "additionalProperties": {} + }, + "403": { + "type": "object", + "properties": {"error": {"type": "string", "const": "already_bootstrapped"}}, + "required": ["error"], + "additionalProperties": {} + }, + "404": { + "type": "object", + "properties": { + "error": {"type": "string", "enum": ["token_file_missing", "bootstrap_not_configured"]} + }, + "required": ["error"], + "additionalProperties": {} + }, + "429": { + "type": "object", + "properties": { + "error": {"type": "string", "const": "rate_limit_exceeded"}, + "retry_after": {"type": "number"} + }, + "required": ["error", "retry_after"], + "additionalProperties": {} + } + } + } + ], + "rpc_endpoints": [ + { + "path": "/api/rpc", + "methods": [ + { + "name": "ping", + "auth": {"type": "none"}, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": {"ping_id": {"anyOf": [{"type": "string"}, {"type": "number"}]}}, + "required": ["ping_id"], + "additionalProperties": false + }, + "side_effects": false, + "description": "Health check — echoes the request ID back to the caller." 
+ }, + { + "name": "session_load", + "auth": {"type": "authenticated"}, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "data": { + "type": "object", + "properties": { + "zzz_dir": {"type": "string"}, + "scoped_dirs": {"readOnly": true, "type": "array", "items": {"type": "string"}}, + "files": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "source_dir": {"type": "string"}, + "contents": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "ctime": {"anyOf": [{"type": "number"}, {"type": "null"}]}, + "mtime": {"anyOf": [{"type": "number"}, {"type": "null"}]}, + "dependents": { + "type": "array", + "items": {"type": "array", "prefixItems": [{"type": "string"}, {}]} + }, + "dependencies": { + "type": "array", + "items": {"type": "array", "prefixItems": [{"type": "string"}, {}]} + } + }, + "required": [ + "id", + "source_dir", + "contents", + "ctime", + "mtime", + "dependents", + "dependencies" + ], + "additionalProperties": false + } + }, + "provider_status": { + "type": "array", + "items": { + "oneOf": [ + { + "type": "object", + "properties": { + "name": {"type": "string"}, + "available": {"type": "boolean", "const": true}, + "checked_at": {"type": "number"} + }, + "required": ["name", "available", "checked_at"], + "additionalProperties": false + }, + { + "type": "object", + "properties": { + "name": {"type": "string"}, + "available": {"type": "boolean", "const": false}, + "error": {"type": "string"}, + "checked_at": {"type": "number"} + }, + "required": ["name", "available", "error", "checked_at"], + "additionalProperties": false + } + ] + } + }, + "workspaces": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": {"type": "string"}, + "name": {"type": "string"}, + "opened_at": {"type": "string"} + }, + "required": ["path", "name", "opened_at"], + "additionalProperties": false + } + } + }, + "required": ["zzz_dir", "scoped_dirs", "files", 
"provider_status", "workspaces"], + "additionalProperties": false + } + }, + "required": ["data"], + "additionalProperties": false + }, + "side_effects": false, + "description": "Load initial session data including filesystem state and provider status." + }, + { + "name": "diskfile_update", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": {"path": {"type": "string"}, "content": {"type": "string"}}, + "required": ["path", "content"], + "additionalProperties": false + }, + "output_schema": null, + "side_effects": true, + "description": "Write new content to a file on disk." + }, + { + "name": "diskfile_delete", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": {"path": {"type": "string"}}, + "required": ["path"], + "additionalProperties": false + }, + "output_schema": null, + "side_effects": true, + "description": "Delete a file from disk." + }, + { + "name": "directory_create", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": {"path": {"type": "string"}}, + "required": ["path"], + "additionalProperties": false + }, + "output_schema": null, + "side_effects": true, + "description": "Create a new directory on disk." 
+ }, + { + "name": "completion_create", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": { + "completion_request": { + "type": "object", + "properties": { + "created": { + "type": "string", + "format": "date-time", + "pattern": "^(?:(?:\\d\\d[2468][048]|\\d\\d[13579][26]|\\d\\d0[48]|[02468][048]00|[13579][26]00)-02-29|\\d{4}-(?:(?:0[13578]|1[02])-(?:0[1-9]|[12]\\d|3[01])|(?:0[469]|11)-(?:0[1-9]|[12]\\d|30)|(?:02)-(?:0[1-9]|1\\d|2[0-8])))T(?:(?:[01]\\d|2[0-3]):[0-5]\\d(?::[0-5]\\d(?:\\.\\d+)?)?(?:Z))$" + }, + "provider_name": { + "type": "string", + "enum": ["ollama", "claude", "chatgpt", "gemini"] + }, + "model": {"type": "string"}, + "prompt": {"type": "string"}, + "completion_messages": { + "type": "array", + "items": { + "type": "object", + "properties": {"role": {"type": "string"}, "content": {"type": "string"}}, + "required": ["role", "content"], + "additionalProperties": {} + } + } + }, + "required": ["created", "provider_name", "model", "prompt"], + "additionalProperties": false + }, + "_meta": { + "type": "object", + "properties": { + "progressToken": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "additionalProperties": {} + } + }, + "required": ["completion_request"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": { + "completion_response": { + "type": "object", + "properties": { + "created": { + "type": "string", + "format": "date-time", + "pattern": "^(?:(?:\\d\\d[2468][048]|\\d\\d[13579][26]|\\d\\d0[48]|[02468][048]00|[13579][26]00)-02-29|\\d{4}-(?:(?:0[13578]|1[02])-(?:0[1-9]|[12]\\d|3[01])|(?:0[469]|11)-(?:0[1-9]|[12]\\d|30)|(?:02)-(?:0[1-9]|1\\d|2[0-8])))T(?:(?:[01]\\d|2[0-3]):[0-5]\\d(?::[0-5]\\d(?:\\.\\d+)?)?(?:Z))$" + }, + "provider_name": { + "type": "string", + "enum": 
["ollama", "claude", "chatgpt", "gemini"] + }, + "model": {"type": "string"}, + "data": { + "oneOf": [ + { + "type": "object", + "properties": {"type": {"type": "string", "const": "ollama"}, "value": {}}, + "required": ["type", "value"], + "additionalProperties": false + }, + { + "type": "object", + "properties": {"type": {"type": "string", "const": "claude"}, "value": {}}, + "required": ["type", "value"], + "additionalProperties": false + }, + { + "type": "object", + "properties": {"type": {"type": "string", "const": "chatgpt"}, "value": {}}, + "required": ["type", "value"], + "additionalProperties": false + }, + { + "type": "object", + "properties": { + "type": {"type": "string", "const": "gemini"}, + "value": { + "type": "object", + "properties": { + "text": {"type": "string"}, + "candidates": { + "anyOf": [{"type": "array", "items": {}}, {"type": "null"}] + }, + "function_calls": { + "anyOf": [{"type": "array", "items": {}}, {"type": "null"}] + }, + "prompt_feedback": {"anyOf": [{}, {"type": "null"}]}, + "usage_metadata": {"anyOf": [{}, {"type": "null"}]} + }, + "required": ["text"], + "additionalProperties": false + } + }, + "required": ["type", "value"], + "additionalProperties": false + } + ] + } + }, + "required": ["created", "provider_name", "model", "data"], + "additionalProperties": false + }, + "_meta": { + "type": "object", + "properties": { + "progressToken": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "additionalProperties": {} + } + }, + "required": ["completion_response"], + "additionalProperties": false + }, + "side_effects": true, + "description": "Start an AI completion request, optionally with a progress token for streaming." 
+ }, + { + "name": "ollama_list", + "auth": {"type": "authenticated"}, + "input_schema": null, + "output_schema": { + "anyOf": [ + { + "type": "object", + "properties": { + "models": { + "type": "array", + "items": { + "type": "object", + "properties": { + "details": { + "type": "object", + "properties": { + "families": {"type": "array", "items": {"type": "string"}}, + "family": {"type": "string"}, + "format": {"type": "string"}, + "parameter_size": {"type": "string"}, + "parent_model": {"type": "string"}, + "quantization_level": {"type": "string"} + }, + "required": [ + "families", + "family", + "format", + "parameter_size", + "parent_model", + "quantization_level" + ], + "additionalProperties": {} + }, + "digest": {"type": "string"}, + "model": {"type": "string"}, + "modified_at": {"type": "string"}, + "name": {"type": "string"}, + "size": {"type": "number"} + }, + "required": ["digest", "model", "modified_at", "name", "size"], + "additionalProperties": {} + } + } + }, + "required": ["models"], + "additionalProperties": {} + }, + {"type": "null"} + ] + }, + "side_effects": false, + "description": "List all locally available Ollama models." 
+ }, + { + "name": "ollama_ps", + "auth": {"type": "authenticated"}, + "input_schema": null, + "output_schema": { + "anyOf": [ + { + "type": "object", + "properties": { + "models": { + "type": "array", + "items": { + "type": "object", + "properties": { + "details": { + "type": "object", + "properties": { + "families": {"type": "array", "items": {"type": "string"}}, + "family": {"type": "string"}, + "format": {"type": "string"}, + "parameter_size": {"type": "string"}, + "parent_model": {"type": "string"}, + "quantization_level": {"type": "string"} + }, + "required": [ + "families", + "family", + "format", + "parameter_size", + "parent_model", + "quantization_level" + ], + "additionalProperties": {} + }, + "digest": {"type": "string"}, + "expires_at": {"type": "string"}, + "model": {"type": "string"}, + "name": {"type": "string"}, + "size": {"type": "number"}, + "size_vram": {"type": "number"} + }, + "required": ["digest", "expires_at", "model", "name", "size", "size_vram"], + "additionalProperties": {} + } + } + }, + "required": ["models"], + "additionalProperties": {} + }, + {"type": "null"} + ] + }, + "side_effects": false, + "description": "List currently running Ollama models." 
+ }, + { + "name": "ollama_show", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": { + "model": {"type": "string"}, + "system": {"type": "string"}, + "template": {"type": "string"}, + "options": {} + }, + "required": ["model"], + "additionalProperties": {} + }, + "output_schema": { + "anyOf": [ + { + "type": "object", + "properties": { + "capabilities": {"type": "array", "items": {"type": "string"}}, + "details": { + "type": "object", + "properties": { + "families": {"type": "array", "items": {"type": "string"}}, + "family": {"type": "string"}, + "format": {"type": "string"}, + "parameter_size": {"type": "string"}, + "parent_model": {"type": "string"}, + "quantization_level": {"type": "string"} + }, + "required": [ + "families", + "family", + "format", + "parameter_size", + "parent_model", + "quantization_level" + ], + "additionalProperties": {} + }, + "license": {"type": "string"}, + "model_info": {}, + "modelfile": {"type": "string"}, + "modified_at": {"type": "string"}, + "template": {"type": "string"}, + "tensors": {"type": "array", "items": {}} + }, + "additionalProperties": {} + }, + {"type": "null"} + ] + }, + "side_effects": false, + "description": "Show detailed information about an Ollama model." 
+ }, + { + "name": "ollama_pull", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": { + "model": {"type": "string"}, + "insecure": {"type": "boolean"}, + "stream": {"type": "boolean"}, + "_meta": { + "type": "object", + "properties": { + "progressToken": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "additionalProperties": {} + } + }, + "required": ["model"], + "additionalProperties": false + }, + "output_schema": null, + "side_effects": true, + "description": "Pull an Ollama model from the registry." + }, + { + "name": "ollama_delete", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": {"model": {"type": "string"}}, + "required": ["model"], + "additionalProperties": {} + }, + "output_schema": null, + "side_effects": true, + "description": "Delete an Ollama model from local storage." + }, + { + "name": "ollama_copy", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": {"source": {"type": "string"}, "destination": {"type": "string"}}, + "required": ["source", "destination"], + "additionalProperties": {} + }, + "output_schema": null, + "side_effects": true, + "description": "Copy an Ollama model under a new name." 
+ }, + { + "name": "ollama_create", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": { + "model": {"type": "string"}, + "from": {"type": "string"}, + "stream": {"type": "boolean"}, + "quantize": {"type": "string"}, + "template": {"type": "string"}, + "license": { + "anyOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}] + }, + "system": {"type": "string"}, + "parameters": { + "type": "object", + "propertyNames": {"type": "string"}, + "additionalProperties": {} + }, + "messages": {"type": "array", "items": {}}, + "adapters": { + "type": "object", + "propertyNames": {"type": "string"}, + "additionalProperties": {"type": "string"} + }, + "_meta": { + "type": "object", + "properties": { + "progressToken": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "additionalProperties": {} + } + }, + "required": ["model"], + "additionalProperties": false + }, + "output_schema": null, + "side_effects": true, + "description": "Create a new Ollama model from a Modelfile." + }, + { + "name": "ollama_unload", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": {"model": {"type": "string"}}, + "required": ["model"], + "additionalProperties": false + }, + "output_schema": null, + "side_effects": true, + "description": "Unload an Ollama model from memory." 
+ }, + { + "name": "provider_load_status", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": { + "provider_name": { + "type": "string", + "enum": ["ollama", "claude", "chatgpt", "gemini"] + }, + "reload": {"type": "boolean"} + }, + "required": ["provider_name"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": { + "status": { + "oneOf": [ + { + "type": "object", + "properties": { + "name": {"type": "string"}, + "available": {"type": "boolean", "const": true}, + "checked_at": {"type": "number"} + }, + "required": ["name", "available", "checked_at"], + "additionalProperties": false + }, + { + "type": "object", + "properties": { + "name": {"type": "string"}, + "available": {"type": "boolean", "const": false}, + "error": {"type": "string"}, + "checked_at": {"type": "number"} + }, + "required": ["name", "available", "error", "checked_at"], + "additionalProperties": false + } + ] + } + }, + "required": ["status"], + "additionalProperties": false + }, + "side_effects": false, + "description": "Check the availability and status of an AI provider." 
+ }, + { + "name": "provider_update_api_key", + "auth": {"type": "keeper"}, + "input_schema": { + "type": "object", + "properties": { + "provider_name": { + "type": "string", + "enum": ["ollama", "claude", "chatgpt", "gemini"] + }, + "api_key": {"type": "string"} + }, + "required": ["provider_name", "api_key"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": { + "status": { + "oneOf": [ + { + "type": "object", + "properties": { + "name": {"type": "string"}, + "available": {"type": "boolean", "const": true}, + "checked_at": {"type": "number"} + }, + "required": ["name", "available", "checked_at"], + "additionalProperties": false + }, + { + "type": "object", + "properties": { + "name": {"type": "string"}, + "available": {"type": "boolean", "const": false}, + "error": {"type": "string"}, + "checked_at": {"type": "number"} + }, + "required": ["name", "available", "error", "checked_at"], + "additionalProperties": false + } + ] + } + }, + "required": ["status"], + "additionalProperties": false + }, + "side_effects": true, + "description": "Update the API key for an AI provider." 
+ }, + { + "name": "terminal_create", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": { + "command": {"type": "string"}, + "args": {"type": "array", "items": {"type": "string"}}, + "cwd": {"type": "string"}, + "preset_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "required": ["command", "args"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": { + "terminal_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + } + }, + "required": ["terminal_id"], + "additionalProperties": false + }, + "side_effects": true, + "description": "Spawn a PTY process and return the terminal ID." + }, + { + "name": "terminal_data_send", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": { + "terminal_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + }, + "data": {"type": "string"} + }, + "required": ["terminal_id", "data"], + "additionalProperties": false + }, + "output_schema": null, + "side_effects": true, + "description": "Send stdin bytes to a terminal." 
+ }, + { + "name": "terminal_resize", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": { + "terminal_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + }, + "cols": { + "type": "integer", + "minimum": -9007199254740991, + "maximum": 9007199254740991 + }, + "rows": {"type": "integer", "minimum": -9007199254740991, "maximum": 9007199254740991} + }, + "required": ["terminal_id", "cols", "rows"], + "additionalProperties": false + }, + "output_schema": null, + "side_effects": true, + "description": "Update PTY dimensions for a terminal." + }, + { + "name": "terminal_close", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": { + "terminal_id": { + "type": "string", + "format": "uuid", + "pattern": "^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$" + }, + "signal": {"type": "string"} + }, + "required": ["terminal_id"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": {"exit_code": {"anyOf": [{"type": "number"}, {"type": "null"}]}}, + "required": ["exit_code"], + "additionalProperties": false + }, + "side_effects": true, + "description": "Kill a terminal process and return the exit code." 
+ }, + { + "name": "workspace_open", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": {"path": {"type": "string"}}, + "required": ["path"], + "additionalProperties": false + }, + "output_schema": { + "type": "object", + "properties": { + "workspace": { + "type": "object", + "properties": { + "path": {"type": "string"}, + "name": {"type": "string"}, + "opened_at": {"type": "string"} + }, + "required": ["path", "name", "opened_at"], + "additionalProperties": false + }, + "files": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "source_dir": {"type": "string"}, + "contents": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "ctime": {"anyOf": [{"type": "number"}, {"type": "null"}]}, + "mtime": {"anyOf": [{"type": "number"}, {"type": "null"}]}, + "dependents": { + "type": "array", + "items": {"type": "array", "prefixItems": [{"type": "string"}, {}]} + }, + "dependencies": { + "type": "array", + "items": {"type": "array", "prefixItems": [{"type": "string"}, {}]} + } + }, + "required": [ + "id", + "source_dir", + "contents", + "ctime", + "mtime", + "dependents", + "dependencies" + ], + "additionalProperties": false + } + } + }, + "required": ["workspace", "files"], + "additionalProperties": false + }, + "side_effects": true, + "description": "Open a workspace directory — registers with ScopedFs and starts file watching." + }, + { + "name": "workspace_close", + "auth": {"type": "authenticated"}, + "input_schema": { + "type": "object", + "properties": {"path": {"type": "string"}}, + "required": ["path"], + "additionalProperties": false + }, + "output_schema": null, + "side_effects": true, + "description": "Close a workspace directory — stops file watching and removes from ScopedFs." 
+ }, + { + "name": "workspace_list", + "auth": {"type": "authenticated"}, + "input_schema": null, + "output_schema": { + "type": "object", + "properties": { + "workspaces": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": {"type": "string"}, + "name": {"type": "string"}, + "opened_at": {"type": "string"} + }, + "required": ["path", "name", "opened_at"], + "additionalProperties": false + } + } + }, + "required": ["workspaces"], + "additionalProperties": false + }, + "side_effects": false, + "description": "List all open workspaces." + } + ] + } + ], + "env": [ + { + "name": "NODE_ENV", + "description": "Runtime environment mode", + "sensitivity": null, + "has_default": false, + "optional": false + }, + { + "name": "PORT", + "description": "HTTP server port", + "sensitivity": null, + "has_default": true, + "optional": true + }, + { + "name": "HOST", + "description": "HTTP server bind address", + "sensitivity": null, + "has_default": true, + "optional": true + }, + { + "name": "DATABASE_URL", + "description": "Database URL (postgres://, file://, or memory://)", + "sensitivity": "secret", + "has_default": false, + "optional": false + }, + { + "name": "SECRET_COOKIE_KEYS", + "description": "Cookie signing keys, separated by __ for rotation", + "sensitivity": "secret", + "has_default": false, + "optional": false + }, + { + "name": "ALLOWED_ORIGINS", + "description": "Comma-separated origin patterns for API verification", + "sensitivity": null, + "has_default": false, + "optional": false + }, + { + "name": "PUBLIC_API_URL", + "description": "Public API base URL", + "sensitivity": null, + "has_default": true, + "optional": true + }, + { + "name": "PUBLIC_WEBSOCKET_URL", + "description": "Public WebSocket URL", + "sensitivity": null, + "has_default": false, + "optional": true + }, + { + "name": "PUBLIC_CONTACT_EMAIL", + "description": "Public contact email address", + "sensitivity": null, + "has_default": false, + "optional": true + }, + { + 
"name": "BOOTSTRAP_TOKEN_PATH", + "description": "Path to one-shot admin bootstrap token", + "sensitivity": "secret", + "has_default": false, + "optional": true + }, + { + "name": "SMTP_HOST", + "description": "SMTP server hostname", + "sensitivity": null, + "has_default": false, + "optional": true + }, + { + "name": "SMTP_USER", + "description": "SMTP authentication username", + "sensitivity": null, + "has_default": false, + "optional": true + }, + { + "name": "SMTP_PASSWORD", + "description": "SMTP authentication password", + "sensitivity": "secret", + "has_default": false, + "optional": true + }, + { + "name": "PUBLIC_ZZZ_DIR", + "description": "Zzz app data directory", + "sensitivity": null, + "has_default": true, + "optional": true + }, + { + "name": "PUBLIC_ZZZ_SCOPED_DIRS", + "description": "Comma-separated filesystem paths the server can access", + "sensitivity": null, + "has_default": true, + "optional": true + }, + { + "name": "PUBLIC_BACKEND_ARTIFICIAL_RESPONSE_DELAY", + "description": "Artificial response delay in ms (testing)", + "sensitivity": null, + "has_default": true, + "optional": true + }, + { + "name": "SECRET_ANTHROPIC_API_KEY", + "description": "Anthropic API key for Claude provider", + "sensitivity": "secret", + "has_default": false, + "optional": true + }, + { + "name": "SECRET_OPENAI_API_KEY", + "description": "OpenAI API key for ChatGPT provider", + "sensitivity": "secret", + "has_default": false, + "optional": true + }, + { + "name": "SECRET_GOOGLE_API_KEY", + "description": "Google API key for Gemini provider", + "sensitivity": "secret", + "has_default": false, + "optional": true + } + ], + "events": [] +} diff --git a/src/test/server/routes/auth_attack_surface.test.ts b/src/test/server/routes/auth_attack_surface.test.ts new file mode 100644 index 000000000..014b12e32 --- /dev/null +++ b/src/test/server/routes/auth_attack_surface.test.ts @@ -0,0 +1,40 @@ +import {describe_standard_attack_surface_tests} from 
'@fuzdev/fuz_app/testing/attack_surface.js'; +import {describe_rpc_attack_surface_tests} from '@fuzdev/fuz_app/testing/rpc_attack_surface.js'; + +import { + create_zzz_app_surface_spec, + resolve_zzz_fixture_path, +} from './auth_attack_surface_helpers.js'; + +describe_standard_attack_surface_tests({ + build: create_zzz_app_surface_spec, + snapshot_path: resolve_zzz_fixture_path('auth_attack_surface.json'), + expected_public_routes: [ + 'GET /health', + 'GET /api/account/status', + 'POST /api/account/login', + 'POST /api/account/bootstrap', + 'GET /api/rpc', + 'POST /api/rpc', + ], + expected_api_middleware: [ + 'host_validation', + 'origin', + 'session', + 'request_context', + 'bearer_auth', + ], + roles: ['admin', 'keeper'], + security_policy: { + public_mutation_allowlist: [ + 'POST /api/account/login', + 'POST /api/account/bootstrap', + 'POST /api/rpc', + ], + }, +}); + +describe_rpc_attack_surface_tests({ + build: create_zzz_app_surface_spec, + roles: ['admin', 'keeper'], +}); diff --git a/src/test/server/routes/auth_attack_surface_helpers.ts b/src/test/server/routes/auth_attack_surface_helpers.ts new file mode 100644 index 000000000..2b04aaddc --- /dev/null +++ b/src/test/server/routes/auth_attack_surface_helpers.ts @@ -0,0 +1,52 @@ +/** + * Attack surface helpers for zzz. + * + * Provides the shared `create_zzz_app_surface_spec` factory and fixture + * path resolver used by attack surface tests and snapshot generation. 
+ * + * @module + */ + +import {create_test_app_surface_spec, stub, stub_mw} from '@fuzdev/fuz_app/testing/stubs.js'; +import {resolve_fixture_path} from '@fuzdev/fuz_app/testing/assertions.js'; +import type {AppSurfaceSpec} from '@fuzdev/fuz_app/http/surface.js'; +import type {MiddlewareSpec} from '@fuzdev/fuz_app/http/middleware_spec.js'; + +import {zzz_session_config} from '$lib/server/routes/account.js'; +import { + create_zzz_app_route_specs, + create_zzz_rpc_endpoint_spec, +} from '$lib/server/zzz_route_specs.js'; +import {ZzzServerEnv} from '$lib/server/server_env.js'; + +/** Stub deps for zzz RPC actions — handlers are never called during surface generation. */ +const zzz_stub_deps = { + backend: stub, +}; + +/** + * Create the zzz attack surface spec for snapshot and adversarial testing. + * + * Mirrors production assembly: route specs + host_validation middleware + + * RPC endpoint with all 24 actions. + */ +export const create_zzz_app_surface_spec = (): AppSurfaceSpec => + create_test_app_surface_spec({ + session_options: zzz_session_config, + create_route_specs: (ctx) => + create_zzz_app_route_specs(ctx, { + zzz: zzz_stub_deps, + version: '', + get_uptime_ms: () => 0, + }), + rpc_endpoints: [create_zzz_rpc_endpoint_spec(zzz_stub_deps)], + env_schema: ZzzServerEnv, + transform_middleware: (specs: Array<MiddlewareSpec>): Array<MiddlewareSpec> => [ + {name: 'host_validation', path: '*', handler: stub_mw}, + ...specs, + ], + }); + +/** Resolve fixture paths relative to this module. 
*/ +export const resolve_zzz_fixture_path = (filename: string): string => + resolve_fixture_path(filename, import.meta.url); diff --git a/src/test/server/routes/server.integration.db.test.ts b/src/test/server/routes/server.integration.db.test.ts new file mode 100644 index 000000000..557e3700c --- /dev/null +++ b/src/test/server/routes/server.integration.db.test.ts @@ -0,0 +1,86 @@ +import {describe_standard_integration_tests} from '@fuzdev/fuz_app/testing/integration.js'; +import {describe_standard_admin_integration_tests} from '@fuzdev/fuz_app/testing/admin_integration.js'; +import {describe_rate_limiting_tests} from '@fuzdev/fuz_app/testing/rate_limiting.js'; +import {describe_round_trip_validation} from '@fuzdev/fuz_app/testing/round_trip.js'; +import {describe_rpc_round_trip_tests} from '@fuzdev/fuz_app/testing/rpc_round_trip.js'; +import {describe_data_exposure_tests} from '@fuzdev/fuz_app/testing/data_exposure.js'; +import {create_role_schema} from '@fuzdev/fuz_app/auth/role_schema.js'; +import type {RouteSpec} from '@fuzdev/fuz_app/http/route_spec.js'; +import type {AppServerContext} from '@fuzdev/fuz_app/server/app_server.js'; +import {stub} from '@fuzdev/fuz_app/testing/stubs.js'; + +import {zzz_session_config} from '$lib/server/routes/account.js'; +import { + create_zzz_app_route_specs, + create_zzz_rpc_endpoint_spec, +} from '$lib/server/zzz_route_specs.js'; + +import {db_factories} from '../../db_fixture.js'; +import {create_zzz_app_surface_spec} from './auth_attack_surface_helpers.js'; + +/** Stub deps — handlers are never called by auth integration tests. */ +const zzz_rpc_stub_deps = { + backend: stub, +}; + +/** Route factory with stub deps for composable suites. */ +const create_zzz_test_route_specs = (ctx: AppServerContext): Array<RouteSpec> => + create_zzz_app_route_specs(ctx, { + zzz: zzz_rpc_stub_deps, + version: '', + get_uptime_ms: () => 0, + }); + +/** zzz uses default admin/keeper roles — no app-specific extensions. 
*/ +const zzz_roles = create_role_schema({}); + +// -- Composable suites -- + +describe_standard_integration_tests({ + session_options: zzz_session_config, + create_route_specs: create_zzz_test_route_specs, + db_factories, +}); + +describe_standard_admin_integration_tests({ + session_options: zzz_session_config, + create_route_specs: create_zzz_test_route_specs, + roles: zzz_roles, + db_factories, +}); + +describe_rate_limiting_tests({ + session_options: zzz_session_config, + create_route_specs: create_zzz_test_route_specs, + db_factories, +}); + +describe_round_trip_validation({ + session_options: zzz_session_config, + create_route_specs: create_zzz_test_route_specs, + skip_routes: [ + 'GET /api/rpc', // covered by describe_rpc_round_trip_tests + 'POST /api/rpc', + ], +}); + +describe_rpc_round_trip_tests({ + session_options: zzz_session_config, + create_route_specs: create_zzz_test_route_specs, + rpc_endpoints: [create_zzz_rpc_endpoint_spec(zzz_rpc_stub_deps)], + // Domain handlers use a throwing stub Backend — the RPC dispatcher catches + // all throws and returns well-formed JSON-RPC error responses, which the + // round-trip test accepts. Only DiskfileDirectoryPath inputs need overrides + // because the schema generator can't produce trailing-slash absolute paths. 
+ input_overrides: new Map([ + ['workspace_open', {path: '/test/dir/'}], + ['workspace_close', {path: '/test/dir/'}], + ]), +}); + +describe_data_exposure_tests({ + build: create_zzz_app_surface_spec, + session_options: zzz_session_config, + create_route_specs: create_zzz_test_route_specs, + db_factories, +}); diff --git a/src/test/server/scoped_fs_advanced.test.ts b/src/test/server/scoped_fs_advanced.test.ts index 67e710b8d..0967a1f36 100644 --- a/src/test/server/scoped_fs_advanced.test.ts +++ b/src/test/server/scoped_fs_advanced.test.ts @@ -1,14 +1,10 @@ -// @slop Claude Sonnet 3.7 - -import {test, expect, vi, beforeEach, afterEach, describe} from 'vitest'; +import {test, vi, beforeEach, describe, assert} from 'vitest'; +import {assert_rejects} from '@fuzdev/fuz_util/testing.js'; import * as fs from 'node:fs/promises'; -import * as fs_sync from 'node:fs'; import {ScopedFs, SymlinkNotAllowedError} from '$lib/server/scoped_fs.js'; -/* eslint-disable @typescript-eslint/require-await, @typescript-eslint/no-empty-function, no-await-in-loop */ - -// Mock fs/promises and fs modules +// Mock fs/promises vi.mock('node:fs/promises', () => ({ readFile: vi.fn(), writeFile: vi.fn(), @@ -21,10 +17,6 @@ vi.mock('node:fs/promises', () => ({ access: vi.fn(), })); -vi.mock('node:fs', () => ({ - existsSync: vi.fn(), -})); - // Test constants const TEST_ALLOWED_PATHS = ['/allowed/path', '/allowed/other/path/', '/another/allowed/directory']; const FILE_PATHS = { @@ -48,15 +40,8 @@ const DIR_PATHS = { const create_test_instance = () => new ScopedFs(TEST_ALLOWED_PATHS); -// Setup/cleanup for each test -let console_spy: any; - beforeEach(() => { vi.clearAllMocks(); - console_spy = vi.spyOn(console, 'error').mockImplementation(() => {}); - - // Default mock implementations - vi.mocked(fs_sync.existsSync).mockReturnValue(true); // Default lstat mock returning a non-symlink file vi.mocked(fs.lstat).mockImplementation(() => @@ -68,10 +53,6 @@ beforeEach(() => { ); }); -afterEach(() => { - 
console_spy.mockRestore(); -}); - // Helper to create ENOENT error with proper code property const create_enoent_error = (path: string): NodeJS.ErrnoException => { const error = new Error( @@ -106,11 +87,6 @@ const setup_mock_filesystem = () => { '/another/allowed/directory': {isDir: true, isSymlink: false}, }; - vi.mocked(fs_sync.existsSync).mockImplementation((pathStr) => { - if (typeof pathStr !== 'string') return false; - return (filesystem as any)[pathStr] !== undefined; - }); - vi.mocked(fs.lstat).mockImplementation(async (pathStr) => { const entry = (filesystem as any)[pathStr as string]; if (!entry) { @@ -133,11 +109,11 @@ describe('ScopedFs - constructor edge cases', () => { const scoped_fs = new ScopedFs(paths_with_empty); // Should only have 2 allowed paths (empty strings filtered) - expect(scoped_fs.allowed_paths.length).toBe(2); + assert.strictEqual(scoped_fs.allowed_paths.length, 2); // Valid paths should be allowed - expect(scoped_fs.is_path_allowed('/valid/path/file.txt')).toBe(true); - expect(scoped_fs.is_path_allowed('/another/path/file.txt')).toBe(true); + assert.ok(scoped_fs.is_path_allowed('/valid/path/file.txt')); + assert.ok(scoped_fs.is_path_allowed('/another/path/file.txt')); }); test('should filter out null and undefined values from allowed paths', () => { @@ -145,7 +121,7 @@ describe('ScopedFs - constructor edge cases', () => { const scoped_fs = new ScopedFs(paths_with_nullish); // Should only have 2 allowed paths (nullish values filtered) - expect(scoped_fs.allowed_paths.length).toBe(2); + assert.strictEqual(scoped_fs.allowed_paths.length, 2); }); test('should handle array with only empty/falsy values', () => { @@ -153,10 +129,10 @@ describe('ScopedFs - constructor edge cases', () => { const scoped_fs = new ScopedFs(empty_array); // Should have no allowed paths - expect(scoped_fs.allowed_paths.length).toBe(0); + assert.strictEqual(scoped_fs.allowed_paths.length, 0); // No paths should be allowed - 
expect(scoped_fs.is_path_allowed('/any/path')).toBe(false); + assert.ok(!scoped_fs.is_path_allowed('/any/path')); }); }); @@ -176,7 +152,7 @@ describe('ScopedFs - advanced path validation', () => { // All should be allowed for (const path of special_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(true); + assert.ok(scoped_fs.is_path_allowed(path)); } }); @@ -192,7 +168,7 @@ describe('ScopedFs - advanced path validation', () => { ]; for (const path of paths_with_multiple_slashes) { - expect(scoped_fs.is_path_allowed(path)).toBe(true); + assert.ok(scoped_fs.is_path_allowed(path)); } }); @@ -217,14 +193,14 @@ describe('ScopedFs - advanced path validation', () => { // Test valid paths for (const path of valid_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(true); - expect(await scoped_fs.is_path_safe(path)).toBe(true); + assert.ok(scoped_fs.is_path_allowed(path)); + assert.ok(await scoped_fs.is_path_safe(path)); } // Test invalid paths for (const path of invalid_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(false); - expect(await scoped_fs.is_path_safe(path)).toBe(false); + assert.ok(!scoped_fs.is_path_allowed(path)); + assert.ok(!(await scoped_fs.is_path_safe(path))); } }); }); @@ -269,7 +245,7 @@ describe('ScopedFs - advanced directory operations', () => { } await scoped_fs.readdir(DIR_PATHS.ALLOWED, option as any); - expect(fs.readdir).toHaveBeenCalledWith(DIR_PATHS.ALLOWED, option); + assert.deepEqual(vi.mocked(fs.readdir).mock.calls[0], [DIR_PATHS.ALLOWED, option] as any); } }); @@ -278,17 +254,18 @@ describe('ScopedFs - advanced directory operations', () => { // Test creating a deeply nested directory await scoped_fs.mkdir(DIR_PATHS.NESTED, {recursive: true}); - expect(fs.mkdir).toHaveBeenCalledWith(DIR_PATHS.NESTED, {recursive: true}); + assert.deepEqual(vi.mocked(fs.mkdir).mock.calls[0], [DIR_PATHS.NESTED, {recursive: true}]); // Without recursive flag, it should still try to create the directory await 
scoped_fs.mkdir(DIR_PATHS.NEW_DIR); - expect(fs.mkdir).toHaveBeenCalledWith(DIR_PATHS.NEW_DIR, undefined); + assert.deepEqual(vi.mocked(fs.mkdir).mock.calls[1], [DIR_PATHS.NEW_DIR, undefined]); // Should properly bubble up errors from fs.mkdir const error = new Error('EEXIST: directory already exists'); vi.mocked(fs.mkdir).mockRejectedValueOnce(error); - await expect(scoped_fs.mkdir(DIR_PATHS.ALLOWED)).rejects.toThrow(error); + const e = await assert_rejects(() => scoped_fs.mkdir(DIR_PATHS.ALLOWED)); + assert.strictEqual(e, error); }); test('rm - should handle various removal options', async () => { @@ -307,7 +284,7 @@ describe('ScopedFs - advanced directory operations', () => { vi.mocked(fs.rm).mockResolvedValueOnce(); await scoped_fs.rm(DIR_PATHS.ALLOWED, options); - expect(fs.rm).toHaveBeenCalledWith(DIR_PATHS.ALLOWED, options); + assert.deepEqual(vi.mocked(fs.rm).mock.calls[0], [DIR_PATHS.ALLOWED, options]); } }); }); @@ -338,7 +315,6 @@ describe('ScopedFs - advanced security features', () => { for (const {path, symlink_at} of symlink_test_cases) { // Reset mocks for each test case vi.mocked(fs.lstat).mockReset(); - vi.mocked(fs_sync.existsSync).mockReturnValue(true); // Setup a custom lstat implementation for this test case vi.mocked(fs.lstat).mockImplementation(async (p) => { @@ -357,8 +333,9 @@ describe('ScopedFs - advanced security features', () => { }); // Each case should be rejected with a SymlinkNotAllowedError - await expect(scoped_fs.read_file(path)).rejects.toThrow(SymlinkNotAllowedError); - expect(fs.readFile).not.toHaveBeenCalled(); + const e = await assert_rejects(() => scoped_fs.read_file(path)); + assert.instanceOf(e, SymlinkNotAllowedError); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 0); } }); @@ -374,8 +351,9 @@ describe('ScopedFs - advanced security features', () => { ]; for (const path of tricky_traversal_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(false); - await 
expect(scoped_fs.read_file(path)).rejects.toThrow('Path is not allowed'); + assert.ok(!scoped_fs.is_path_allowed(path)); + const error = await assert_rejects(() => scoped_fs.read_file(path)); + assert.include(error.message, 'Path is not allowed'); } }); @@ -393,8 +371,8 @@ describe('ScopedFs - advanced security features', () => { // Should detect symlink and return false without calling access const exists = await scoped_fs.exists('/allowed/path/evil-symlink'); - expect(exists).toBe(false); - expect(fs.access).not.toHaveBeenCalled(); + assert.ok(!exists); + assert.strictEqual(vi.mocked(fs.access).mock.calls.length, 0); }); }); @@ -406,10 +384,9 @@ describe('ScopedFs - error handling and edge cases', () => { vi.mocked(fs.lstat).mockRejectedValue(new Error('Unknown filesystem error')); // The error during symlink check should be caught and rethrown - await expect(scoped_fs.read_file(FILE_PATHS.ALLOWED)).rejects.toThrow( - 'Unknown filesystem error', - ); - expect(fs.readFile).not.toHaveBeenCalled(); + const error = await assert_rejects(() => scoped_fs.read_file(FILE_PATHS.ALLOWED)); + assert.include(error.message, 'Unknown filesystem error'); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 0); }); test('should ignore ENOENT errors when checking target path', async () => { @@ -425,7 +402,11 @@ describe('ScopedFs - error handling and edge cases', () => { // Should proceed despite ENOENT - file may not exist yet await scoped_fs.write_file(FILE_PATHS.NONEXISTENT, 'new content'); - expect(fs.writeFile).toHaveBeenCalledWith(FILE_PATHS.NONEXISTENT, 'new content', 'utf8'); + assert.deepEqual(vi.mocked(fs.writeFile).mock.calls[0], [ + FILE_PATHS.NONEXISTENT, + 'new content', + 'utf8', + ]); }); test('should ignore ENOENT errors when checking parent directories', async () => { @@ -454,7 +435,7 @@ describe('ScopedFs - error handling and edge cases', () => { // Should proceed despite parent directories not existing await scoped_fs.write_file(deep_path, 'content'); 
- expect(fs.writeFile).toHaveBeenCalledWith(deep_path, 'content', 'utf8'); + assert.deepEqual(vi.mocked(fs.writeFile).mock.calls[0], [deep_path, 'content', 'utf8']); }); test('should NOT ignore non-ENOENT errors when checking paths', async () => { @@ -466,8 +447,9 @@ describe('ScopedFs - error handling and edge cases', () => { vi.mocked(fs.lstat).mockRejectedValueOnce(eacces_error); // Should throw the EACCES error, not ignore it - await expect(scoped_fs.read_file(FILE_PATHS.ALLOWED)).rejects.toThrow('EACCES'); - expect(fs.readFile).not.toHaveBeenCalled(); + const error = await assert_rejects(() => scoped_fs.read_file(FILE_PATHS.ALLOWED)); + assert.include(error.message, 'EACCES'); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 0); }); test('should NOT ignore non-ENOENT errors when checking parent directories', async () => { @@ -487,8 +469,9 @@ describe('ScopedFs - error handling and edge cases', () => { vi.mocked(fs.lstat).mockRejectedValueOnce(eacces_error); // Should throw the EACCES error from parent check - await expect(scoped_fs.read_file(nested_path)).rejects.toThrow('EACCES'); - expect(fs.readFile).not.toHaveBeenCalled(); + const error = await assert_rejects(() => scoped_fs.read_file(nested_path)); + assert.include(error.message, 'EACCES'); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 0); }); test('should handle a variety of filesystem errors from underlying operations', async () => { @@ -521,8 +504,9 @@ describe('ScopedFs - error handling and edge cases', () => { vi.mocked(fs.readFile).mockRejectedValueOnce(error); // Error should be passed through - await expect(scoped_fs.read_file(FILE_PATHS.ALLOWED)).rejects.toThrow(message); - expect(fs.readFile).toHaveBeenCalledWith(FILE_PATHS.ALLOWED, 'utf8'); + const e = await assert_rejects(() => scoped_fs.read_file(FILE_PATHS.ALLOWED)); + assert.include(e.message, message); + assert.deepEqual(vi.mocked(fs.readFile).mock.calls[0], [FILE_PATHS.ALLOWED, 'utf8']); } }); @@ -530,19 
+514,18 @@ describe('ScopedFs - error handling and edge cases', () => { const scoped_fs = create_test_instance(); const deep_nonexistent_path = '/allowed/path/does/not/exist/yet/file.txt'; - // Setup existsSync to simulate missing directories - vi.mocked(fs_sync.existsSync).mockImplementation((p) => { - return String(p) === '/allowed/path'; // Only the base allowed path exists - }); - // The path itself is allowed since it's under an allowed directory - expect(scoped_fs.is_path_allowed(deep_nonexistent_path)).toBe(true); + assert.ok(scoped_fs.is_path_allowed(deep_nonexistent_path)); // Write operation should be allowed after path validation vi.mocked(fs.writeFile).mockResolvedValueOnce(); await scoped_fs.write_file(deep_nonexistent_path, 'content'); - expect(fs.writeFile).toHaveBeenCalledWith(deep_nonexistent_path, 'content', 'utf8'); + assert.deepEqual(vi.mocked(fs.writeFile).mock.calls[0], [ + deep_nonexistent_path, + 'content', + 'utf8', + ]); }); test('should handle extreme edge cases gracefully', async () => { @@ -556,14 +539,14 @@ describe('ScopedFs - error handling and edge cases', () => { ]; for (const path of edge_case_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(true); + assert.ok(scoped_fs.is_path_allowed(path)); // Mock a successful read to test the full flow vi.mocked(fs.readFile).mockReset(); vi.mocked(fs.readFile).mockResolvedValueOnce('content' as any); const content = await scoped_fs.read_file(path); - expect(content).toBe('content'); + assert.strictEqual(content, 'content'); } }); }); @@ -591,12 +574,16 @@ describe('ScopedFs - advanced use cases', () => { await scoped_fs.rm(source_file); // Verify all operations happened with correct parameters - expect(content).toBe('file content'); - expect(fs.mkdir).toHaveBeenCalledWith(workflow_dir, undefined); - expect(fs.writeFile).toHaveBeenCalledWith(source_file, 'original content', 'utf8'); - expect(fs.readFile).toHaveBeenCalledWith(source_file, 'utf8'); - 
expect(fs.copyFile).toHaveBeenCalledWith(source_file, dest_file, undefined); - expect(fs.rm).toHaveBeenCalledWith(source_file, undefined); + assert.strictEqual(content, 'file content'); + assert.deepEqual(vi.mocked(fs.mkdir).mock.calls[0], [workflow_dir, undefined]); + assert.deepEqual(vi.mocked(fs.writeFile).mock.calls[0], [ + source_file, + 'original content', + 'utf8', + ]); + assert.deepEqual(vi.mocked(fs.readFile).mock.calls[0], [source_file, 'utf8']); + assert.deepEqual(vi.mocked(fs.copyFile).mock.calls[0], [source_file, dest_file, undefined]); + assert.deepEqual(vi.mocked(fs.rm).mock.calls[0], [source_file, undefined]); }); test('should handle concurrent operations correctly', async () => { @@ -621,12 +608,15 @@ describe('ScopedFs - advanced use cases', () => { ]); // Verify results - expect(result1).toBe('content1'); - expect(result2).toBe('content2'); - expect(fs.readFile).toHaveBeenCalledTimes(2); - expect(fs.writeFile).toHaveBeenCalledTimes(2); - expect(fs.writeFile).toHaveBeenCalledWith('/allowed/path/output1.txt', 'data1', 'utf8'); - expect(fs.writeFile).toHaveBeenCalledWith('/allowed/path/output2.txt', 'data2', 'utf8'); + assert.strictEqual(result1, 'content1'); + assert.strictEqual(result2, 'content2'); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 2); + assert.strictEqual(vi.mocked(fs.writeFile).mock.calls.length, 2); + // Check that both write calls happened (order may vary with concurrent ops) + const write_calls = vi.mocked(fs.writeFile).mock.calls; + const write_paths = write_calls.map((c) => c[0]); + assert.ok(write_paths.includes('/allowed/path/output1.txt')); + assert.ok(write_paths.includes('/allowed/path/output2.txt')); }); test('should handle sequential operations that build on each other', async () => { @@ -666,10 +656,10 @@ describe('ScopedFs - advanced use cases', () => { await scoped_fs.rm(base_dir, {recursive: true}); // Verify everything worked as expected - expect(contents).toEqual(['content1', 'content2']); - 
expect(fs.mkdir).toHaveBeenCalledWith(base_dir, undefined); - expect(fs.readdir).toHaveBeenCalledWith(base_dir, undefined); - expect(fs.rm).toHaveBeenCalledWith(base_dir, {recursive: true}); + assert.deepEqual(contents, ['content1', 'content2']); + assert.deepEqual(vi.mocked(fs.mkdir).mock.calls[0], [base_dir, undefined] as any); + assert.deepEqual(vi.mocked(fs.readdir).mock.calls[0], [base_dir, undefined] as any); + assert.deepEqual(vi.mocked(fs.rm).mock.calls[0], [base_dir, {recursive: true}] as any); }); }); @@ -681,11 +671,11 @@ describe('ScopedFs - directory path trailing slash handling', () => { // All paths should have trailing slashes internally for (const path of scoped_fs.allowed_paths) { - expect(path.endsWith('/')).toBe(true); + assert.ok(path.endsWith('/')); } // Original array should be unmodified - expect(paths_with_mix).toEqual(['/path1', '/path2/', '/path3/subdir', '/path4/subdir/']); + assert.deepEqual(paths_with_mix, ['/path1', '/path2/', '/path3/subdir', '/path4/subdir/']); }); test('should correctly validate paths regardless of trailing slashes', () => { @@ -703,7 +693,7 @@ describe('ScopedFs - directory path trailing slash handling', () => { ]; for (const path of valid_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(true); + assert.ok(scoped_fs.is_path_allowed(path)); } // These paths should all be rejected @@ -715,7 +705,7 @@ describe('ScopedFs - directory path trailing slash handling', () => { ]; for (const path of invalid_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(false); + assert.ok(!scoped_fs.is_path_allowed(path)); } }); @@ -742,12 +732,12 @@ describe('ScopedFs - directory path trailing slash handling', () => { // Read operations should work with all variations await scoped_fs.read_file(path); - expect(fs.readFile).toHaveBeenCalledWith(path, 'utf8'); + assert.deepEqual(vi.mocked(fs.readFile).mock.calls[0], [path, 'utf8']); // Write operations should also work vi.mocked(fs.writeFile).mockClear(); await 
scoped_fs.write_file(path, 'content'); - expect(fs.writeFile).toHaveBeenCalledWith(path, 'content', 'utf8'); + assert.deepEqual(vi.mocked(fs.writeFile).mock.calls[0], [path, 'content', 'utf8']); } }); @@ -776,13 +766,14 @@ describe('ScopedFs - directory path trailing slash handling', () => { // Test valid paths for (const path of valid_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(true); + assert.ok(scoped_fs.is_path_allowed(path)); } // Test invalid paths for (const path of invalid_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(false); - await expect(scoped_fs.read_file(path)).rejects.toThrow('Path is not allowed'); + assert.ok(!scoped_fs.is_path_allowed(path)); + const error = await assert_rejects(() => scoped_fs.read_file(path)); + assert.include(error.message, 'Path is not allowed'); } }); @@ -794,14 +785,14 @@ describe('ScopedFs - directory path trailing slash handling', () => { const test_paths = ['/', '/etc', '/usr/bin', '/home/user/file.txt']; for (const path of test_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(true); + assert.ok(scoped_fs.is_path_allowed(path)); // Mock successful read vi.mocked(fs.readFile).mockReset(); vi.mocked(fs.readFile).mockResolvedValueOnce('content' as any); const content = await scoped_fs.read_file(path); - expect(content).toBe('content'); + assert.strictEqual(content, 'content'); } }); }); diff --git a/src/test/server/scoped_fs_basic.test.ts b/src/test/server/scoped_fs_basic.test.ts index c3127abbd..c3d0e7c1a 100644 --- a/src/test/server/scoped_fs_basic.test.ts +++ b/src/test/server/scoped_fs_basic.test.ts @@ -1,14 +1,11 @@ -// @slop Claude Sonnet 3.7 - -import {test, expect, vi, beforeEach, afterEach, describe} from 'vitest'; +import {test, vi, beforeEach, describe, assert} from 'vitest'; +import {assert_rejects} from '@fuzdev/fuz_util/testing.js'; import * as fs from 'node:fs/promises'; -import * as fs_sync from 'node:fs'; - -import {ScopedFs, SymlinkNotAllowedError} from 
'$lib/server/scoped_fs.js'; +import type {Stats, BigIntStats} from 'node:fs'; -/* eslint-disable no-await-in-loop, @typescript-eslint/no-empty-function */ +import {ScopedFs, PathNotAllowedError, SymlinkNotAllowedError} from '$lib/server/scoped_fs.js'; -// Mock fs/promises and fs modules +// Mock fs/promises vi.mock('node:fs/promises', () => ({ readFile: vi.fn(), writeFile: vi.fn(), @@ -21,10 +18,6 @@ vi.mock('node:fs/promises', () => ({ access: vi.fn(), })); -vi.mock('node:fs', () => ({ - existsSync: vi.fn(), -})); - // Test constants const TEST_ALLOWED_PATHS = ['/allowed/path', '/allowed/other/path/', '/another/allowed/directory']; const FILE_PATHS = { @@ -41,15 +34,8 @@ const DIR_PATHS = { const create_test_instance = () => new ScopedFs(TEST_ALLOWED_PATHS); -// Setup/cleanup for each test -let console_spy: any; - beforeEach(() => { vi.clearAllMocks(); - console_spy = vi.spyOn(console, 'error').mockImplementation(() => {}); - - // Default mock implementations - vi.mocked(fs_sync.existsSync).mockReturnValue(true); // Default lstat mock returning a non-symlink file vi.mocked(fs.lstat).mockImplementation(() => @@ -61,14 +47,10 @@ beforeEach(() => { ); }); -afterEach(() => { - console_spy.mockRestore(); -}); - describe('ScopedFs - construction and initialization', () => { test('constructor - should accept an array of allowed paths', () => { const scoped_fs = create_test_instance(); - expect(scoped_fs).toBeInstanceOf(ScopedFs); + assert.instanceOf(scoped_fs, ScopedFs); }); test('constructor - should make a defensive copy of allowed paths', () => { @@ -79,96 +61,95 @@ describe('ScopedFs - construction and initialization', () => { original_paths.push('/new/path'); // The instance should still only allow the original paths - expect(scoped_fs.is_path_allowed('/new/path')).toBe(false); + assert.ok(!scoped_fs.is_path_allowed('/new/path')); }); test('constructor - should throw for invalid paths', () => { // Non-absolute path - expect(() => new 
ScopedFs(['relative/path'])).toThrow(); + assert.throws(() => new ScopedFs(['relative/path'])); // Empty path array should work but won't allow any paths const empty_scoped_fs = new ScopedFs([]); - expect(empty_scoped_fs.is_path_allowed('/any/path')).toBe(false); + assert.ok(!empty_scoped_fs.is_path_allowed('/any/path')); }); -}); - -describe('ScopedFs - path validation', () => { - test('is_path_allowed - should return true for paths within allowed directories', () => { - const scoped_fs = create_test_instance(); - - const valid_paths = [ - ...TEST_ALLOWED_PATHS, - FILE_PATHS.ALLOWED, - FILE_PATHS.NESTED, - '/allowed/path/subdir/', - ]; - for (const path of valid_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(true); - } + test('constructor - should deduplicate paths', () => { + const scoped_fs = new ScopedFs(['/path/a', '/path/a', '/path/b', '/path/b']); + assert.strictEqual(scoped_fs.allowed_paths.length, 2); }); - test('is_path_allowed - should return false for paths outside allowed directories', () => { - const scoped_fs = create_test_instance(); - - const invalid_paths = [ - FILE_PATHS.OUTSIDE, - DIR_PATHS.OUTSIDE, - '/allowed', // parent of allowed path - '/allowed-other', // similar prefix - ]; - - for (const path of invalid_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(false); - } + test('constructor - should deduplicate paths that normalize to the same value', () => { + const scoped_fs = new ScopedFs(['/path/a', '/path/a/']); + assert.strictEqual(scoped_fs.allowed_paths.length, 1); }); - test('is_path_allowed - should reject relative paths', () => { + test('exists - should use the normalized path for fs.access', async () => { const scoped_fs = create_test_instance(); - const relative_paths = ['relative/path', './relative/path', '../relative/path']; + vi.mocked(fs.access).mockResolvedValueOnce(); - for (const path of relative_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(false); - } - }); + await 
scoped_fs.exists('/allowed/path/./subdir/../file.txt'); - test('is_path_allowed - should detect path traversal attempts', () => { - const scoped_fs = create_test_instance(); + // fs.access should receive the normalized path, not the original + assert.strictEqual(vi.mocked(fs.access).mock.calls.length, 1); + assert.strictEqual(vi.mocked(fs.access).mock.calls[0]![0], '/allowed/path/file.txt'); + }); +}); - const traversal_paths = [FILE_PATHS.TRAVERSAL, '/allowed/path/../not-allowed']; +const path_allowed_cases: Array<[label: string, path: string, expected: boolean]> = [ + // Within allowed directories + ['allowed root /allowed/path', '/allowed/path', true], + ['allowed root /allowed/other/path/', '/allowed/other/path/', true], + ['allowed root /another/allowed/directory', '/another/allowed/directory', true], + ['nested file in allowed path', FILE_PATHS.ALLOWED, true], + ['deep nested file in allowed path', FILE_PATHS.NESTED, true], + ['subdirectory in allowed path', '/allowed/path/subdir/', true], + // Outside allowed directories + ['file outside allowed paths', FILE_PATHS.OUTSIDE, false], + ['directory outside allowed paths', DIR_PATHS.OUTSIDE, false], + ['parent of allowed path', '/allowed', false], + ['similar prefix but not allowed', '/allowed-other', false], + // Relative paths + ['relative path (no prefix)', 'relative/path', false], + ['dot-relative path', './relative/path', false], + ['parent-relative path', '../relative/path', false], + // Path traversal + ['traversal via ../', FILE_PATHS.TRAVERSAL, false], + ['traversal to non-allowed sibling', '/allowed/path/../not-allowed', false], +]; - for (const path of traversal_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(false); - } +describe('ScopedFs - path validation', () => { + test.each(path_allowed_cases)('is_path_allowed - %s', (_label, path, expected) => { + const scoped_fs = create_test_instance(); + assert.strictEqual(scoped_fs.is_path_allowed(path), expected); }); test('is_path_allowed - should 
handle special cases correctly', () => { const scoped_fs = create_test_instance(); // Empty path - expect(scoped_fs.is_path_allowed('')).toBe(false); + assert.ok(!scoped_fs.is_path_allowed('')); // Root directory (only allowed if explicitly included) - expect(scoped_fs.is_path_allowed('/')).toBe(false); + assert.ok(!scoped_fs.is_path_allowed('/')); // With root directory explicitly allowed const root_scoped_fs = new ScopedFs(['/']); - expect(root_scoped_fs.is_path_allowed('/')).toBe(true); - expect(root_scoped_fs.is_path_allowed('/any/path')).toBe(true); + assert.ok(root_scoped_fs.is_path_allowed('/')); + assert.ok(root_scoped_fs.is_path_allowed('/any/path')); }); test('is_path_safe - should verify path security including symlink checks', async () => { const scoped_fs = create_test_instance(); // Regular allowed path without symlinks - expect(await scoped_fs.is_path_safe(FILE_PATHS.ALLOWED)).toBe(true); + assert.ok(await scoped_fs.is_path_safe(FILE_PATHS.ALLOWED)); // Path outside allowed directories - expect(await scoped_fs.is_path_safe(FILE_PATHS.OUTSIDE)).toBe(false); + assert.ok(!(await scoped_fs.is_path_safe(FILE_PATHS.OUTSIDE))); // Path with traversal - expect(await scoped_fs.is_path_safe(FILE_PATHS.TRAVERSAL)).toBe(false); + assert.ok(!(await scoped_fs.is_path_safe(FILE_PATHS.TRAVERSAL))); // Mock a symlink to test rejection vi.mocked(fs.lstat).mockImplementationOnce(() => @@ -180,7 +161,42 @@ describe('ScopedFs - path validation', () => { ); // Symlinked file should fail the safety check - expect(await scoped_fs.is_path_safe('/allowed/path/symlink')).toBe(false); + assert.ok(!(await scoped_fs.is_path_safe('/allowed/path/symlink'))); + }); +}); + +describe('ScopedFs - operations use normalized paths', () => { + test('read_file passes normalized path to fs', async () => { + const scoped_fs = create_test_instance(); + vi.mocked(fs.readFile).mockResolvedValueOnce('content' as any); + + await scoped_fs.read_file('/allowed/path/./subdir/../file.txt'); + 
assert.strictEqual(vi.mocked(fs.readFile).mock.calls[0]![0], '/allowed/path/file.txt'); + }); + + test('write_file passes normalized path to fs', async () => { + const scoped_fs = create_test_instance(); + vi.mocked(fs.writeFile).mockResolvedValueOnce(); + + await scoped_fs.write_file('/allowed/path/./subdir/../file.txt', 'data'); + assert.strictEqual(vi.mocked(fs.writeFile).mock.calls[0]![0], '/allowed/path/file.txt'); + }); + + test('rm passes normalized path to fs', async () => { + const scoped_fs = create_test_instance(); + vi.mocked(fs.rm).mockResolvedValueOnce(); + + await scoped_fs.rm('/allowed/path/./subdir/../file.txt'); + assert.strictEqual(vi.mocked(fs.rm).mock.calls[0]![0], '/allowed/path/file.txt'); + }); + + test('copy_file passes normalized paths to fs for both source and destination', async () => { + const scoped_fs = create_test_instance(); + vi.mocked(fs.copyFile).mockResolvedValueOnce(); + + await scoped_fs.copy_file('/allowed/path/./a/../src.txt', '/allowed/other/path/./b/../dst.txt'); + assert.strictEqual(vi.mocked(fs.copyFile).mock.calls[0]![0], '/allowed/path/src.txt'); + assert.strictEqual(vi.mocked(fs.copyFile).mock.calls[0]![1], '/allowed/other/path/dst.txt'); }); }); @@ -192,8 +208,9 @@ describe('ScopedFs - file operations', () => { vi.mocked(fs.readFile).mockResolvedValueOnce(test_content as any); const content = await scoped_fs.read_file(FILE_PATHS.ALLOWED); - expect(content).toBe(test_content); - expect(fs.readFile).toHaveBeenCalledWith(FILE_PATHS.ALLOWED, 'utf8'); + assert.strictEqual(content, test_content); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 1); + assert.deepEqual(vi.mocked(fs.readFile).mock.calls[0], [FILE_PATHS.ALLOWED, 'utf8']); }); test('read_file - should return Buffer when options specify buffer encoding', async () => { @@ -203,8 +220,8 @@ describe('ScopedFs - file operations', () => { vi.mocked(fs.readFile).mockResolvedValueOnce(test_buffer); const content = await 
scoped_fs.read_file(FILE_PATHS.ALLOWED, null); - expect(content).toBe(test_buffer); - expect(fs.readFile).toHaveBeenCalledWith(FILE_PATHS.ALLOWED, null); + assert.strictEqual(content, test_buffer); + assert.deepEqual(vi.mocked(fs.readFile).mock.calls[0], [FILE_PATHS.ALLOWED, null]); }); test('read_file - should pass through various encoding options', async () => { @@ -222,15 +239,16 @@ describe('ScopedFs - file operations', () => { vi.mocked(fs.readFile).mockResolvedValueOnce('content' as any); await scoped_fs.read_file(FILE_PATHS.ALLOWED, options as any); - expect(fs.readFile).toHaveBeenCalledWith(FILE_PATHS.ALLOWED, expected); + assert.deepEqual(vi.mocked(fs.readFile).mock.calls[0], [FILE_PATHS.ALLOWED, expected] as any); } }); test('read_file - should throw for paths outside allowed directories', async () => { const scoped_fs = create_test_instance(); - await expect(scoped_fs.read_file(FILE_PATHS.OUTSIDE)).rejects.toThrow('Path is not allowed'); - expect(fs.readFile).not.toHaveBeenCalled(); + const error = await assert_rejects(() => scoped_fs.read_file(FILE_PATHS.OUTSIDE)); + assert.instanceOf(error, PathNotAllowedError); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 0); }); test('write_file - should write to files in allowed paths', async () => { @@ -240,16 +258,19 @@ describe('ScopedFs - file operations', () => { vi.mocked(fs.writeFile).mockResolvedValueOnce(); await scoped_fs.write_file(FILE_PATHS.ALLOWED, test_content); - expect(fs.writeFile).toHaveBeenCalledWith(FILE_PATHS.ALLOWED, test_content, 'utf8'); + assert.deepEqual(vi.mocked(fs.writeFile).mock.calls[0], [ + FILE_PATHS.ALLOWED, + test_content, + 'utf8', + ]); }); test('write_file - should throw for paths outside allowed directories', async () => { const scoped_fs = create_test_instance(); - await expect(scoped_fs.write_file(FILE_PATHS.OUTSIDE, 'content')).rejects.toThrow( - 'Path is not allowed', - ); - expect(fs.writeFile).not.toHaveBeenCalled(); + const error = await 
assert_rejects(() => scoped_fs.write_file(FILE_PATHS.OUTSIDE, 'content')); + assert.instanceOf(error, PathNotAllowedError); + assert.strictEqual(vi.mocked(fs.writeFile).mock.calls.length, 0); }); }); @@ -260,14 +281,15 @@ describe('ScopedFs - directory operations', () => { vi.mocked(fs.mkdir).mockResolvedValueOnce(undefined); await scoped_fs.mkdir(DIR_PATHS.NEW_DIR, {recursive: true}); - expect(fs.mkdir).toHaveBeenCalledWith(DIR_PATHS.NEW_DIR, {recursive: true}); + assert.deepEqual(vi.mocked(fs.mkdir).mock.calls[0], [DIR_PATHS.NEW_DIR, {recursive: true}]); }); test('mkdir - should throw for paths outside allowed directories', async () => { const scoped_fs = create_test_instance(); - await expect(scoped_fs.mkdir(DIR_PATHS.OUTSIDE)).rejects.toThrow('Path is not allowed'); - expect(fs.mkdir).not.toHaveBeenCalled(); + const error = await assert_rejects(() => scoped_fs.mkdir(DIR_PATHS.OUTSIDE)); + assert.instanceOf(error, PathNotAllowedError); + assert.strictEqual(vi.mocked(fs.mkdir).mock.calls.length, 0); }); test('readdir - should list directory contents in allowed paths', async () => { @@ -277,15 +299,16 @@ describe('ScopedFs - directory operations', () => { vi.mocked(fs.readdir).mockResolvedValueOnce(dir_contents as any); const contents = await scoped_fs.readdir(DIR_PATHS.ALLOWED, null); - expect(contents).toEqual(dir_contents); - expect(fs.readdir).toHaveBeenCalledWith(DIR_PATHS.ALLOWED, null); + assert.deepEqual(contents, dir_contents); + assert.deepEqual(vi.mocked(fs.readdir).mock.calls[0], [DIR_PATHS.ALLOWED, null] as any); }); test('readdir - should throw for paths outside allowed directories', async () => { const scoped_fs = create_test_instance(); - await expect(scoped_fs.readdir(DIR_PATHS.OUTSIDE)).rejects.toThrow('Path is not allowed'); - expect(fs.readdir).not.toHaveBeenCalled(); + const error = await assert_rejects(() => scoped_fs.readdir(DIR_PATHS.OUTSIDE)); + assert.instanceOf(error, PathNotAllowedError); + 
assert.strictEqual(vi.mocked(fs.readdir).mock.calls.length, 0); }); test('rm - should remove files or directories in allowed paths', async () => { @@ -294,14 +317,15 @@ describe('ScopedFs - directory operations', () => { vi.mocked(fs.rm).mockResolvedValueOnce(); await scoped_fs.rm(DIR_PATHS.ALLOWED, {recursive: true}); - expect(fs.rm).toHaveBeenCalledWith(DIR_PATHS.ALLOWED, {recursive: true}); + assert.deepEqual(vi.mocked(fs.rm).mock.calls[0], [DIR_PATHS.ALLOWED, {recursive: true}]); }); test('rm - should throw for paths outside allowed directories', async () => { const scoped_fs = create_test_instance(); - await expect(scoped_fs.rm(DIR_PATHS.OUTSIDE)).rejects.toThrow('Path is not allowed'); - expect(fs.rm).not.toHaveBeenCalled(); + const error = await assert_rejects(() => scoped_fs.rm(DIR_PATHS.OUTSIDE)); + assert.instanceOf(error, PathNotAllowedError); + assert.strictEqual(vi.mocked(fs.rm).mock.calls.length, 0); }); }); @@ -311,20 +335,21 @@ describe('ScopedFs - stat operations', () => { const mock_stats = { isFile: () => true, isDirectory: () => false, - } as fs_sync.Stats; + } as Stats; vi.mocked(fs.stat).mockResolvedValueOnce(mock_stats); const stats = await scoped_fs.stat(FILE_PATHS.ALLOWED); - expect(stats).toBe(mock_stats); - expect(fs.stat).toHaveBeenCalledWith(FILE_PATHS.ALLOWED, undefined); + assert.strictEqual(stats, mock_stats); + assert.deepEqual(vi.mocked(fs.stat).mock.calls[0], [FILE_PATHS.ALLOWED, undefined]); }); test('stat - should throw for paths outside allowed directories', async () => { const scoped_fs = create_test_instance(); - await expect(scoped_fs.stat(FILE_PATHS.OUTSIDE)).rejects.toThrow('Path is not allowed'); - expect(fs.stat).not.toHaveBeenCalled(); + const error = await assert_rejects(() => scoped_fs.stat(FILE_PATHS.OUTSIDE)); + assert.instanceOf(error, PathNotAllowedError); + assert.strictEqual(vi.mocked(fs.stat).mock.calls.length, 0); }); test('stat - should handle bigint option correctly', async () => { @@ -341,7 +366,7 @@ 
describe('ScopedFs - stat operations', () => { vi.mocked(fs.stat).mockResolvedValueOnce({} as any); await scoped_fs.stat(FILE_PATHS.ALLOWED, options as any); - expect(fs.stat).toHaveBeenCalledWith(FILE_PATHS.ALLOWED, expected_options); + assert.deepEqual(vi.mocked(fs.stat).mock.calls[0], [FILE_PATHS.ALLOWED, expected_options]); } }); @@ -353,13 +378,13 @@ describe('ScopedFs - stat operations', () => { mtimeMs: BigInt(Date.now()), isFile: () => true, isDirectory: () => false, - } as unknown as fs_sync.BigIntStats; + } as unknown as BigIntStats; vi.mocked(fs.stat).mockResolvedValueOnce(bigint_stats); const result = await scoped_fs.stat(FILE_PATHS.ALLOWED, {bigint: true}); - expect(result).toBe(bigint_stats); - expect(typeof (result as unknown as fs_sync.BigIntStats).size).toBe('bigint'); + assert.strictEqual(result, bigint_stats as any); + assert.strictEqual(typeof (result as unknown as BigIntStats).size, 'bigint'); }); }); @@ -378,7 +403,7 @@ describe('ScopedFs - existence checking', () => { for (const {mock_fn, expected} of existence_tests) { mock_fn(); const exists = await scoped_fs.exists(FILE_PATHS.ALLOWED); - expect(exists).toBe(expected); + assert.strictEqual(exists, expected); } }); @@ -386,8 +411,8 @@ describe('ScopedFs - existence checking', () => { const scoped_fs = create_test_instance(); const exists = await scoped_fs.exists(FILE_PATHS.OUTSIDE); - expect(exists).toBe(false); - expect(fs.access).not.toHaveBeenCalled(); + assert.ok(!exists); + assert.strictEqual(vi.mocked(fs.access).mock.calls.length, 0); }); }); @@ -400,7 +425,7 @@ describe('ScopedFs - copy operations', () => { vi.mocked(fs.copyFile).mockResolvedValueOnce(); await scoped_fs.copy_file(source, destination); - expect(fs.copyFile).toHaveBeenCalledWith(source, destination, undefined); + assert.deepEqual(vi.mocked(fs.copyFile).mock.calls[0], [source, destination, undefined]); }); test('copy_file - should pass through mode parameter', async () => { @@ -413,14 +438,14 @@ describe('ScopedFs - copy 
operations', () => { vi.mocked(fs.copyFile).mockResolvedValueOnce(); await scoped_fs.copy_file(source, destination, COPYFILE_EXCL); - expect(fs.copyFile).toHaveBeenCalledWith(source, destination, COPYFILE_EXCL); + assert.deepEqual(vi.mocked(fs.copyFile).mock.calls[0], [source, destination, COPYFILE_EXCL]); // Test with COPYFILE_FICLONE mode const COPYFILE_FICLONE = 2; vi.mocked(fs.copyFile).mockResolvedValueOnce(); await scoped_fs.copy_file(source, destination, COPYFILE_FICLONE); - expect(fs.copyFile).toHaveBeenCalledWith(source, destination, COPYFILE_FICLONE); + assert.deepEqual(vi.mocked(fs.copyFile).mock.calls[1], [source, destination, COPYFILE_FICLONE]); }); test('copy_file - should throw if either source or destination is outside allowed paths', async () => { @@ -431,10 +456,11 @@ describe('ScopedFs - copy operations', () => { ]; for (const {source, destination} of invalid_copy_operations) { - await expect(scoped_fs.copy_file(source, destination)).rejects.toThrow('Path is not allowed'); + const error = await assert_rejects(() => scoped_fs.copy_file(source, destination)); + assert.instanceOf(error, PathNotAllowedError); } - expect(fs.copyFile).not.toHaveBeenCalled(); + assert.strictEqual(vi.mocked(fs.copyFile).mock.calls.length, 0); }); }); @@ -451,11 +477,10 @@ describe('ScopedFs - symlink detection', () => { } as any), ); - await expect(scoped_fs.read_file('/allowed/path/symlink.txt')).rejects.toThrow( - SymlinkNotAllowedError, - ); + const error = await assert_rejects(() => scoped_fs.read_file('/allowed/path/symlink.txt')); + assert.instanceOf(error, SymlinkNotAllowedError); - expect(fs.readFile).not.toHaveBeenCalled(); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 0); }); test('should reject operations when parent directory is a symlink', async () => { @@ -479,10 +504,11 @@ describe('ScopedFs - symlink detection', () => { } as any), ); - await expect(scoped_fs.read_file('/allowed/path/symlink-dir/file.txt')).rejects.toThrow( - 
SymlinkNotAllowedError, + const error = await assert_rejects(() => + scoped_fs.read_file('/allowed/path/symlink-dir/file.txt'), ); + assert.instanceOf(error, SymlinkNotAllowedError); - expect(fs.readFile).not.toHaveBeenCalled(); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 0); }); }); diff --git a/src/test/server/scoped_fs_dynamic.test.ts b/src/test/server/scoped_fs_dynamic.test.ts new file mode 100644 index 000000000..9f3733c63 --- /dev/null +++ b/src/test/server/scoped_fs_dynamic.test.ts @@ -0,0 +1,275 @@ +import {test, vi, beforeEach, describe, assert} from 'vitest'; +import {assert_rejects} from '@fuzdev/fuz_util/testing.js'; +import * as fs from 'node:fs/promises'; + +import {ScopedFs} from '$lib/server/scoped_fs.js'; + +// Mock fs/promises +vi.mock('node:fs/promises', () => ({ + readFile: vi.fn(), + writeFile: vi.fn(), + rm: vi.fn(), + mkdir: vi.fn(), + readdir: vi.fn(), + stat: vi.fn(), + lstat: vi.fn(), + copyFile: vi.fn(), + access: vi.fn(), +})); + +beforeEach(() => { + vi.clearAllMocks(); + + // Default lstat mock returning a non-symlink file + vi.mocked(fs.lstat).mockImplementation(() => + Promise.resolve({ + isSymbolicLink: () => false, + isDirectory: () => false, + isFile: () => true, + } as any), + ); +}); + +describe('ScopedFs - add_path', () => { + test('adds a new path and allows access to files within it', () => { + const scoped_fs = new ScopedFs(['/initial/path']); + + assert.ok(!scoped_fs.is_path_allowed('/new/path/file.txt')); + + const added = scoped_fs.add_path('/new/path'); + assert.ok(added); + assert.ok(scoped_fs.is_path_allowed('/new/path/file.txt')); + }); + + test('returns false when adding a path that already exists', () => { + const scoped_fs = new ScopedFs(['/existing/path']); + + const added = scoped_fs.add_path('/existing/path'); + assert.ok(!added); + }); + + test('normalizes paths with trailing slashes', () => { + const scoped_fs = new ScopedFs([]); + + scoped_fs.add_path('/new/path'); + // Adding with trailing 
slash should be a no-op since it normalizes + const added_again = scoped_fs.add_path('/new/path/'); + assert.ok(!added_again); + }); + + test('throws for relative paths', () => { + const scoped_fs = new ScopedFs([]); + + assert.throws(() => scoped_fs.add_path('relative/path')); + }); + + test('allows file operations after adding path', async () => { + const scoped_fs = new ScopedFs([]); + + // Before adding, file ops should fail + const error = await assert_rejects(() => scoped_fs.read_file('/new/path/file.txt')); + assert.include(error.message, 'Path is not allowed'); + + // After adding, file ops should succeed + scoped_fs.add_path('/new/path'); + vi.mocked(fs.readFile).mockResolvedValueOnce('content' as any); + const content = await scoped_fs.read_file('/new/path/file.txt'); + assert.strictEqual(content, 'content'); + }); + + test('multiple paths can be added incrementally', () => { + const scoped_fs = new ScopedFs([]); + + scoped_fs.add_path('/path/a'); + scoped_fs.add_path('/path/b'); + scoped_fs.add_path('/path/c'); + + assert.ok(scoped_fs.is_path_allowed('/path/a/file.txt')); + assert.ok(scoped_fs.is_path_allowed('/path/b/file.txt')); + assert.ok(scoped_fs.is_path_allowed('/path/c/file.txt')); + assert.ok(!scoped_fs.is_path_allowed('/path/d/file.txt')); + }); +}); + +describe('ScopedFs - remove_path', () => { + test('removes a path and denies access to files within it', () => { + const scoped_fs = new ScopedFs(['/path/a', '/path/b']); + + assert.ok(scoped_fs.is_path_allowed('/path/a/file.txt')); + + const removed = scoped_fs.remove_path('/path/a'); + assert.ok(removed); + assert.ok(!scoped_fs.is_path_allowed('/path/a/file.txt')); + // Other paths unaffected + assert.ok(scoped_fs.is_path_allowed('/path/b/file.txt')); + }); + + test('returns false when removing a path that does not exist', () => { + const scoped_fs = new ScopedFs(['/existing/path']); + + const removed = scoped_fs.remove_path('/nonexistent/path'); + assert.ok(!removed); + }); + + 
test('normalizes path before removing', () => { + const scoped_fs = new ScopedFs(['/some/path/']); + + // Remove without trailing slash should still match + const removed = scoped_fs.remove_path('/some/path'); + assert.ok(removed); + assert.ok(!scoped_fs.is_path_allowed('/some/path/file.txt')); + }); + + test('denies file operations after removing path', async () => { + const scoped_fs = new ScopedFs(['/removable/path']); + + // Before removing, file ops should succeed + vi.mocked(fs.readFile).mockResolvedValueOnce('content' as any); + await scoped_fs.read_file('/removable/path/file.txt'); + + // After removing, file ops should fail + scoped_fs.remove_path('/removable/path'); + const error = await assert_rejects(() => scoped_fs.read_file('/removable/path/file.txt')); + assert.include(error.message, 'Path is not allowed'); + }); + + test('throws for relative paths', () => { + const scoped_fs = new ScopedFs([]); + + assert.throws(() => scoped_fs.remove_path('relative/path')); + }); +}); + +describe('ScopedFs - has_path', () => { + test('returns true for paths in the allowed set', () => { + const scoped_fs = new ScopedFs(['/path/a', '/path/b']); + + assert.ok(scoped_fs.has_path('/path/a')); + assert.ok(scoped_fs.has_path('/path/b')); + }); + + test('returns false for paths not in the allowed set', () => { + const scoped_fs = new ScopedFs(['/path/a']); + + assert.ok(!scoped_fs.has_path('/path/b')); + }); + + test('normalizes paths for comparison', () => { + const scoped_fs = new ScopedFs(['/path/a/']); + + assert.ok(scoped_fs.has_path('/path/a')); + assert.ok(scoped_fs.has_path('/path/a/')); + }); + + test('returns false for relative paths', () => { + const scoped_fs = new ScopedFs(['/path/a']); + + assert.ok(!scoped_fs.has_path('relative/path')); + }); + + test('returns false for child paths (not exact root match)', () => { + const scoped_fs = new ScopedFs(['/path/a']); + + // has_path checks for exact root, not "is allowed" + 
assert.ok(!scoped_fs.has_path('/path/a/child')); + }); +}); + +describe('ScopedFs - add_path security', () => { + test('path traversal is blocked on dynamically added paths', () => { + const scoped_fs = new ScopedFs([]); + scoped_fs.add_path('/allowed/dir'); + + // traversal out of the allowed dir + assert.ok(!scoped_fs.is_path_allowed('/allowed/dir/../../etc/passwd')); + // normalized form lands outside + assert.ok(!scoped_fs.is_path_allowed('/allowed/dir/../secret/file')); + }); + + test('symlinks are rejected on dynamically added paths', async () => { + const scoped_fs = new ScopedFs([]); + scoped_fs.add_path('/dynamic/path'); + + vi.mocked(fs.lstat).mockImplementationOnce(() => + Promise.resolve({ + isSymbolicLink: () => true, + isDirectory: () => false, + isFile: () => false, + } as any), + ); + + assert.ok(!(await scoped_fs.is_path_safe('/dynamic/path/symlink'))); + }); + + test('prefix-similar paths are independent', () => { + const scoped_fs = new ScopedFs([]); + scoped_fs.add_path('/project'); + + // /project-other should NOT be allowed — it's a different directory + assert.ok(!scoped_fs.is_path_allowed('/project-other/file.txt')); + // /project/file.txt should be allowed + assert.ok(scoped_fs.is_path_allowed('/project/file.txt')); + }); +}); + +describe('ScopedFs - remove_path edge cases', () => { + test('removing all paths leaves nothing accessible', () => { + const scoped_fs = new ScopedFs(['/path/a', '/path/b']); + + scoped_fs.remove_path('/path/a'); + scoped_fs.remove_path('/path/b'); + + assert.strictEqual(scoped_fs.allowed_paths.length, 0); + assert.ok(!scoped_fs.is_path_allowed('/path/a/file.txt')); + assert.ok(!scoped_fs.is_path_allowed('/path/b/file.txt')); + assert.ok(!scoped_fs.is_path_allowed('/any/path')); + }); + + test('removing a path does not affect children of other paths', () => { + const scoped_fs = new ScopedFs(['/workspace/a', '/workspace/b']); + + scoped_fs.remove_path('/workspace/a'); + + // /workspace/b and its children should 
still work + assert.ok(scoped_fs.is_path_allowed('/workspace/b/deeply/nested/file.txt')); + }); + + test('re-adding a previously removed path works', () => { + const scoped_fs = new ScopedFs(['/ephemeral']); + + scoped_fs.remove_path('/ephemeral'); + assert.ok(!scoped_fs.is_path_allowed('/ephemeral/file.txt')); + + const added = scoped_fs.add_path('/ephemeral'); + assert.ok(added); + assert.ok(scoped_fs.is_path_allowed('/ephemeral/file.txt')); + }); +}); + +describe('ScopedFs - add_path and remove_path round-trip', () => { + test('add then remove returns to original state', () => { + const scoped_fs = new ScopedFs(['/original/path']); + + scoped_fs.add_path('/temporary/path'); + assert.ok(scoped_fs.is_path_allowed('/temporary/path/file.txt')); + + scoped_fs.remove_path('/temporary/path'); + assert.ok(!scoped_fs.is_path_allowed('/temporary/path/file.txt')); + + // Original still works + assert.ok(scoped_fs.is_path_allowed('/original/path/file.txt')); + }); + + test('allowed_paths getter reflects changes', () => { + const scoped_fs = new ScopedFs(['/initial']); + + assert.strictEqual(scoped_fs.allowed_paths.length, 1); + + scoped_fs.add_path('/added'); + assert.strictEqual(scoped_fs.allowed_paths.length, 2); + + scoped_fs.remove_path('/initial'); + assert.strictEqual(scoped_fs.allowed_paths.length, 1); + assert.ok(scoped_fs.allowed_paths[0]!.startsWith('/added')); + }); +}); diff --git a/src/test/server/scoped_fs_security.test.ts b/src/test/server/scoped_fs_security.test.ts index c8a46d15b..50396f86e 100644 --- a/src/test/server/scoped_fs_security.test.ts +++ b/src/test/server/scoped_fs_security.test.ts @@ -1,14 +1,10 @@ -// @slop Claude Sonnet 3.7 - -import {test, expect, vi, beforeEach, afterEach, describe} from 'vitest'; +import {test, vi, beforeEach, describe, assert} from 'vitest'; +import {assert_rejects} from '@fuzdev/fuz_util/testing.js'; import * as fs from 'node:fs/promises'; -import * as fs_sync from 'node:fs'; import {ScopedFs, PathNotAllowedError, 
SymlinkNotAllowedError} from '$lib/server/scoped_fs.js'; -/* eslint-disable no-await-in-loop, @typescript-eslint/no-empty-function, @typescript-eslint/require-await */ - -// Mock fs/promises and fs modules +// Mock fs/promises vi.mock('node:fs/promises', () => ({ readFile: vi.fn(), writeFile: vi.fn(), @@ -21,10 +17,6 @@ vi.mock('node:fs/promises', () => ({ access: vi.fn(), })); -vi.mock('node:fs', () => ({ - existsSync: vi.fn(), -})); - // Test constants const TEST_ALLOWED_PATHS = ['/allowed/path', '/allowed/other/path/', '/another/allowed/directory']; const FILE_PATHS = { @@ -35,8 +27,6 @@ const FILE_PATHS = { TRAVERSAL_SIMPLE: '/allowed/path/../../../etc/passwd', TRAVERSAL_COMPLEX: '/allowed/path/subdir/.././../../etc/passwd', TRAVERSAL_MIXED: '/allowed/path/./foo/../../etc/passwd', - TRAVERSAL_WINDOWS: '/allowed/path\\..\\..\\Windows\\System32\\config\\sam', - UNICODE_TRAVERSAL: '/allowed/path/NN/../../etc/passwd', // Unicode lookalikes }; const DIR_PATHS = { ALLOWED: '/allowed/path/dir', @@ -48,15 +38,8 @@ const DIR_PATHS = { const create_test_instance = () => new ScopedFs(TEST_ALLOWED_PATHS); -// Setup/cleanup for each test -let console_spy: any; - beforeEach(() => { vi.clearAllMocks(); - console_spy = vi.spyOn(console, 'error').mockImplementation(() => {}); - - // Default mock implementations - vi.mocked(fs_sync.existsSync).mockReturnValue(true); // Default lstat mock returning a non-symlink file vi.mocked(fs.lstat).mockImplementation(() => @@ -68,10 +51,6 @@ beforeEach(() => { ); }); -afterEach(() => { - console_spy.mockRestore(); -}); - describe('ScopedFs - symlink security', () => { test('should reject symlinks in target path', async () => { const scoped_fs = create_test_instance(); @@ -105,7 +84,8 @@ describe('ScopedFs - symlink security', () => { } as any), ); - await expect(operation()).rejects.toThrow(SymlinkNotAllowedError); + const e = await assert_rejects(() => operation()); + assert.instanceOf(e, SymlinkNotAllowedError); } // Test exists() 
separately @@ -119,18 +99,12 @@ describe('ScopedFs - symlink security', () => { ); const exists = await scoped_fs.exists(FILE_PATHS.SYMLINK); - expect(exists).toBe(false); + assert.ok(!exists); }); test('should reject symlinks in parent directories', async () => { const scoped_fs = create_test_instance(); - // First make sure we have existsSync return true for relevant paths - vi.mocked(fs_sync.existsSync).mockImplementation((path) => { - // Return true for our test directory path and all parent directories - return String(path).includes('symlink-dir') || String(path).includes('/allowed/path'); - }); - // Setup mocks to simulate a parent directory that is a symlink vi.mocked(fs.lstat).mockImplementation(async (path) => { // The file itself is not a symlink @@ -160,14 +134,12 @@ describe('ScopedFs - symlink security', () => { }); // Should throw for any operation on a file in a symlinked parent directory - await expect(scoped_fs.read_file(FILE_PATHS.PARENT_SYMLINK)).rejects.toThrow( - SymlinkNotAllowedError, - ); + const error = await assert_rejects(() => scoped_fs.read_file(FILE_PATHS.PARENT_SYMLINK)); + assert.instanceOf(error, SymlinkNotAllowedError); // Should also throw for mkdir in a symlinked directory - await expect(scoped_fs.mkdir('/allowed/path/symlink-dir/subdir')).rejects.toThrow( - SymlinkNotAllowedError, - ); + const error2 = await assert_rejects(() => scoped_fs.mkdir('/allowed/path/symlink-dir/subdir')); + assert.instanceOf(error2, SymlinkNotAllowedError); }); test('should reject symlinks in grandparent directories', async () => { @@ -204,9 +176,10 @@ describe('ScopedFs - symlink security', () => { }); // Should detect the symlink even when it's not the immediate parent - await expect( + const error = await assert_rejects(() => scoped_fs.read_file(`${DIR_PATHS.GRANDPARENT_SYMLINK_DIR}/file.txt`), - ).rejects.toThrow(SymlinkNotAllowedError); + ); + assert.instanceOf(error, SymlinkNotAllowedError); }); test('should detect symlinks consistently across 
all operations', async () => { @@ -244,7 +217,8 @@ describe('ScopedFs - symlink security', () => { // All operations should detect the symlink for (const operation of operations) { - await expect(operation()).rejects.toThrow(SymlinkNotAllowedError); + const e = await assert_rejects(() => operation()); + assert.instanceOf(e, SymlinkNotAllowedError); } }); @@ -262,10 +236,10 @@ describe('ScopedFs - symlink security', () => { // Should return false rather than throwing for exists() const result = await scoped_fs.exists(FILE_PATHS.SYMLINK); - expect(result).toBe(false); + assert.ok(!result); // access should not be called since the symlink is detected first - expect(fs.access).not.toHaveBeenCalled(); + assert.strictEqual(vi.mocked(fs.access).mock.calls.length, 0); }); test('is_path_safe should return false for symlinks', async () => { @@ -298,9 +272,34 @@ describe('ScopedFs - symlink security', () => { // Should safely return false without throwing const is_safe = await scoped_fs.is_path_safe(path); - expect(is_safe).toBe(false); + assert.ok(!is_safe); + } + }); +}); + +describe('ScopedFs - null byte rejection', () => { + test('should reject paths containing null bytes', () => { + const scoped_fs = create_test_instance(); + + const null_byte_paths = [ + '/allowed/path/\0file.txt', + '/allowed/path/file\0.txt', + '/allowed/path/\0../../etc/passwd', + ]; + + for (const path of null_byte_paths) { + assert.ok(!scoped_fs.is_path_allowed(path)); } }); + + test('should throw PathNotAllowedError for null byte paths in operations', async () => { + const scoped_fs = create_test_instance(); + + const error = await assert_rejects(() => scoped_fs.read_file('/allowed/path/\0file.txt')); + assert.instanceOf(error, PathNotAllowedError); + + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 0); + }); }); describe('ScopedFs - path traversal security', () => { @@ -318,16 +317,35 @@ describe('ScopedFs - path traversal security', () => { // Check both synchronous and 
asynchronous validation for (const path of traversal_paths) { // Synchronous check should fail - expect(scoped_fs.is_path_allowed(path)).toBe(false); + assert.ok(!scoped_fs.is_path_allowed(path)); // Async checks should also fail - expect(await scoped_fs.is_path_safe(path)).toBe(false); + assert.ok(!(await scoped_fs.is_path_safe(path))); // Operations should throw - await expect(scoped_fs.read_file(path)).rejects.toThrow(PathNotAllowedError); + const error = await assert_rejects(() => scoped_fs.read_file(path)); + assert.instanceOf(error, PathNotAllowedError); } }); + test('backslashes are literal on POSIX and do not enable traversal', () => { + const scoped_fs = create_test_instance(); + + // On POSIX, backslash is a valid filename character, not a separator. + // normalize leaves it as-is, so this is a literal path segment, not traversal. + const backslash_path = '/allowed/path\\..\\..\\Windows\\System32\\config\\sam'; + assert.ok(!scoped_fs.is_path_allowed(backslash_path)); + }); + + test('fullwidth Unicode lookalikes are literal characters, not traversal', () => { + const scoped_fs = create_test_instance(); + + // Fullwidth ..is NOT .. 
— normalize treats it as a regular directory name + const unicode_path = '/allowed/path/NN/../../etc/passwd'; + // This stays inside /allowed/path/ after normalization, so it IS allowed + assert.ok(scoped_fs.is_path_allowed(unicode_path)); + }); + test('should safely normalize legitimate paths', async () => { const scoped_fs = create_test_instance(); @@ -340,8 +358,8 @@ describe('ScopedFs - path traversal security', () => { ]; for (const path of legitimate_paths) { - expect(scoped_fs.is_path_allowed(path)).toBe(true); - expect(await scoped_fs.is_path_safe(path)).toBe(true); + assert.ok(scoped_fs.is_path_allowed(path)); + assert.ok(await scoped_fs.is_path_safe(path)); // Mock successful read vi.mocked(fs.readFile).mockReset(); @@ -349,7 +367,7 @@ describe('ScopedFs - path traversal security', () => { // Should allow operations on these paths const content = await scoped_fs.read_file(path); - expect(content).toBe('content'); + assert.strictEqual(content, 'content'); } }); }); @@ -376,16 +394,17 @@ describe('ScopedFs - access control security', () => { ]; for (const {path, allowed} of boundary_test_cases) { - expect(scoped_fs.is_path_allowed(path)).toBe(allowed); + assert.strictEqual(scoped_fs.is_path_allowed(path), allowed); // For valid paths, mock a successful read if (allowed) { vi.mocked(fs.readFile).mockReset(); vi.mocked(fs.readFile).mockResolvedValueOnce('content' as any); const content = await scoped_fs.read_file(path); - expect(content).toBe('content'); + assert.strictEqual(content, 'content'); } else { - await expect(scoped_fs.read_file(path)).rejects.toThrow(PathNotAllowedError); + const e = await assert_rejects(() => scoped_fs.read_file(path)); + assert.instanceOf(e, PathNotAllowedError); } } }); @@ -405,7 +424,7 @@ describe('ScopedFs - access control security', () => { ]; for (const path of root_test_paths) { - expect(root_scoped_fs.is_path_allowed(path)).toBe(true); + assert.ok(root_scoped_fs.is_path_allowed(path)); // Mock successful read 
vi.mocked(fs.readFile).mockReset(); @@ -413,12 +432,13 @@ describe('ScopedFs - access control security', () => { // Should allow operations const content = await root_scoped_fs.read_file(path); - expect(content).toBe('content'); + assert.strictEqual(content, 'content'); } // Non-absolute paths should still be rejected - expect(root_scoped_fs.is_path_allowed('relative/path')).toBe(false); - await expect(root_scoped_fs.read_file('relative/path')).rejects.toThrow(PathNotAllowedError); + assert.ok(!root_scoped_fs.is_path_allowed('relative/path')); + const error = await assert_rejects(() => root_scoped_fs.read_file('relative/path')); + assert.instanceOf(error, PathNotAllowedError); }); test('should properly isolate between allowed paths', async () => { @@ -445,13 +465,14 @@ describe('ScopedFs - access control security', () => { // Check allowed paths for (const path of allowed_paths) { - expect(complex_scoped_fs.is_path_allowed(path)).toBe(true); + assert.ok(complex_scoped_fs.is_path_allowed(path)); } // Check disallowed paths for (const path of disallowed_paths) { - expect(complex_scoped_fs.is_path_allowed(path)).toBe(false); - await expect(complex_scoped_fs.read_file(path)).rejects.toThrow(PathNotAllowedError); + assert.ok(!complex_scoped_fs.is_path_allowed(path)); + const error = await assert_rejects(() => complex_scoped_fs.read_file(path)); + assert.instanceOf(error, PathNotAllowedError); } }); @@ -459,14 +480,19 @@ describe('ScopedFs - access control security', () => { const scoped_fs = create_test_instance(); // Empty path should be rejected by all operations - await expect(scoped_fs.read_file('')).rejects.toThrow(PathNotAllowedError); - await expect(scoped_fs.write_file('', 'content')).rejects.toThrow(PathNotAllowedError); - await expect(scoped_fs.stat('')).rejects.toThrow(PathNotAllowedError); - await expect(scoped_fs.mkdir('')).rejects.toThrow(PathNotAllowedError); - await expect(scoped_fs.readdir('')).rejects.toThrow(PathNotAllowedError); + for (const 
operation of [ + () => scoped_fs.read_file(''), + () => scoped_fs.write_file('', 'content'), + () => scoped_fs.stat(''), + () => scoped_fs.mkdir(''), + () => scoped_fs.readdir(''), + ]) { + const e = await assert_rejects(() => operation()); + assert.instanceOf(e, PathNotAllowedError); + } // exists() should return false for empty path - expect(await scoped_fs.exists('')).toBe(false); + assert.ok(!(await scoped_fs.exists(''))); }); test('copy_file should validate both source and destination paths', async () => { @@ -477,29 +503,34 @@ describe('ScopedFs - access control security', () => { await scoped_fs.copy_file('/allowed/path/source.txt', '/allowed/other/path/dest.txt'); // Invalid source - await expect( + const error_src = await assert_rejects(() => scoped_fs.copy_file('/not/allowed/source.txt', '/allowed/path/dest.txt'), - ).rejects.toThrow(PathNotAllowedError); + ); + assert.instanceOf(error_src, PathNotAllowedError); // Invalid destination - await expect( + const error_dest = await assert_rejects(() => scoped_fs.copy_file('/allowed/path/source.txt', '/not/allowed/dest.txt'), - ).rejects.toThrow(PathNotAllowedError); + ); + assert.instanceOf(error_dest, PathNotAllowedError); // Both invalid - await expect( + const error_both = await assert_rejects(() => scoped_fs.copy_file('/not/allowed/source.txt', '/not/allowed/dest.txt'), - ).rejects.toThrow(PathNotAllowedError); + ); + assert.instanceOf(error_both, PathNotAllowedError); // Path traversal in source - await expect( + const error_traversal_src = await assert_rejects(() => scoped_fs.copy_file('/allowed/path/../../../etc/passwd', '/allowed/path/dest.txt'), - ).rejects.toThrow(PathNotAllowedError); + ); + assert.instanceOf(error_traversal_src, PathNotAllowedError); // Path traversal in destination - await expect( + const error_traversal_dest = await assert_rejects(() => scoped_fs.copy_file('/allowed/path/source.txt', '/allowed/path/../../../etc/passwd'), - ).rejects.toThrow(PathNotAllowedError); + ); + 
assert.instanceOf(error_traversal_dest, PathNotAllowedError); }); }); @@ -516,8 +547,8 @@ describe('ScopedFs - security error handling', () => { for (const path of test_paths) { const error = new PathNotAllowedError(path); - expect(error.message).toBe(`Path is not allowed: ${path}`); - expect(error.name).toBe('PathNotAllowedError'); + assert.strictEqual(error.message, `Path is not allowed: ${path}`); + assert.strictEqual(error.name, 'PathNotAllowedError'); } }); @@ -526,8 +557,8 @@ describe('ScopedFs - security error handling', () => { for (const path of test_paths) { const error = new SymlinkNotAllowedError(path); - expect(error.message).toBe(`Path is a symlink which is not allowed: ${path}`); - expect(error.name).toBe('SymlinkNotAllowedError'); + assert.strictEqual(error.message, `Path is a symlink which is not allowed: ${path}`); + assert.strictEqual(error.name, 'SymlinkNotAllowedError'); } }); @@ -538,7 +569,8 @@ describe('ScopedFs - security error handling', () => { vi.mocked(fs.lstat).mockRejectedValueOnce(new Error('Permission denied')); // Should throw the filesystem error, not a security error - await expect(scoped_fs.read_file(FILE_PATHS.ALLOWED)).rejects.toThrow('Permission denied'); - expect(fs.readFile).not.toHaveBeenCalled(); + const error = await assert_rejects(() => scoped_fs.read_file(FILE_PATHS.ALLOWED)); + assert.include(error.message, 'Permission denied'); + assert.strictEqual(vi.mocked(fs.readFile).mock.calls.length, 0); }); }); diff --git a/src/test/server/security.host.test.ts b/src/test/server/security.host.test.ts new file mode 100644 index 000000000..0bd1c4447 --- /dev/null +++ b/src/test/server/security.host.test.ts @@ -0,0 +1,272 @@ +import {describe, test, vi, assert} from 'vitest'; +import type {Handler} from 'hono'; + +import { + extract_hostname, + build_allowed_hostnames, + create_host_validation_middleware, + LOCAL_HOSTNAMES, + is_open_host, +} from '../../lib/server/security.js'; + +// Test helpers (same pattern as 
security.test.ts) +const create_mock_context = (headers: Record = {}) => { + const next = vi.fn(); + const json = vi.fn((content: unknown, status: number) => ({content, status})); + + const normalized_headers: Record = {}; + for (const [key, value] of Object.entries(headers)) { + normalized_headers[key.toLowerCase()] = value; + } + + const c = { + req: { + header: (name: string) => normalized_headers[name.toLowerCase()], + }, + json, + }; + + return {c, next, json}; +}; + +const test_middleware_allows = async (handler: Handler, headers: Record) => { + const {c, next} = create_mock_context(headers); + await handler(c as any, next); + assert.ok(next.mock.calls.length > 0, 'middleware should call next()'); +}; + +const test_middleware_blocks = async (handler: Handler, headers: Record) => { + const {c, next, json} = create_mock_context(headers); + await handler(c as any, next); + assert.strictEqual(next.mock.calls.length, 0, 'middleware should not call next()'); + assert.deepEqual(json.mock.calls[0], [{error: 'forbidden_host'}, 403]); +}; + +describe('extract_hostname', () => { + test('extracts hostname from host:port', () => { + assert.strictEqual(extract_hostname('localhost:4460'), 'localhost'); + }); + + test('returns hostname when no port', () => { + assert.strictEqual(extract_hostname('localhost'), 'localhost'); + }); + + test('extracts IPv4 from host:port', () => { + assert.strictEqual(extract_hostname('127.0.0.1:4460'), '127.0.0.1'); + }); + + test('handles IPv6 in brackets with port', () => { + assert.strictEqual(extract_hostname('[::1]:4460'), '[::1]'); + }); + + test('handles IPv6 in brackets without port', () => { + assert.strictEqual(extract_hostname('[::1]'), '[::1]'); + }); + + test('handles bare IPv6 without brackets', () => { + // edge case — some clients might send this + assert.strictEqual(extract_hostname('::1'), '::1'); + }); + + test('handles full IPv6 in brackets', () => { + assert.strictEqual( + extract_hostname('[2001:db8::8a2e:370:7334]:8443'), 
+ '[2001:db8::8a2e:370:7334]', + ); + }); + + test('handles empty string', () => { + assert.strictEqual(extract_hostname(''), ''); + }); + + test('handles just a colon', () => { + assert.strictEqual(extract_hostname(':'), ''); + }); + + test('handles just brackets', () => { + assert.strictEqual(extract_hostname('[]'), '[]'); + }); + + test('handles unclosed bracket', () => { + assert.strictEqual(extract_hostname('[::1'), '[::1'); + }); +}); + +describe('is_open_host', () => { + test('identifies 0.0.0.0 as open', () => { + assert.ok(is_open_host('0.0.0.0')); + }); + + test('identifies :: as open', () => { + assert.ok(is_open_host('::')); + }); + + test('identifies 0 as open', () => { + assert.ok(is_open_host('0')); + }); + + test('localhost is not open', () => { + assert.ok(!is_open_host('localhost')); + }); + + test('127.0.0.1 is not open', () => { + assert.ok(!is_open_host('127.0.0.1')); + }); +}); + +describe('build_allowed_hostnames', () => { + test('localhost includes all loopback forms', () => { + const hostnames = build_allowed_hostnames('localhost'); + assert.ok(hostnames.has('localhost')); + assert.ok(hostnames.has('127.0.0.1')); + assert.ok(hostnames.has('[::1]')); + assert.ok(hostnames.has('::1')); + }); + + test('127.0.0.1 includes all loopback forms', () => { + const hostnames = build_allowed_hostnames('127.0.0.1'); + assert.ok(hostnames.has('localhost')); + assert.ok(hostnames.has('127.0.0.1')); + assert.ok(hostnames.has('[::1]')); + }); + + test('[::1] includes all loopback forms', () => { + const hostnames = build_allowed_hostnames('[::1]'); + assert.ok(hostnames.has('[::1]')); + assert.ok(hostnames.has('::1')); + assert.ok(hostnames.has('localhost')); + assert.ok(hostnames.has('127.0.0.1')); + }); + + test('::1 includes all loopback forms', () => { + const hostnames = build_allowed_hostnames('::1'); + assert.ok(hostnames.has('[::1]')); + assert.ok(hostnames.has('::1')); + assert.ok(hostnames.has('localhost')); + }); + + test('0.0.0.0 includes all 
local hostnames', () => { + const hostnames = build_allowed_hostnames('0.0.0.0'); + for (const h of LOCAL_HOSTNAMES) { + assert.ok(hostnames.has(h), `should include ${h}`); + } + }); + + test(':: includes all local hostnames', () => { + const hostnames = build_allowed_hostnames('::'); + for (const h of LOCAL_HOSTNAMES) { + assert.ok(hostnames.has(h), `should include ${h}`); + } + }); + + test('custom hostname only includes itself', () => { + const hostnames = build_allowed_hostnames('myhost.local'); + assert.ok(hostnames.has('myhost.local')); + assert.strictEqual(hostnames.size, 1); + }); + + test('is case-insensitive', () => { + const hostnames = build_allowed_hostnames('LocalHost'); + assert.ok(hostnames.has('localhost')); + assert.ok(hostnames.has('127.0.0.1')); + }); +}); + +describe('create_host_validation_middleware', () => { + const localhost_middleware = create_host_validation_middleware( + build_allowed_hostnames('localhost'), + ); + + describe('allows valid hosts', () => { + test('allows localhost:port', async () => { + await test_middleware_allows(localhost_middleware, {host: 'localhost:4460'}); + }); + + test('allows localhost without port', async () => { + await test_middleware_allows(localhost_middleware, {host: 'localhost'}); + }); + + test('allows 127.0.0.1:port', async () => { + await test_middleware_allows(localhost_middleware, {host: '127.0.0.1:4460'}); + }); + + test('allows case-insensitive localhost', async () => { + await test_middleware_allows(localhost_middleware, {host: 'LocalHost:4460'}); + }); + + test('allows requests without Host header', async () => { + await test_middleware_allows(localhost_middleware, {}); + }); + + test('allows requests with other headers but no Host', async () => { + await test_middleware_allows(localhost_middleware, { + 'user-agent': 'curl/7.64.1', + accept: '*/*', + }); + }); + }); + + describe('blocks invalid hosts', () => { + test('blocks evil.com', async () => { + await 
test_middleware_blocks(localhost_middleware, {host: 'evil.com:4460'}); + }); + + test('blocks evil.com without port', async () => { + await test_middleware_blocks(localhost_middleware, {host: 'evil.com'}); + }); + + test('blocks 192.168.1.1', async () => { + await test_middleware_blocks(localhost_middleware, {host: '192.168.1.1:4460'}); + }); + + test('allows [::1] when bound to localhost', async () => { + // localhost resolves to both IPv4 and IPv6 loopback + await test_middleware_allows(localhost_middleware, {host: '[::1]:4460'}); + }); + }); + + describe('IPv6 binding', () => { + const ipv6_middleware = create_host_validation_middleware(build_allowed_hostnames('[::1]')); + + test('allows [::1]:port', async () => { + await test_middleware_allows(ipv6_middleware, {host: '[::1]:4460'}); + }); + + test('allows bare ::1', async () => { + await test_middleware_allows(ipv6_middleware, {host: '::1'}); + }); + + test('allows localhost when bound to [::1]', async () => { + // all loopback forms are aliases + await test_middleware_allows(ipv6_middleware, {host: 'localhost:4460'}); + }); + + test('blocks evil.com when bound to [::1]', async () => { + await test_middleware_blocks(ipv6_middleware, {host: 'evil.com:4460'}); + }); + }); + + describe('build_allowed_hostnames for 0.0.0.0 (used if auth is added later)', () => { + const open_middleware = create_host_validation_middleware(build_allowed_hostnames('0.0.0.0')); + + test('allows localhost', async () => { + await test_middleware_allows(open_middleware, {host: 'localhost:4460'}); + }); + + test('allows 127.0.0.1', async () => { + await test_middleware_allows(open_middleware, {host: '127.0.0.1:4460'}); + }); + + test('allows [::1]', async () => { + await test_middleware_allows(open_middleware, {host: '[::1]:4460'}); + }); + + test('blocks external hostname even on 0.0.0.0', async () => { + await test_middleware_blocks(open_middleware, {host: 'evil.com:4460'}); + }); + + test('blocks LAN IP even on 0.0.0.0', async () => { + 
await test_middleware_blocks(open_middleware, {host: '192.168.1.100:4460'}); + }); + }); +}); diff --git a/src/test/server/security.test.ts b/src/test/server/security.test.ts index ee72b9457..cda009e96 100644 --- a/src/test/server/security.test.ts +++ b/src/test/server/security.test.ts @@ -1,18 +1,16 @@ -// @slop Claude Opus 4 - -import {describe, test, expect, vi} from 'vitest'; +import {describe, test, vi, assert} from 'vitest'; import type {Handler} from 'hono'; import { parse_allowed_origins, should_allow_origin, verify_request_source, -} from '$lib/server/security.js'; +} from '@fuzdev/fuz_app/http/origin.js'; // Test helpers const create_mock_context = (headers: Record = {}) => { const next = vi.fn(); - const text = vi.fn((content: string, status: number) => ({content, status})); + const json = vi.fn((content: unknown, status: number) => ({content, status})); // Convert all header keys to lowercase for case-insensitive lookup const normalized_headers: Record = {}; @@ -24,10 +22,10 @@ const create_mock_context = (headers: Record = {}) => { req: { header: (name: string) => normalized_headers[name.toLowerCase()], }, - text, + json, }; - return {c, next, text}; + return {c, next, json}; }; const test_pattern = ( @@ -38,89 +36,87 @@ const test_pattern = ( const regexps = parse_allowed_origins(pattern); for (const origin of valid_origins) { - expect(should_allow_origin(origin, regexps), `${origin} should match ${pattern}`).toBe(true); + assert.ok(should_allow_origin(origin, regexps), `${origin} should match ${pattern}`); } for (const origin of invalid_origins) { - expect(should_allow_origin(origin, regexps), `${origin} should not match ${pattern}`).toBe( - false, - ); + assert.ok(!should_allow_origin(origin, regexps), `${origin} should not match ${pattern}`); } }; const test_middleware_allows = async (handler: Handler, headers: Record) => { const {c, next} = create_mock_context(headers); await handler(c as any, next); - expect(next).toHaveBeenCalled(); + 
assert.ok(next.mock.calls.length > 0); }; const test_middleware_blocks = async ( handler: Handler, headers: Record, - expected_message: string, + expected_error: string, expected_status = 403, ) => { - const {c, next, text} = create_mock_context(headers); + const {c, next, json} = create_mock_context(headers); const result = await handler(c as any, next); - expect(next).not.toHaveBeenCalled(); - expect(text).toHaveBeenCalledWith(expected_message, expected_status); - expect(result).toEqual({content: expected_message, status: expected_status}); + assert.strictEqual(next.mock.calls.length, 0); + assert.deepEqual(json.mock.calls[0], [{error: expected_error}, expected_status]); + assert.deepEqual(result, {content: {error: expected_error}, status: expected_status}); }; describe('parse_allowed_origins', () => { test('returns empty array for undefined', () => { - expect(parse_allowed_origins(undefined)).toEqual([]); + assert.deepEqual(parse_allowed_origins(undefined), []); }); test('returns empty array for empty string', () => { - expect(parse_allowed_origins('')).toEqual([]); + assert.deepEqual(parse_allowed_origins(''), []); }); test('parses single origin', () => { const patterns = parse_allowed_origins('http://localhost:3000'); - expect(patterns).toHaveLength(1); - expect(patterns[0]).toBeInstanceOf(RegExp); + assert.strictEqual(patterns.length, 1); + assert.instanceOf(patterns[0], RegExp); }); test('parses multiple comma-separated origins', () => { const patterns = parse_allowed_origins('http://localhost:3000,https://example.com'); - expect(patterns).toHaveLength(2); + assert.strictEqual(patterns.length, 2); }); test('trims whitespace from origins', () => { const patterns = parse_allowed_origins(' http://localhost:3000 , https://example.com '); - expect(patterns).toHaveLength(2); + assert.strictEqual(patterns.length, 2); }); test('filters out empty entries', () => { const patterns = parse_allowed_origins('http://localhost:3000,,https://example.com,'); - 
expect(patterns).toHaveLength(2); + assert.strictEqual(patterns.length, 2); }); test('handles complex patterns', () => { const patterns = parse_allowed_origins( 'https://*.example.com,http://localhost:*,https://*.test.com:*', ); - expect(patterns).toHaveLength(3); + assert.strictEqual(patterns.length, 3); }); }); describe('should_allow_origin', () => { test('returns false for empty patterns', () => { - expect(should_allow_origin('http://example.com', [])).toBe(false); + assert.ok(!should_allow_origin('http://example.com', [])); }); test('matches exact origins', () => { const patterns = parse_allowed_origins('http://example.com'); - expect(should_allow_origin('http://example.com', patterns)).toBe(true); - expect(should_allow_origin('https://example.com', patterns)).toBe(false); + assert.ok(should_allow_origin('http://example.com', patterns)); + assert.ok(!should_allow_origin('https://example.com', patterns)); }); test('matches any of multiple patterns', () => { const patterns = parse_allowed_origins('http://localhost:3000,https://example.com'); - expect(should_allow_origin('http://localhost:3000', patterns)).toBe(true); - expect(should_allow_origin('https://example.com', patterns)).toBe(true); - expect(should_allow_origin('http://other.com', patterns)).toBe(false); + assert.ok(should_allow_origin('http://localhost:3000', patterns)); + assert.ok(should_allow_origin('https://example.com', patterns)); + assert.ok(!should_allow_origin('http://other.com', patterns)); }); }); @@ -151,14 +147,17 @@ describe('pattern_to_regexp', () => { }); test('throws on paths in patterns', () => { - expect(() => parse_allowed_origins('http://example.com/api')).toThrow( - 'Paths not allowed in origin patterns', + assert.throws( + () => parse_allowed_origins('http://example.com/api'), + /Paths not allowed in origin patterns/, ); - expect(() => parse_allowed_origins('https://example.com/api/v1')).toThrow( - 'Paths not allowed in origin patterns', + assert.throws( + () => 
parse_allowed_origins('https://example.com/api/v1'), + /Paths not allowed in origin patterns/, ); - expect(() => parse_allowed_origins('http://localhost:3000/')).toThrow( - 'Paths not allowed in origin patterns', + assert.throws( + () => parse_allowed_origins('http://localhost:3000/'), + /Paths not allowed in origin patterns/, ); }); @@ -291,10 +290,10 @@ describe('pattern_to_regexp', () => { test('ensures wildcards cannot match dots', () => { const patterns = parse_allowed_origins('https://*.example.com'); // The wildcard should match 'safe' but not 'safe.evil' - expect(should_allow_origin('https://safe.example.com', patterns)).toBe(true); - expect(should_allow_origin('https://safe.evil.example.com', patterns)).toBe(false); + assert.ok(should_allow_origin('https://safe.example.com', patterns)); + assert.ok(!should_allow_origin('https://safe.evil.example.com', patterns)); // This is critical - the wildcard should not be able to match across dots - expect(should_allow_origin('https://safe.com.evil.com.example.com', patterns)).toBe(false); + assert.ok(!should_allow_origin('https://safe.com.evil.com.example.com', patterns)); }); }); @@ -369,47 +368,53 @@ describe('pattern_to_regexp', () => { describe('error handling', () => { test('throws on invalid pattern format', () => { - expect(() => parse_allowed_origins('not-a-url')).toThrow('Invalid origin pattern'); - expect(() => parse_allowed_origins('ftp://example.com')).toThrow('Invalid origin pattern'); - expect(() => parse_allowed_origins('//example.com')).toThrow('Invalid origin pattern'); - expect(() => parse_allowed_origins('*.example.com')).toThrow('Invalid origin pattern'); - expect(() => parse_allowed_origins('example.com')).toThrow('Invalid origin pattern'); - expect(() => parse_allowed_origins('localhost:3000')).toThrow('Invalid origin pattern'); + assert.throws(() => parse_allowed_origins('not-a-url'), /Invalid origin pattern/); + assert.throws(() => parse_allowed_origins('ftp://example.com'), /Invalid origin 
pattern/); + assert.throws(() => parse_allowed_origins('//example.com'), /Invalid origin pattern/); + assert.throws(() => parse_allowed_origins('*.example.com'), /Invalid origin pattern/); + assert.throws(() => parse_allowed_origins('example.com'), /Invalid origin pattern/); + assert.throws(() => parse_allowed_origins('localhost:3000'), /Invalid origin pattern/); }); test('throws on wildcards in wrong positions', () => { - expect(() => parse_allowed_origins('http://ex*ample.com')).toThrow( - 'Wildcards must be complete labels', + assert.throws( + () => parse_allowed_origins('http://ex*ample.com'), + /Wildcards must be complete labels/, ); - expect(() => parse_allowed_origins('http://example*.com')).toThrow( - 'Wildcards must be complete labels', + assert.throws( + () => parse_allowed_origins('http://example*.com'), + /Wildcards must be complete labels/, ); - expect(() => parse_allowed_origins('http://*example.com')).toThrow( - 'Wildcards must be complete labels', + assert.throws( + () => parse_allowed_origins('http://*example.com'), + /Wildcards must be complete labels/, ); - expect(() => parse_allowed_origins('http://example.*com')).toThrow( - 'Wildcards must be complete labels', + assert.throws( + () => parse_allowed_origins('http://example.*com'), + /Wildcards must be complete labels/, ); }); test('throws on invalid port wildcards', () => { - expect(() => parse_allowed_origins('http://example.com:*000')).toThrow( - 'Invalid origin pattern', - ); - expect(() => parse_allowed_origins('http://example.com:3*')).toThrow( - 'Invalid origin pattern', + assert.throws( + () => parse_allowed_origins('http://example.com:*000'), + /Invalid origin pattern/, ); + assert.throws(() => parse_allowed_origins('http://example.com:3*'), /Invalid origin pattern/); }); test('throws on wildcards in IPv6 addresses', () => { - expect(() => parse_allowed_origins('http://[*::1]:3000')).toThrow( - 'Wildcards not allowed in IPv6 addresses', + assert.throws( + () => 
parse_allowed_origins('http://[*::1]:3000'), + /Wildcards not allowed in IPv6 addresses/, ); - expect(() => parse_allowed_origins('https://[2001:db8:*::1]')).toThrow( - 'Wildcards not allowed in IPv6 addresses', + assert.throws( + () => parse_allowed_origins('https://[2001:db8:*::1]'), + /Wildcards not allowed in IPv6 addresses/, ); - expect(() => parse_allowed_origins('http://[::ffff:*.0.0.1]:8080')).toThrow( - 'Wildcards not allowed in IPv6 addresses', + assert.throws( + () => parse_allowed_origins('http://[::ffff:*.0.0.1]:8080'), + /Wildcards not allowed in IPv6 addresses/, ); }); }); @@ -419,43 +424,43 @@ describe('pattern_to_regexp', () => { const patterns = parse_allowed_origins('https://example.com'); // All these should match - expect(should_allow_origin('https://example.com', patterns)).toBe(true); - expect(should_allow_origin('https://Example.com', patterns)).toBe(true); - expect(should_allow_origin('https://EXAMPLE.COM', patterns)).toBe(true); - expect(should_allow_origin('https://ExAmPlE.cOm', patterns)).toBe(true); + assert.ok(should_allow_origin('https://example.com', patterns)); + assert.ok(should_allow_origin('https://Example.com', patterns)); + assert.ok(should_allow_origin('https://EXAMPLE.COM', patterns)); + assert.ok(should_allow_origin('https://ExAmPlE.cOm', patterns)); }); test('protocol is also case-insensitive due to regex i flag', () => { const patterns = parse_allowed_origins('https://example.com'); // These should match (case-insensitive regex) - expect(should_allow_origin('https://example.com', patterns)).toBe(true); - expect(should_allow_origin('https://Example.com', patterns)).toBe(true); - expect(should_allow_origin('https://EXAMPLE.COM', patterns)).toBe(true); + assert.ok(should_allow_origin('https://example.com', patterns)); + assert.ok(should_allow_origin('https://Example.com', patterns)); + assert.ok(should_allow_origin('https://EXAMPLE.COM', patterns)); // Different protocol should NOT match - 
expect(should_allow_origin('http://example.com', patterns)).toBe(false); + assert.ok(!should_allow_origin('http://example.com', patterns)); // Note: The regex uses 'i' flag making the entire pattern case-insensitive // In practice, browsers always send lowercase protocols, but our regex would match this - expect(should_allow_origin('HTTPS://example.com', patterns)).toBe(true); + assert.ok(should_allow_origin('HTTPS://example.com', patterns)); }); test('case-insensitive matching with wildcards', () => { const patterns = parse_allowed_origins('https://*.example.com'); - expect(should_allow_origin('https://API.example.com', patterns)).toBe(true); - expect(should_allow_origin('https://api.EXAMPLE.com', patterns)).toBe(true); - expect(should_allow_origin('https://Api.Example.Com', patterns)).toBe(true); + assert.ok(should_allow_origin('https://API.example.com', patterns)); + assert.ok(should_allow_origin('https://api.EXAMPLE.com', patterns)); + assert.ok(should_allow_origin('https://Api.Example.Com', patterns)); }); test('case-insensitive with IPv6', () => { // IPv6 addresses can have hexadecimal characters that are case-insensitive const patterns = parse_allowed_origins('https://[2001:DB8::1]'); - expect(should_allow_origin('https://[2001:db8::1]', patterns)).toBe(true); - expect(should_allow_origin('https://[2001:DB8::1]', patterns)).toBe(true); - expect(should_allow_origin('https://[2001:dB8::1]', patterns)).toBe(true); + assert.ok(should_allow_origin('https://[2001:db8::1]', patterns)); + assert.ok(should_allow_origin('https://[2001:DB8::1]', patterns)); + assert.ok(should_allow_origin('https://[2001:dB8::1]', patterns)); }); }); @@ -495,7 +500,7 @@ describe('pattern_to_regexp', () => { test('handles very long origin strings', () => { const long_subdomain = 'a'.repeat(63) + '.example.com'; const patterns = parse_allowed_origins(`https://*.example.com`); - expect(should_allow_origin(`https://${long_subdomain}`, patterns)).toBe(true); + 
assert.ok(should_allow_origin(`https://${long_subdomain}`, patterns)); }); }); @@ -503,14 +508,14 @@ describe('pattern_to_regexp', () => { test('handles IPv6 addresses', () => { // Note: Zone identifiers (e.g., %lo0) are not supported by URL constructor const patterns = parse_allowed_origins('http://[::1]:3000,https://[2001:db8::1]'); - expect(patterns).toHaveLength(2); + assert.strictEqual(patterns.length, 2); // Test various IPv6 formats - expect(should_allow_origin('http://[::1]:3000', patterns)).toBe(true); - expect(should_allow_origin('https://[2001:db8::1]', patterns)).toBe(true); + assert.ok(should_allow_origin('http://[::1]:3000', patterns)); + assert.ok(should_allow_origin('https://[2001:db8::1]', patterns)); // Should not match without brackets - expect(should_allow_origin('http://::1:3000', patterns)).toBe(false); + assert.ok(!should_allow_origin('http://::1:3000', patterns)); }); test('handles various IPv6 formats', () => { @@ -554,20 +559,20 @@ describe('pattern_to_regexp', () => { const patterns = parse_allowed_origins('https://example.com'); // Trailing dots won't match because we do exact string matching - expect(should_allow_origin('https://example.com.', patterns)).toBe(false); - expect(should_allow_origin('https://example.com', patterns)).toBe(true); + assert.ok(!should_allow_origin('https://example.com.', patterns)); + assert.ok(should_allow_origin('https://example.com', patterns)); // If you want to match trailing dots, you need to include them in the pattern const patternsWithDot = parse_allowed_origins('https://example.com.'); - expect(should_allow_origin('https://example.com.', patternsWithDot)).toBe(true); - expect(should_allow_origin('https://example.com', patternsWithDot)).toBe(false); + assert.ok(should_allow_origin('https://example.com.', patternsWithDot)); + assert.ok(!should_allow_origin('https://example.com', patternsWithDot)); }); test('handles punycode domains', () => { // International domain names are converted to punycode const 
patterns = parse_allowed_origins('https://xn--e1afmkfd.xn--p1ai'); // пример.рф in punycode - expect(should_allow_origin('https://xn--e1afmkfd.xn--p1ai', patterns)).toBe(true); + assert.ok(should_allow_origin('https://xn--e1afmkfd.xn--p1ai', patterns)); // The original Unicode domain would need to be converted to punycode before comparison }); @@ -586,13 +591,13 @@ describe('pattern_to_regexp', () => { ]; for (const origin of localhost_origins) { - expect(should_allow_origin(origin, patterns)).toBe(true); + assert.ok(should_allow_origin(origin, patterns)); } }); test('handles empty hostname edge case', () => { // This should be caught as invalid - expect(() => parse_allowed_origins('http://:3000')).toThrow('Invalid origin pattern'); + assert.throws(() => parse_allowed_origins('http://:3000'), /Invalid origin pattern/); }); test('handles special regex characters in fixed parts', () => { @@ -640,7 +645,7 @@ describe('verify_request_source middleware', () => { { origin: 'http://evil.com', }, - 'forbidden origin', + 'forbidden_origin', ); }); @@ -662,14 +667,14 @@ describe('verify_request_source middleware', () => { { origin: 'http://[::1]:8080', }, - 'forbidden origin', + 'forbidden_origin', ); await test_middleware_blocks( middleware, { origin: 'https://[2001:db8::2]:443', }, - 'forbidden origin', + 'forbidden_origin', ); }); @@ -703,7 +708,7 @@ describe('verify_request_source middleware', () => { { referer: 'http://evil.com/page', }, - 'forbidden referer', + 'forbidden_referer', ); }); @@ -721,7 +726,7 @@ describe('verify_request_source middleware', () => { { referer: 'http://localhost.:3000/page', }, - 'forbidden referer', + 'forbidden_referer', ); // Origin header with trailing dot also won't match @@ -730,7 +735,7 @@ describe('verify_request_source middleware', () => { { origin: 'http://localhost.:3000', }, - 'forbidden origin', + 'forbidden_origin', ); // To match trailing dots, you need them in the pattern @@ -761,7 +766,7 @@ describe('verify_request_source 
middleware', () => { { referer: 'http://[::2]:3000/page', }, - 'forbidden referer', + 'forbidden_referer', ); }); @@ -771,7 +776,7 @@ describe('verify_request_source middleware', () => { { referer: 'not-a-valid-url', }, - 'forbidden referer', + 'forbidden_referer', ); }); @@ -784,7 +789,7 @@ describe('verify_request_source middleware', () => { { referer: 'data:text/html,

test

', }, - 'forbidden referer', + 'forbidden_referer', ); }); }); @@ -825,7 +830,7 @@ describe('verify_request_source middleware', () => { { origin: 'http://localhost:3000', }, - 'forbidden origin', + 'forbidden_origin', ); }); @@ -835,7 +840,7 @@ describe('verify_request_source middleware', () => { { referer: 'http://localhost:3000/page', }, - 'forbidden referer', + 'forbidden_referer', ); }); @@ -879,7 +884,7 @@ describe('integration scenarios', () => { ]; for (const origin of dev_origins) { - expect(should_allow_origin(origin, dev_patterns)).toBe(true); + assert.ok(should_allow_origin(origin, dev_patterns)); } }); @@ -904,11 +909,11 @@ describe('integration scenarios', () => { ]; for (const origin of allowed) { - expect(should_allow_origin(origin, prod_patterns)).toBe(true); + assert.ok(should_allow_origin(origin, prod_patterns)); } for (const origin of blocked) { - expect(should_allow_origin(origin, prod_patterns)).toBe(false); + assert.ok(!should_allow_origin(origin, prod_patterns)); } }); @@ -938,21 +943,21 @@ describe('integration scenarios', () => { ); // HTTP dev with any port - expect(should_allow_origin('http://api.dev.example.com', patterns)).toBe(true); - expect(should_allow_origin('http://api.dev.example.com:3000', patterns)).toBe(true); - expect(should_allow_origin('http://api.dev.example.com:8080', patterns)).toBe(true); + assert.ok(should_allow_origin('http://api.dev.example.com', patterns)); + assert.ok(should_allow_origin('http://api.dev.example.com:3000', patterns)); + assert.ok(should_allow_origin('http://api.dev.example.com:8080', patterns)); // HTTPS prod without port flexibility - expect(should_allow_origin('https://api.prod.example.com', patterns)).toBe(true); - expect(should_allow_origin('https://api.prod.example.com:443', patterns)).toBe(false); + assert.ok(should_allow_origin('https://api.prod.example.com', patterns)); + assert.ok(!should_allow_origin('https://api.prod.example.com:443', patterns)); // Exact match - 
expect(should_allow_origin('https://example.com', patterns)).toBe(true); + assert.ok(should_allow_origin('https://example.com', patterns)); // Should not match - expect(should_allow_origin('https://api.dev.example.com', patterns)).toBe(false); // Wrong protocol - expect(should_allow_origin('http://api.prod.example.com', patterns)).toBe(false); // Wrong protocol - expect(should_allow_origin('https://sub.example.com', patterns)).toBe(false); // No wildcard + assert.ok(!should_allow_origin('https://api.dev.example.com', patterns)); // Wrong protocol + assert.ok(!should_allow_origin('http://api.prod.example.com', patterns)); // Wrong protocol + assert.ok(!should_allow_origin('https://sub.example.com', patterns)); // No wildcard }); }); @@ -961,24 +966,24 @@ describe('normalize_origin', () => { const patterns = parse_allowed_origins('https://example.com:443'); // The pattern explicitly includes :443 - expect(should_allow_origin('https://example.com:443', patterns)).toBe(true); + assert.ok(should_allow_origin('https://example.com:443', patterns)); // Without the port, it won't match (we don't normalize) - expect(should_allow_origin('https://example.com', patterns)).toBe(false); + assert.ok(!should_allow_origin('https://example.com', patterns)); }); test('handles explicit default port 80 for HTTP', () => { const patterns = parse_allowed_origins('http://example.com:80'); // The pattern explicitly includes :80 - expect(should_allow_origin('http://example.com:80', patterns)).toBe(true); + assert.ok(should_allow_origin('http://example.com:80', patterns)); // Without the port, it won't match (we don't normalize) - expect(should_allow_origin('http://example.com', patterns)).toBe(false); + assert.ok(!should_allow_origin('http://example.com', patterns)); }); test('preserves non-standard ports', () => { const patterns = parse_allowed_origins('https://example.com:8443'); - expect(should_allow_origin('https://example.com:8443', patterns)).toBe(true); - 
expect(should_allow_origin('https://example.com', patterns)).toBe(false); + assert.ok(should_allow_origin('https://example.com:8443', patterns)); + assert.ok(!should_allow_origin('https://example.com', patterns)); }); }); diff --git a/src/test/server/security.websocket.test.ts b/src/test/server/security.websocket.test.ts new file mode 100644 index 000000000..477d062ef --- /dev/null +++ b/src/test/server/security.websocket.test.ts @@ -0,0 +1,166 @@ +/** + * Tests that origin verification applies to WebSocket upgrade requests. + * + * Browsers always send an Origin header on WebSocket upgrades (spec-enforced, + * not overridable by JS). The verify_request_source middleware runs for ALL + * routes including the WebSocket GET, so cross-origin connections are rejected + * before the upgrade happens. + */ + +import {describe, test, assert} from 'vitest'; +import {Hono} from 'hono'; +import {parse_allowed_origins, verify_request_source} from '@fuzdev/fuz_app/http/origin.js'; + +import { + build_allowed_hostnames, + create_host_validation_middleware, +} from '../../lib/server/security.js'; + +/** + * Create a minimal Hono app that mirrors the zzz middleware stack. + * Uses a plain GET handler at /ws instead of a real WebSocket upgrade + * (which needs Deno) — what matters is that middleware runs before it. + */ +const create_test_app = (allowed_origins_str: string, bind_host = 'localhost'): Hono => { + const app = new Hono(); + const allowed_hostnames = build_allowed_hostnames(bind_host); + app.use(create_host_validation_middleware(allowed_hostnames)); + const allowed_origins = parse_allowed_origins(allowed_origins_str); + app.use(verify_request_source(allowed_origins)); + // Simulates the WebSocket upgrade handler — if middleware lets the request through, + // this handler responds 200. In production, upgradeWebSocket would upgrade instead. 
+ app.get('/ws', (c) => c.json({upgraded: true})); + app.post('/api/rpc', (c) => c.json({ok: true})); + return app; +}; + +const request_ws = async (app: Hono, headers: Record = {}): Promise => + await app.request('/ws', { + method: 'GET', + headers: { + Connection: 'Upgrade', + Upgrade: 'websocket', + 'Sec-WebSocket-Key': 'dGhlIHNhbXBsZSBub25jZQ==', + 'Sec-WebSocket-Version': '13', + ...headers, + }, + }); + +const request_rpc = async (app: Hono, headers: Record = {}): Promise => + await app.request('/api/rpc', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + ...headers, + }, + body: '{}', + }); + +describe('WebSocket origin security', () => { + const app = create_test_app('http://localhost:*'); + + describe('cross-origin WebSocket blocked', () => { + test('rejects WebSocket upgrade from evil.com', async () => { + const res = await request_ws(app, { + Host: 'localhost:4460', + Origin: 'https://evil.com', + }); + assert.strictEqual(res.status, 403); + const body = await res.json(); + assert.strictEqual(body.error, 'forbidden_origin'); + }); + + test('rejects WebSocket upgrade from attacker localhost lookalike', async () => { + const res = await request_ws(app, { + Host: 'localhost:4460', + Origin: 'http://localhost.evil.com', + }); + assert.strictEqual(res.status, 403); + }); + + test('rejects WebSocket upgrade from different protocol', async () => { + const res = await request_ws(app, { + Host: 'localhost:4460', + Origin: 'https://localhost:4460', + }); + assert.strictEqual(res.status, 403); + }); + }); + + describe('same-origin WebSocket allowed', () => { + test('allows WebSocket upgrade from localhost dev server', async () => { + const res = await request_ws(app, { + Host: 'localhost:4460', + Origin: 'http://localhost:5173', + }); + assert.strictEqual(res.status, 200); + }); + + test('allows WebSocket upgrade from localhost any port', async () => { + const res = await request_ws(app, { + Host: 'localhost:4460', + Origin: 
'http://localhost:4460', + }); + assert.strictEqual(res.status, 200); + }); + + test('allows WebSocket upgrade from localhost no port', async () => { + const res = await request_ws(app, { + Host: 'localhost:4460', + Origin: 'http://localhost', + }); + assert.strictEqual(res.status, 200); + }); + }); + + describe('CLI/tool access (no Origin)', () => { + test('allows WebSocket upgrade without Origin header', async () => { + // CLI tools, curl, etc. don't send Origin — allowed through + const res = await request_ws(app, { + Host: 'localhost:4460', + }); + assert.strictEqual(res.status, 200); + }); + }); + + describe('DNS rebinding defense', () => { + test('rejects WebSocket with bad Host header', async () => { + const res = await request_ws(app, { + Host: 'evil.com:4460', + Origin: 'http://evil.com:4460', + }); + // Host validation middleware rejects before origin check + assert.strictEqual(res.status, 403); + }); + + test('rejects even when origin matches but host does not', async () => { + // DNS rebinding: evil.com resolves to 127.0.0.1, page sends + // Origin: http://evil.com, browser sets Host: evil.com + const res = await request_ws(app, { + Host: 'evil.com:4460', + Origin: 'http://localhost:5173', + }); + assert.strictEqual(res.status, 403); + const body = await res.json(); + assert.strictEqual(body.error, 'forbidden_host'); + }); + }); + + describe('HTTP RPC has same protection', () => { + test('rejects HTTP RPC from evil origin', async () => { + const res = await request_rpc(app, { + Host: 'localhost:4460', + Origin: 'https://evil.com', + }); + assert.strictEqual(res.status, 403); + }); + + test('allows HTTP RPC from localhost', async () => { + const res = await request_rpc(app, { + Host: 'localhost:4460', + Origin: 'http://localhost:5173', + }); + assert.strictEqual(res.status, 200); + }); + }); +}); diff --git a/src/test/server/server_env.test.ts b/src/test/server/server_env.test.ts new file mode 100644 index 000000000..fe62df134 --- /dev/null +++ 
b/src/test/server/server_env.test.ts @@ -0,0 +1,104 @@ +import {describe, test, assert} from 'vitest'; + +import {load_server_env} from '../../lib/server/server_env.js'; + +describe('load_server_env', () => { + // BaseServerEnv requires NODE_ENV and ALLOWED_ORIGINS minimum + const base_env = (key: string): string | undefined => { + if (key === 'NODE_ENV') return 'development'; + if (key === 'DATABASE_URL') return 'memory://'; + if (key === 'SECRET_COOKIE_KEYS') return 'dev-only-not-for-production-use-000'; + if (key === 'ALLOWED_ORIGINS') return 'http://localhost:*'; + return undefined; + }; + + const with_env = + (overrides: Record) => + (key: string): string | undefined => + overrides[key] ?? base_env(key); + + test('defaults host to localhost', () => { + const config = load_server_env(base_env); + assert.strictEqual(config.host, 'localhost'); + }); + + test('defaults port to 4040', () => { + const config = load_server_env(base_env); + assert.strictEqual(config.port, 4040); + }); + + test('reads host from HOST', () => { + const config = load_server_env(with_env({HOST: '127.0.0.1'})); + assert.strictEqual(config.host, '127.0.0.1'); + }); + + test('reads port from PORT', () => { + const config = load_server_env(with_env({PORT: '9999'})); + assert.strictEqual(config.port, 9999); + }); + + test('env object contains ALLOWED_ORIGINS', () => { + const config = load_server_env(base_env); + assert.strictEqual(config.env.ALLOWED_ORIGINS, 'http://localhost:*'); + }); + + test('respects ALLOWED_ORIGINS from env', () => { + const config = load_server_env(with_env({ALLOWED_ORIGINS: 'https://example.com'})); + assert.strictEqual(config.env.ALLOWED_ORIGINS, 'https://example.com'); + }); + + test('defaults websocket_path to /api/ws', () => { + const config = load_server_env(base_env); + assert.strictEqual(config.websocket_path, '/api/ws'); + }); + + test('defaults api_path to /api/rpc', () => { + const config = load_server_env(base_env); + assert.strictEqual(config.api_path, 
'/api/rpc'); + }); + + test('parses scoped_dirs from comma-separated string', () => { + const config = load_server_env(with_env({PUBLIC_ZZZ_SCOPED_DIRS: '/tmp/a, /tmp/b , /tmp/c'})); + assert.deepEqual(config.scoped_dirs, ['/tmp/a', '/tmp/b', '/tmp/c']); + }); + + test('scoped_dirs defaults to empty array', () => { + const config = load_server_env(base_env); + assert.deepEqual(config.scoped_dirs, []); + }); + + test('reads API keys from env', () => { + const config = load_server_env( + with_env({ + SECRET_ANTHROPIC_API_KEY: 'sk-ant-test', + SECRET_OPENAI_API_KEY: 'sk-test', + SECRET_GOOGLE_API_KEY: 'AIza-test', + }), + ); + assert.strictEqual(config.secret_anthropic_api_key, 'sk-ant-test'); + assert.strictEqual(config.secret_openai_api_key, 'sk-test'); + assert.strictEqual(config.secret_google_api_key, 'AIza-test'); + }); + + test('API keys default to undefined', () => { + const config = load_server_env(base_env); + assert.strictEqual(config.secret_anthropic_api_key, undefined); + assert.strictEqual(config.secret_openai_api_key, undefined); + assert.strictEqual(config.secret_google_api_key, undefined); + }); + + test('reads artificial delay from env', () => { + const config = load_server_env(with_env({PUBLIC_BACKEND_ARTIFICIAL_RESPONSE_DELAY: '500'})); + assert.strictEqual(config.artificial_delay, 500); + }); + + test('artificial delay defaults to 0', () => { + const config = load_server_env(base_env); + assert.strictEqual(config.artificial_delay, 0); + }); + + test('overrides take priority over env', () => { + const config = load_server_env(base_env, {host: '0.0.0.0'}); + assert.strictEqual(config.host, '0.0.0.0'); + }); +}); diff --git a/src/test/socket.svelte.test.ts b/src/test/socket.svelte.test.ts index 3935c5300..d844e2fc2 100644 --- a/src/test/socket.svelte.test.ts +++ b/src/test/socket.svelte.test.ts @@ -1,13 +1,12 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {beforeEach, describe, test, expect, vi, afterEach} from 'vitest'; +import 
{beforeEach, describe, test, vi, afterEach, assert} from 'vitest'; import {Socket} from '$lib/socket.svelte.js'; import {DEFAULT_CLOSE_CODE} from '$lib/socket_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Mock WebSocket implementation for testing class Mocket { @@ -125,9 +124,10 @@ describe('Socket', () => { const socket = new Socket({app}); socket.connect(TEST_URLS.BASE); - expect(globalThis.WebSocket).toHaveBeenCalledWith(TEST_URLS.BASE); - expect(socket.url).toBe(TEST_URLS.BASE); - expect(socket.status).toBe('pending'); + assert.ok((globalThis.WebSocket as any).mock.calls.length > 0); + assert.deepEqual((globalThis.WebSocket as any).mock.calls[0], [TEST_URLS.BASE]); + assert.strictEqual(socket.url, TEST_URLS.BASE); + assert.strictEqual(socket.status, 'pending'); }); test('disconnect closes WebSocket with default close code', () => { @@ -140,9 +140,9 @@ describe('Socket', () => { // Disconnect socket.disconnect(); - expect(mock_socket.close_code).toBe(DEFAULT_CLOSE_CODE); - expect(socket.ws).toBeNull(); - expect(socket.open).toBe(false); + assert.strictEqual(mock_socket.close_code, DEFAULT_CLOSE_CODE); + assert.isNull(socket.ws); + assert.ok(!socket.open); }); test('connection success updates state correctly', () => { @@ -150,9 +150,9 @@ describe('Socket', () => { socket.connect(TEST_URLS.BASE); mock_socket.connect(); - expect(socket.open).toBe(true); - expect(socket.status).toBe('success'); - expect(socket.connected).toBe(true); + assert.ok(socket.open); + assert.strictEqual(socket.status, 'success'); + assert.ok(socket.connected); }); test('update_url reconnects with new URL if already connected', () => { @@ -160,14 +160,14 @@ describe('Socket', () => { socket.connect(TEST_URLS.BASE); mock_socket.connect(); - expect(socket.url).toBe(TEST_URLS.BASE); + assert.strictEqual(socket.url, TEST_URLS.BASE); // Update URL 
socket.update_url(TEST_URLS.ALTERNATE); - expect(socket.url).toBe(TEST_URLS.ALTERNATE); - expect(globalThis.WebSocket).toHaveBeenCalledTimes(2); - expect(globalThis.WebSocket).toHaveBeenLastCalledWith(TEST_URLS.ALTERNATE); + assert.strictEqual(socket.url, TEST_URLS.ALTERNATE); + assert.strictEqual((globalThis.WebSocket as any).mock.calls.length, 2); + assert.deepEqual((globalThis.WebSocket as any).mock.calls[1], [TEST_URLS.ALTERNATE]); }); }); @@ -177,8 +177,8 @@ describe('Socket', () => { // Not connected yet const sent = socket.send(TEST_MESSAGE.BASIC); - expect(sent).toBe(false); - expect(socket.queued_message_count).toBe(1); + assert.ok(!sent); + assert.strictEqual(socket.queued_message_count, 1); }); test('send transmits message when socket is connected', () => { @@ -188,11 +188,11 @@ describe('Socket', () => { const sent = socket.send(TEST_MESSAGE.BASIC); - expect(sent).toBe(true); - expect(mock_socket.sent_messages.length).toBe(1); + assert.ok(sent); + assert.strictEqual(mock_socket.sent_messages.length, 1); const first_message = mock_socket.sent_messages[0]; - expect(first_message).toBeDefined(); - expect(JSON.parse(first_message!)).toEqual(TEST_MESSAGE.BASIC); + assert.isDefined(first_message); + assert.deepEqual(JSON.parse(first_message), TEST_MESSAGE.BASIC); }); test('message queueing sends queued messages when reconnected', () => { @@ -202,15 +202,15 @@ describe('Socket', () => { socket.send({method: 'message_a'}); socket.send({method: 'message_b'}); - expect(socket.queued_message_count).toBe(2); + assert.strictEqual(socket.queued_message_count, 2); // Connect socket.connect(TEST_URLS.BASE); mock_socket.connect(); // Messages should be sent - expect(mock_socket.sent_messages.length).toBe(2); - expect(socket.queued_message_count).toBe(0); + assert.strictEqual(mock_socket.sent_messages.length, 2); + assert.strictEqual(socket.queued_message_count, 0); }); }); @@ -220,7 +220,7 @@ describe('Socket', () => { // Queue a message socket.send(TEST_MESSAGE.BASIC); 
- expect(socket.queued_message_count).toBe(1); + assert.strictEqual(socket.queued_message_count, 1); // Mock send failure const error_message = 'Send operation failed'; @@ -233,13 +233,13 @@ describe('Socket', () => { mock_socket.connect(); // Message should move to failed - expect(socket.queued_message_count).toBe(0); - expect(socket.failed_message_count).toBe(1); + assert.strictEqual(socket.queued_message_count, 0); + assert.strictEqual(socket.failed_message_count, 1); // Check error reason const failed_message = Array.from(socket.failed_messages.values())[0]; - expect(failed_message).toBeDefined(); - expect(failed_message!.reason).toBe(error_message); + assert.isDefined(failed_message); + assert.strictEqual(failed_message.reason, error_message); }); test('clear_failed_messages removes all failed messages', () => { @@ -259,12 +259,12 @@ describe('Socket', () => { socket.retry_queued_messages(); // Verify message moved to failed - expect(socket.queued_message_count).toBe(0); - expect(socket.failed_message_count).toBe(1); + assert.strictEqual(socket.queued_message_count, 0); + assert.strictEqual(socket.failed_message_count, 1); // Clear failed messages socket.clear_failed_messages(); - expect(socket.failed_message_count).toBe(0); + assert.strictEqual(socket.failed_message_count, 0); }); }); @@ -278,12 +278,12 @@ describe('Socket', () => { // Simulate unexpected close mock_socket.dispatchEvent('close'); - expect(socket.open).toBe(false); - expect(socket.status).toBe('failure'); + assert.ok(!socket.open); + assert.strictEqual(socket.status, 'failure'); // Should reconnect after delay vi.advanceTimersByTime(1000); - expect(globalThis.WebSocket).toHaveBeenCalledTimes(2); + assert.strictEqual((globalThis.WebSocket as any).mock.calls.length, 2); }); test('reconnect delay uses exponential backoff', () => { @@ -295,17 +295,17 @@ describe('Socket', () => { // Initial connect socket.connect(TEST_URLS.BASE); mock_socket.connect(); - expect(socket.status).toBe('success'); + 
assert.strictEqual(socket.status, 'success'); // First unexpected close mock_socket.dispatchEvent('close', {code: 1006}); - expect(socket.status).toBe('failure'); - expect(socket.reconnect_count).toBe(1); - expect(socket.current_reconnect_delay).toBe(1000); // 1000 * 1.5^0 + assert.strictEqual(socket.status, 'failure'); + assert.strictEqual(socket.reconnect_count, 1); + assert.strictEqual(socket.current_reconnect_delay, 1000); // 1000 * 1.5^0 // Trigger first reconnect vi.advanceTimersByTime(1000); - expect(globalThis.WebSocket).toHaveBeenCalledTimes(2); + assert.strictEqual((globalThis.WebSocket as any).mock.calls.length, 2); // Test subsequent reconnects with increasing delays // Clear timers between tests to avoid interference @@ -317,8 +317,8 @@ describe('Socket', () => { socket.status = 'failure'; socket.reconnect_count = 1; socket.maybe_reconnect(); - expect(socket.reconnect_count).toBe(2); - expect(socket.current_reconnect_delay).toBe(1500); // 1000 * 1.5^1 + assert.strictEqual(socket.reconnect_count, 2); + assert.strictEqual(socket.current_reconnect_delay, 1500); // 1000 * 1.5^1 // Clear timeout to avoid interference if (socket.reconnect_timeout !== null) { @@ -329,8 +329,8 @@ describe('Socket', () => { socket.status = 'failure'; socket.reconnect_count = 2; socket.maybe_reconnect(); - expect(socket.reconnect_count).toBe(3); - expect(socket.current_reconnect_delay).toBe(2250); // 1000 * 1.5^2 + assert.strictEqual(socket.reconnect_count, 3); + assert.strictEqual(socket.current_reconnect_delay, 2250); // 1000 * 1.5^2 // Test max delay cap if (socket.reconnect_timeout !== null) { @@ -339,8 +339,8 @@ describe('Socket', () => { socket.status = 'failure'; socket.reconnect_count = 14; socket.maybe_reconnect(); - expect(socket.reconnect_count).toBe(15); - expect(socket.current_reconnect_delay).toBe(30000); // Capped at max value + assert.strictEqual(socket.reconnect_count, 15); + assert.strictEqual(socket.current_reconnect_delay, 30000); // Capped at max value }); 
}); @@ -355,7 +355,7 @@ describe('Socket', () => { vi.advanceTimersByTime(1000); // Check ping was sent - expect(app.api.ping).toHaveBeenCalled(); + assert.ok((app.api.ping as any).mock.calls.length > 0); }); }); }); diff --git a/src/test/sortable.svelte.test.ts b/src/test/sortable.svelte.test.ts index a054ee7bb..c12d9e92c 100644 --- a/src/test/sortable.svelte.test.ts +++ b/src/test/sortable.svelte.test.ts @@ -1,15 +1,14 @@ -// @slop Claude Sonnet 3.7 - // @vitest-environment jsdom -import {test, expect, describe, beforeEach} from 'vitest'; +import {test, describe, beforeEach, assert} from 'vitest'; import {z} from 'zod'; import {Sortable, type Sorter, sort_by_text, sort_by_numeric} from '$lib/sortable.svelte.js'; import {Cell} from '$lib/cell.svelte.js'; import {UuidWithDefault, type Uuid, DatetimeNow, create_uuid} from '$lib/zod_helpers.js'; import {Frontend} from '$lib/frontend.svelte.js'; -import {monkeypatch_zzz_for_tests} from './test_helpers.ts'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; // Create a schema for our test cell const TestCellSchema = z.object({ @@ -82,12 +81,12 @@ describe('Sortable', () => { ); const first_sorter = sorters[0]; - expect(first_sorter).toBeDefined(); - expect(sortable.items).toBe(items); - expect(sortable.sorters).toBe(sorters); - expect(sortable.active_key).toBe(first_sorter!.key); - expect(sortable.active_sorter).toBe(first_sorter); - expect(sortable.active_sort_fn).toBe(first_sorter!.fn); + assert.isDefined(first_sorter); + assert.strictEqual(sortable.items, items); + assert.strictEqual(sortable.sorters, sorters); + assert.strictEqual(sortable.active_key, first_sorter.key); + assert.strictEqual(sortable.active_sorter, first_sorter); + assert.strictEqual(sortable.active_sort_fn, first_sorter.fn); }); test('uses default key when provided', () => { @@ -98,10 +97,10 @@ describe('Sortable', () => { ); const sorter_at_2 = sorters[2]; - expect(sorter_at_2).toBeDefined(); - 
expect(sortable.default_key).toBe('value'); - expect(sortable.active_key).toBe('value'); - expect(sortable.active_sorter).toBe(sorter_at_2); + assert.isDefined(sorter_at_2); + assert.strictEqual(sortable.default_key, 'value'); + assert.strictEqual(sortable.active_key, 'value'); + assert.strictEqual(sortable.active_sorter, sorter_at_2); }); test('falls back to first sorter when default key is invalid', () => { @@ -112,9 +111,9 @@ describe('Sortable', () => { ); const first_sorter = sorters[0]; - expect(first_sorter).toBeDefined(); - expect(sortable.default_key).toBe('invalid_key'); - expect(sortable.active_key).toBe(first_sorter!.key); + assert.isDefined(first_sorter); + assert.strictEqual(sortable.default_key, 'invalid_key'); + assert.strictEqual(sortable.active_key, first_sorter.key); }); test('handles empty sorters array', () => { @@ -123,9 +122,9 @@ describe('Sortable', () => { () => [], ); - expect(sortable.active_key).toBe(''); - expect(sortable.active_sorter).toBeUndefined(); - expect(sortable.active_sort_fn).toBeUndefined(); + assert.strictEqual(sortable.active_key, ''); + assert.ok(sortable.active_sorter === undefined); + assert.ok(sortable.active_sort_fn === undefined); }); }); @@ -138,8 +137,8 @@ describe('Sortable', () => { ); const first_sorter = sorters[0]; - expect(first_sorter).toBeDefined(); - expect(sortable.active_key).toBe(first_sorter!.key); + assert.isDefined(first_sorter); + assert.strictEqual(sortable.active_key, first_sorter.key); // Change sorters to new array without the current active key current_sorters = [sorters[2]!, sorters[3]!]; @@ -149,7 +148,7 @@ describe('Sortable', () => { sortable.update_active_key(); // Now the active key should match the first sorter in the new array - expect(sortable.active_key).toBe('value'); + assert.strictEqual(sortable.active_key, 'value'); }); test('preserves active key if still valid after sorters change', () => { @@ -161,27 +160,27 @@ describe('Sortable', () => { const sorter_at_1 = sorters[1]; const 
sorter_at_2 = sorters[2]; - expect(sorter_at_1).toBeDefined(); - expect(sorter_at_2).toBeDefined(); + assert.isDefined(sorter_at_1); + assert.isDefined(sorter_at_2); // Set active key to the second sorter - sortable.active_key = sorter_at_1!.key; + sortable.active_key = sorter_at_1.key; // Change sorters but keep the active key - current_sorters = [sorter_at_1!, sorter_at_2!]; + current_sorters = [sorter_at_1, sorter_at_2]; sortable.update_active_key(); - expect(sortable.active_key).toBe(sorter_at_1!.key); + assert.strictEqual(sortable.active_key, sorter_at_1.key); }); }); describe('sort_by_text', () => { test('sorts text values in ascending order', () => { const sorter_0 = sorters[0]; - expect(sorter_0).toBeDefined(); + assert.isDefined(sorter_0); const sortable = new Sortable( () => items, - () => [sorter_0!], + () => [sorter_0], ); const sorted = sortable.sorted_items; @@ -189,27 +188,27 @@ describe('Sortable', () => { const item1 = sorted[1]; const item2 = sorted[2]; const item3 = sorted[3]; - expect(item0).toBeDefined(); - expect(item1).toBeDefined(); - expect(item2).toBeDefined(); - expect(item3).toBeDefined(); + assert.isDefined(item0); + assert.isDefined(item1); + assert.isDefined(item2); + assert.isDefined(item3); - expect(item0!.name).toBe('Apple'); - expect(item1!.name).toBe('Apple'); - expect(item2!.name).toBe('Banana'); - expect(item3!.name).toBe('Cherry'); + assert.strictEqual(item0.name, 'Apple'); + assert.strictEqual(item1.name, 'Apple'); + assert.strictEqual(item2.name, 'Banana'); + assert.strictEqual(item3.name, 'Cherry'); // Verify that items with the same name are sorted by cid as fallback - expect(item0!.cid).toBe(40); // First "Apple" has higher cid - expect(item1!.cid).toBe(10); // Second "Apple" has lower cid + assert.strictEqual(item0.cid, 40); // First "Apple" has higher cid + assert.strictEqual(item1.cid, 10); // Second "Apple" has lower cid }); test('sorts text values in descending order', () => { const sorter_1 = sorters[1]; - 
expect(sorter_1).toBeDefined(); + assert.isDefined(sorter_1); const sortable = new Sortable( () => items, - () => [sorter_1!], + () => [sorter_1], ); const sorted = sortable.sorted_items; @@ -217,29 +216,29 @@ describe('Sortable', () => { const item1 = sorted[1]; const item2 = sorted[2]; const item3 = sorted[3]; - expect(item0).toBeDefined(); - expect(item1).toBeDefined(); - expect(item2).toBeDefined(); - expect(item3).toBeDefined(); + assert.isDefined(item0); + assert.isDefined(item1); + assert.isDefined(item2); + assert.isDefined(item3); - expect(item0!.name).toBe('Cherry'); - expect(item1!.name).toBe('Banana'); - expect(item2!.name).toBe('Apple'); - expect(item3!.name).toBe('Apple'); + assert.strictEqual(item0.name, 'Cherry'); + assert.strictEqual(item1.name, 'Banana'); + assert.strictEqual(item2.name, 'Apple'); + assert.strictEqual(item3.name, 'Apple'); // Verify that items with the same name are sorted by cid as fallback - expect(item2!.cid).toBe(40); // First "Apple" has higher cid - expect(item3!.cid).toBe(10); // Second "Apple" has lower cid + assert.strictEqual(item2.cid, 40); // First "Apple" has higher cid + assert.strictEqual(item3.cid, 10); // Second "Apple" has lower cid }); }); describe('sort_by_numeric', () => { test('sorts numeric values in ascending order', () => { const sorter_2 = sorters[2]; - expect(sorter_2).toBeDefined(); + assert.isDefined(sorter_2); const sortable = new Sortable( () => items, - () => [sorter_2!], + () => [sorter_2], ); const sorted = sortable.sorted_items; @@ -247,23 +246,23 @@ describe('Sortable', () => { const item1 = sorted[1]; const item2 = sorted[2]; const item3 = sorted[3]; - expect(item0).toBeDefined(); - expect(item1).toBeDefined(); - expect(item2).toBeDefined(); - expect(item3).toBeDefined(); - - expect(item0!.value).toBe(5); - expect(item1!.value).toBe(10); - expect(item2!.value).toBe(15); - expect(item3!.value).toBe(20); + assert.isDefined(item0); + assert.isDefined(item1); + assert.isDefined(item2); + 
assert.isDefined(item3); + + assert.strictEqual(item0.value, 5); + assert.strictEqual(item1.value, 10); + assert.strictEqual(item2.value, 15); + assert.strictEqual(item3.value, 20); }); test('sorts numeric values in descending order', () => { const sorter_3 = sorters[3]; - expect(sorter_3).toBeDefined(); + assert.isDefined(sorter_3); const sortable = new Sortable( () => items, - () => [sorter_3!], + () => [sorter_3], ); const sorted = sortable.sorted_items; @@ -271,15 +270,15 @@ describe('Sortable', () => { const item1 = sorted[1]; const item2 = sorted[2]; const item3 = sorted[3]; - expect(item0).toBeDefined(); - expect(item1).toBeDefined(); - expect(item2).toBeDefined(); - expect(item3).toBeDefined(); - - expect(item0!.value).toBe(20); - expect(item1!.value).toBe(15); - expect(item2!.value).toBe(10); - expect(item3!.value).toBe(5); + assert.isDefined(item0); + assert.isDefined(item1); + assert.isDefined(item2); + assert.isDefined(item3); + + assert.strictEqual(item0.value, 20); + assert.strictEqual(item1.value, 15); + assert.strictEqual(item2.value, 10); + assert.strictEqual(item3.value, 5); }); test('maintains stable sort order with equal values using cid', () => { @@ -300,14 +299,14 @@ describe('Sortable', () => { const item0 = sorted[0]; const item1 = sorted[1]; const item2 = sorted[2]; - expect(item0).toBeDefined(); - expect(item1).toBeDefined(); - expect(item2).toBeDefined(); + assert.isDefined(item0); + assert.isDefined(item1); + assert.isDefined(item2); // Items with equal values should be sorted by cid - expect(item0!.cid).toBe(300); - expect(item1!.cid).toBe(200); - expect(item2!.cid).toBe(100); + assert.strictEqual(item0.cid, 300); + assert.strictEqual(item1.cid, 200); + assert.strictEqual(item2.cid, 100); }); }); @@ -321,7 +320,7 @@ describe('Sortable', () => { ); // Start with 4 items - expect(sortable.sorted_items.length).toBe(4); + assert.strictEqual(sortable.sorted_items.length, 4); // Add a new item const new_item = new TestCell(app, create_uuid(), 
'Dragonfruit', 25, 50); @@ -330,8 +329,8 @@ describe('Sortable', () => { current_items = [...current_items, new_item]; // Now we should see 5 items - expect(sortable.sorted_items.length).toBe(5); - expect(sortable.sorted_items.some((item) => item.cid === 50)).toBe(true); + assert.strictEqual(sortable.sorted_items.length, 5); + assert.ok(sortable.sorted_items.some((item) => item.cid === 50)); }); test('updates when active_key changes', () => { @@ -341,19 +340,19 @@ describe('Sortable', () => { ); const first_item = sortable.sorted_items[0]; - expect(first_item).toBeDefined(); + assert.isDefined(first_item); // Initially sorted by name (first sorter) - expect(first_item!.name).toBe('Apple'); + assert.strictEqual(first_item.name, 'Apple'); // Change to sort by value sortable.active_key = 'value'; const first_item_after = sortable.sorted_items[0]; - expect(first_item_after).toBeDefined(); + assert.isDefined(first_item_after); // Should now be sorted by value - expect(first_item_after!.value).toBe(5); + assert.strictEqual(first_item_after.value, 5); }); }); }); diff --git a/src/test/test_helpers.ts b/src/test/test_helpers.ts index db85fca0e..1b64ef386 100644 --- a/src/test/test_helpers.ts +++ b/src/test/test_helpers.ts @@ -1,46 +1,5 @@ -import type {Frontend} from '../lib/frontend.svelte.ts'; -import type {DiskfilePath} from '../lib/diskfile_types.ts'; - -// TODO these aren't used, should they be for improved type safety? - -/** - * Vitest's `expects` does not narrow types, this does for falsy values. - * - * @see https://github.com/vitest-dev/vitest/issues/2883 - */ -export const expect_ok: (value: T, message?: string) => asserts value = (value, message) => { - if (!value) { - throw new Error(message ?? 'Expected value to be truthy'); - } -}; - -/** - * Vitest's `expects` does not narrow types, this does for undefined values. 
- * - * @see https://github.com/vitest-dev/vitest/issues/2883 - */ -export const expect_defined: (value: T | undefined, message?: string) => asserts value is T = ( - value, - message, -) => { - if (value === undefined) { - throw new Error(message ?? 'Expected value to be defined'); - } -}; - -/** - * Vitest's `expects` does not narrow types, this does for nullish values. - * - * @see https://github.com/vitest-dev/vitest/issues/2883 - */ -export const expect_nonnullish: ( - value: T | undefined | null, - message?: string, -) => asserts value is T = (value, message) => { - if (value == null) { - throw new Error(message ?? 'Expected value to be non-nullish'); - } -}; +import type {Frontend} from '../lib/frontend.svelte.js'; +import type {DiskfilePath} from '../lib/diskfile_types.js'; // TODO improve this pattern /** diff --git a/src/test/workspace.svelte.test.ts b/src/test/workspace.svelte.test.ts new file mode 100644 index 000000000..8af41bad3 --- /dev/null +++ b/src/test/workspace.svelte.test.ts @@ -0,0 +1,197 @@ +// @vitest-environment jsdom + +import {test, beforeEach, describe, assert} from 'vitest'; + +import {Workspace} from '$lib/workspace.svelte.js'; +import {Workspaces} from '$lib/workspaces.svelte.js'; +import {Frontend} from '$lib/frontend.svelte.js'; +import {DiskfileDirectoryPath} from '$lib/diskfile_types.js'; +import type {Uuid} from '$lib/zod_helpers.js'; + +import {monkeypatch_zzz_for_tests} from './test_helpers.js'; + +let app: Frontend; + +beforeEach(() => { + app = monkeypatch_zzz_for_tests(new Frontend()); +}); + +describe('Workspace', () => { + test('initializes with path and defaults', () => { + const path = DiskfileDirectoryPath.parse('/some/dir/'); + const workspace = new Workspace({app, json: {path}}); + + assert.isDefined(workspace.id); + assert.strictEqual(workspace.path, path); + assert.strictEqual(workspace.name, ''); + assert.strictEqual(workspace.opened_at, ''); + }); + + test('initializes with provided json', () => { + const path = 
DiskfileDirectoryPath.parse('/home/user/project/'); + const workspace = new Workspace({ + app, + json: { + path, + name: 'project', + opened_at: '2026-04-08T00:00:00.000Z', + }, + }); + + assert.strictEqual(workspace.path, path); + assert.strictEqual(workspace.name, 'project'); + assert.strictEqual(workspace.opened_at, '2026-04-08T00:00:00.000Z'); + }); + + test('registers in cell registry', () => { + const path = DiskfileDirectoryPath.parse('/some/dir/'); + const workspace = new Workspace({app, json: {path}}); + assert.ok(app.cell_registry.all.has(workspace.id)); + }); + + test('serializes to json', () => { + const path = DiskfileDirectoryPath.parse('/some/dir/'); + const workspace = new Workspace({ + app, + json: {path, name: 'dir', opened_at: '2026-01-01T00:00:00.000Z'}, + }); + + const json = workspace.json; + assert.strictEqual(json.path, path); + assert.strictEqual(json.name, 'dir'); + assert.strictEqual(json.opened_at, '2026-01-01T00:00:00.000Z'); + }); +}); + +describe('Workspaces', () => { + test('initializes empty', () => { + const workspaces = new Workspaces({app}); + + assert.strictEqual(workspaces.items.by_id.size, 0); + assert.strictEqual(workspaces.active_id, null); + assert.strictEqual(workspaces.active, undefined); + }); + + test('add creates a workspace and auto-activates first', () => { + const workspaces = new Workspaces({app}); + const path = DiskfileDirectoryPath.parse('/home/user/project/'); + + const workspace = workspaces.add({ + path, + name: 'project', + opened_at: '2026-01-01T00:00:00.000Z', + }); + + assert.strictEqual(workspaces.items.by_id.size, 1); + assert.strictEqual(workspaces.active_id, workspace.id); + assert.strictEqual(workspaces.active, workspace); + }); + + test('add deduplicates by path', () => { + const workspaces = new Workspaces({app}); + const path = DiskfileDirectoryPath.parse('/home/user/project/'); + + const first = workspaces.add({path, name: 'project', opened_at: '2026-01-01T00:00:00.000Z'}); + const second = 
workspaces.add({path, name: 'project', opened_at: '2026-02-01T00:00:00.000Z'}); + + assert.strictEqual(first, second); + assert.strictEqual(workspaces.items.by_id.size, 1); + }); + + test('add multiple workspaces', () => { + const workspaces = new Workspaces({app}); + const path_a = DiskfileDirectoryPath.parse('/path/a/'); + const path_b = DiskfileDirectoryPath.parse('/path/b/'); + + const a = workspaces.add({path: path_a, name: 'a', opened_at: ''}); + const b = workspaces.add({path: path_b, name: 'b', opened_at: ''}); + + assert.strictEqual(workspaces.items.by_id.size, 2); + // First added is auto-activated + assert.strictEqual(workspaces.active_id, a.id); + assert.notStrictEqual(a.id, b.id); + }); + + test('remove deletes workspace and updates active_id', () => { + const workspaces = new Workspaces({app}); + const path_a = DiskfileDirectoryPath.parse('/path/a/'); + const path_b = DiskfileDirectoryPath.parse('/path/b/'); + + const a = workspaces.add({path: path_a, name: 'a', opened_at: ''}); + workspaces.add({path: path_b, name: 'b', opened_at: ''}); + + assert.strictEqual(workspaces.active_id, a.id); + + workspaces.remove(a.id); + + assert.strictEqual(workspaces.items.by_id.size, 1); + // active_id should move to remaining workspace + assert.notStrictEqual(workspaces.active_id, null); + assert.notStrictEqual(workspaces.active_id, a.id); + }); + + test('remove last workspace sets active_id to null', () => { + const workspaces = new Workspaces({app}); + const path = DiskfileDirectoryPath.parse('/path/only/'); + + const only = workspaces.add({path, name: 'only', opened_at: ''}); + workspaces.remove(only.id); + + assert.strictEqual(workspaces.items.by_id.size, 0); + assert.strictEqual(workspaces.active_id, null); + }); + + test('get_by_path returns workspace or undefined', () => { + const workspaces = new Workspaces({app}); + const path = DiskfileDirectoryPath.parse('/home/user/project/'); + + assert.strictEqual(workspaces.get_by_path(path), undefined); + + const 
workspace = workspaces.add({path, name: 'project', opened_at: ''}); + assert.strictEqual(workspaces.get_by_path(path), workspace); + }); + + test('activate changes active workspace', () => { + const workspaces = new Workspaces({app}); + const path_a = DiskfileDirectoryPath.parse('/path/a/'); + const path_b = DiskfileDirectoryPath.parse('/path/b/'); + + const a = workspaces.add({path: path_a, name: 'a', opened_at: ''}); + const b = workspaces.add({path: path_b, name: 'b', opened_at: ''}); + + assert.strictEqual(workspaces.active_id, a.id); + + workspaces.activate(b.id); + assert.strictEqual(workspaces.active_id, b.id); + assert.strictEqual(workspaces.active, b); + }); + + test('activate with unknown id is a no-op', () => { + const workspaces = new Workspaces({app}); + const path = DiskfileDirectoryPath.parse('/path/a/'); + + const a = workspaces.add({path, name: 'a', opened_at: ''}); + workspaces.activate('nonexistent-id' as Uuid); + + assert.strictEqual(workspaces.active_id, a.id); + }); + + test('initializes from json with items', () => { + const path_a = DiskfileDirectoryPath.parse('/path/a/'); + const path_b = DiskfileDirectoryPath.parse('/path/b/'); + + const workspaces = new Workspaces({ + app, + json: { + items: [ + {path: path_a, name: 'a', opened_at: '2026-01-01T00:00:00.000Z'}, + {path: path_b, name: 'b', opened_at: '2026-02-01T00:00:00.000Z'}, + ], + }, + }); + + assert.strictEqual(workspaces.items.by_id.size, 2); + assert.isDefined(workspaces.get_by_path(path_a)); + assert.isDefined(workspaces.get_by_path(path_b)); + }); +}); diff --git a/src/test/xml.test.ts b/src/test/xml.test.ts index 2342b37c0..d75edfc34 100644 --- a/src/test/xml.test.ts +++ b/src/test/xml.test.ts @@ -1,8 +1,6 @@ -// @slop Claude Sonnet 4 - // @vitest-environment jsdom -import {describe, test, expect} from 'vitest'; +import {describe, test, assert} from 'vitest'; import {z} from 'zod'; import { @@ -18,71 +16,71 @@ import { const uuid_regex = 
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/; const test_uuid_a = '123e4567-e89b-12d3-a456-426614174000'; -const expect_parse_success = (schema: z.ZodType, input: unknown, expected?: T) => { +const assert_parse_success = (schema: z.ZodType, input: unknown, expected?: T) => { const result = schema.safeParse(input); - expect(result.success).toBe(true); - if (result.success && expected !== undefined) { - expect(result.data).toEqual(expected); + assert.ok(result.success); + if (expected !== undefined) { + assert.deepEqual(result.data, expected); } - return result.success ? result.data : undefined; + return result.data; }; -const expect_parse_failure = (schema: z.ZodType, input: unknown) => { +const assert_parse_failure = (schema: z.ZodType, input: unknown) => { const result = schema.safeParse(input); - expect(result.success).toBe(false); - return result.success ? undefined : result.error; + assert.ok(!result.success); + return result.error; }; describe('XmlAttributeKey', () => { test('accepts valid attribute names', () => { const valid_keys = ['attr', 'data-test', 'xml:lang', 'ns:element', 'class']; for (const key of valid_keys) { - expect_parse_success(XmlAttributeKey, key, key); + assert_parse_success(XmlAttributeKey, key, key); } }); test('trims whitespace', () => { - expect_parse_success(XmlAttributeKey, ' attr ', 'attr'); - expect_parse_success(XmlAttributeKey, '\t class \n', 'class'); + assert_parse_success(XmlAttributeKey, ' attr ', 'attr'); + assert_parse_success(XmlAttributeKey, '\t class \n', 'class'); }); test('rejects empty strings after trimming', () => { - expect_parse_failure(XmlAttributeKey, ''); - expect_parse_failure(XmlAttributeKey, ' '); - expect_parse_failure(XmlAttributeKey, '\t\n'); + assert_parse_failure(XmlAttributeKey, ''); + assert_parse_failure(XmlAttributeKey, ' '); + assert_parse_failure(XmlAttributeKey, '\t\n'); }); test('rejects non-strings', () => { - expect_parse_failure(XmlAttributeKey, null); - 
expect_parse_failure(XmlAttributeKey, undefined); - expect_parse_failure(XmlAttributeKey, 123); - expect_parse_failure(XmlAttributeKey, {}); + assert_parse_failure(XmlAttributeKey, null); + assert_parse_failure(XmlAttributeKey, undefined); + assert_parse_failure(XmlAttributeKey, 123); + assert_parse_failure(XmlAttributeKey, {}); }); test('handles special characters', () => { - expect_parse_success(XmlAttributeKey, 'data-123'); - expect_parse_success(XmlAttributeKey, 'xml_test'); - expect_parse_success(XmlAttributeKey, 'attr.value'); + assert_parse_success(XmlAttributeKey, 'data-123'); + assert_parse_success(XmlAttributeKey, 'xml_test'); + assert_parse_success(XmlAttributeKey, 'attr.value'); }); test('handles unicode', () => { - expect_parse_success(XmlAttributeKey, 'атрибут'); - expect_parse_success(XmlAttributeKey, '属性'); + assert_parse_success(XmlAttributeKey, 'атрибут'); + assert_parse_success(XmlAttributeKey, '属性'); }); }); describe('XmlAttributeKeyWithDefault', () => { test('provides default value', () => { - expect_parse_success(XmlAttributeKeyWithDefault, undefined, 'attr'); + assert_parse_success(XmlAttributeKeyWithDefault, undefined, 'attr'); }); test('accepts valid strings', () => { - expect_parse_success(XmlAttributeKeyWithDefault, 'custom', 'custom'); + assert_parse_success(XmlAttributeKeyWithDefault, 'custom', 'custom'); }); test('rejects empty strings', () => { - expect_parse_failure(XmlAttributeKeyWithDefault, ''); - expect_parse_failure(XmlAttributeKeyWithDefault, ' '); + assert_parse_failure(XmlAttributeKeyWithDefault, ''); + assert_parse_failure(XmlAttributeKeyWithDefault, ' '); }); }); @@ -90,36 +88,36 @@ describe('XmlAttributeValue', () => { test('accepts any string', () => { const values = ['', 'text', '123', 'true', 'special chars: <>&"\'']; for (const value of values) { - expect_parse_success(XmlAttributeValue, value, value); + assert_parse_success(XmlAttributeValue, value, value); } }); test('accepts unicode', () => { - 
expect_parse_success(XmlAttributeValue, '测试值'); - expect_parse_success(XmlAttributeValue, 'значение'); - expect_parse_success(XmlAttributeValue, '🔥💯'); + assert_parse_success(XmlAttributeValue, '测试值'); + assert_parse_success(XmlAttributeValue, 'значение'); + assert_parse_success(XmlAttributeValue, '🔥💯'); }); test('accepts very long strings', () => { const long_value = 'a'.repeat(10000); - expect_parse_success(XmlAttributeValue, long_value, long_value); + assert_parse_success(XmlAttributeValue, long_value, long_value); }); test('rejects non-strings', () => { - expect_parse_failure(XmlAttributeValue, null); - expect_parse_failure(XmlAttributeValue, undefined); - expect_parse_failure(XmlAttributeValue, 123); - expect_parse_failure(XmlAttributeValue, []); + assert_parse_failure(XmlAttributeValue, null); + assert_parse_failure(XmlAttributeValue, undefined); + assert_parse_failure(XmlAttributeValue, 123); + assert_parse_failure(XmlAttributeValue, []); }); }); describe('XmlAttributeValueWithDefault', () => { test('provides empty string default', () => { - expect_parse_success(XmlAttributeValueWithDefault, undefined, ''); + assert_parse_success(XmlAttributeValueWithDefault, undefined, ''); }); test('accepts valid strings', () => { - expect_parse_success(XmlAttributeValueWithDefault, 'test', 'test'); + assert_parse_success(XmlAttributeValueWithDefault, 'test', 'test'); }); }); @@ -131,80 +129,80 @@ describe('XmlAttribute', () => { }; test('accepts complete valid attributes', () => { - expect_parse_success(XmlAttribute, valid_base_attr); + assert_parse_success(XmlAttribute, valid_base_attr); }); test('requires all properties', () => { - expect_parse_failure(XmlAttribute, {id: test_uuid_a, key: 'class'}); - expect_parse_failure(XmlAttribute, {id: test_uuid_a, value: 'test'}); - expect_parse_failure(XmlAttribute, {key: 'class', value: 'test'}); + assert_parse_failure(XmlAttribute, {id: test_uuid_a, key: 'class'}); + assert_parse_failure(XmlAttribute, {id: test_uuid_a, value: 
'test'}); + assert_parse_failure(XmlAttribute, {key: 'class', value: 'test'}); }); test('validates uuid format', () => { - expect_parse_failure(XmlAttribute, {...valid_base_attr, id: 'invalid-uuid'}); - expect_parse_failure(XmlAttribute, {...valid_base_attr, id: ''}); + assert_parse_failure(XmlAttribute, {...valid_base_attr, id: 'invalid-uuid'}); + assert_parse_failure(XmlAttribute, {...valid_base_attr, id: ''}); }); test('validates key constraints', () => { - expect_parse_failure(XmlAttribute, {...valid_base_attr, key: ''}); - expect_parse_failure(XmlAttribute, {...valid_base_attr, key: ' '}); + assert_parse_failure(XmlAttribute, {...valid_base_attr, key: ''}); + assert_parse_failure(XmlAttribute, {...valid_base_attr, key: ' '}); }); test('strict mode rejects extra properties', () => { const attr_with_extra = {...valid_base_attr, extra: 'property'}; - expect_parse_failure(XmlAttribute, attr_with_extra); + assert_parse_failure(XmlAttribute, attr_with_extra); }); test('accepts empty values', () => { - expect_parse_success(XmlAttribute, {...valid_base_attr, value: ''}); + assert_parse_success(XmlAttribute, {...valid_base_attr, value: ''}); }); }); describe('XmlAttributeWithDefaults', () => { test('accepts complete attributes', () => { const attr = {id: test_uuid_a, key: 'id', value: 'main'}; - expect_parse_success(XmlAttributeWithDefaults, attr); + assert_parse_success(XmlAttributeWithDefaults, attr); }); test('generates uuid when missing', () => { const attr_no_id = {key: 'class', value: 'test'}; - const result = expect_parse_success(XmlAttributeWithDefaults, attr_no_id); - expect(result?.id).toMatch(uuid_regex); + const result = assert_parse_success(XmlAttributeWithDefaults, attr_no_id); + assert.match(result.id, uuid_regex); }); test('applies key default when missing', () => { const attr_no_key = {id: test_uuid_a, value: 'test'}; - const result = expect_parse_success(XmlAttributeWithDefaults, attr_no_key); - expect(result?.key).toBe('attr'); + const result = 
assert_parse_success(XmlAttributeWithDefaults, attr_no_key); + assert.strictEqual(result.key, 'attr'); }); test('applies value default when missing', () => { const attr_no_value = {id: test_uuid_a, key: 'disabled'}; - const result = expect_parse_success(XmlAttributeWithDefaults, attr_no_value); - expect(result?.value).toBe(''); + const result = assert_parse_success(XmlAttributeWithDefaults, attr_no_value); + assert.strictEqual(result.value, ''); }); test('applies all defaults when minimal input', () => { - const result = expect_parse_success(XmlAttributeWithDefaults, {}); - expect(result?.id).toMatch(uuid_regex); - expect(result?.key).toBe('attr'); - expect(result?.value).toBe(''); + const result = assert_parse_success(XmlAttributeWithDefaults, {}); + assert.match(result.id, uuid_regex); + assert.strictEqual(result.key, 'attr'); + assert.strictEqual(result.value, ''); }); test('handles undefined id explicitly', () => { const attr = {id: undefined, key: 'test', value: 'value'}; - const result = expect_parse_success(XmlAttributeWithDefaults, attr); - expect(result?.id).toMatch(uuid_regex); + const result = assert_parse_success(XmlAttributeWithDefaults, attr); + assert.match(result.id, uuid_regex); }); test('strict mode rejects extra properties', () => { const attr_with_extra = {id: test_uuid_a, key: 'test', value: 'val', extra: 'prop'}; - expect_parse_failure(XmlAttributeWithDefaults, attr_with_extra); + assert_parse_failure(XmlAttributeWithDefaults, attr_with_extra); }); test('validates constraints after applying defaults', () => { const attr_empty_key = {id: test_uuid_a, key: '', value: 'test'}; - expect_parse_failure(XmlAttributeWithDefaults, attr_empty_key); + assert_parse_failure(XmlAttributeWithDefaults, attr_empty_key); }); }); @@ -213,7 +211,7 @@ describe('XML use cases', () => { const boolean_attrs = ['disabled', 'checked', 'selected', 'hidden']; for (const key of boolean_attrs) { const attr = {id: test_uuid_a, key, value: ''}; - 
expect_parse_success(XmlAttributeWithDefaults, attr); + assert_parse_success(XmlAttributeWithDefaults, attr); } }); @@ -221,7 +219,7 @@ describe('XML use cases', () => { const ns_attrs = ['xml:lang', 'xmlns:foo', 'xsi:type', 'data:custom']; for (const key of ns_attrs) { const attr = {id: test_uuid_a, key, value: 'test'}; - expect_parse_success(XmlAttributeWithDefaults, attr); + assert_parse_success(XmlAttributeWithDefaults, attr); } }); @@ -234,7 +232,7 @@ describe('XML use cases', () => { ]; for (const value of complex_values) { const attr = {id: test_uuid_a, key: 'style', value}; - expect_parse_success(XmlAttributeWithDefaults, attr); + assert_parse_success(XmlAttributeWithDefaults, attr); } }); @@ -244,14 +242,14 @@ describe('XML use cases', () => { {key: 'class', value: 'container'}, {key: 'id', value: 'main'}, ]; - const result = expect_parse_success(AttributeArray, attrs); - expect(result).toHaveLength(2); - const first_attr = result?.[0]; - const second_attr = result?.[1]; - expect(first_attr).toBeDefined(); - expect(second_attr).toBeDefined(); - expect(first_attr!.id).toMatch(uuid_regex); - expect(second_attr!.id).toMatch(uuid_regex); + const result = assert_parse_success(AttributeArray, attrs); + assert.strictEqual(result.length, 2); + const first_attr = result[0]; + const second_attr = result[1]; + assert.isDefined(first_attr); + assert.isDefined(second_attr); + assert.match(first_attr.id, uuid_regex); + assert.match(second_attr.id, uuid_regex); }); test('integration with record of attributes', () => { @@ -260,24 +258,24 @@ describe('XML use cases', () => { class_attr: {key: 'class', value: 'container'}, id_attr: {key: 'id', value: 'main'}, }; - expect_parse_success(AttributeRecord, attrs); + assert_parse_success(AttributeRecord, attrs); }); }); describe('error handling', () => { test('provides meaningful error messages', () => { const invalid_attr = {id: 'not-uuid', key: '', value: 123}; - const error = expect_parse_failure(XmlAttributeWithDefaults, 
invalid_attr); + const error = assert_parse_failure(XmlAttributeWithDefaults, invalid_attr); - const issue_paths = error?.issues.map((i) => i.path.join('.')) || []; - expect(issue_paths).toContain('id'); - expect(issue_paths).toContain('key'); - expect(issue_paths).toContain('value'); + const issue_paths = error.issues.map((i) => i.path.join('.')); + assert.include(issue_paths, 'id'); + assert.include(issue_paths, 'key'); + assert.include(issue_paths, 'value'); }); test('handles type coercion failures gracefully', () => { - expect_parse_failure(XmlAttributeWithDefaults, null); - expect_parse_failure(XmlAttributeWithDefaults, 'string'); - expect_parse_failure(XmlAttributeWithDefaults, []); + assert_parse_failure(XmlAttributeWithDefaults, null); + assert_parse_failure(XmlAttributeWithDefaults, 'string'); + assert_parse_failure(XmlAttributeWithDefaults, []); }); }); diff --git a/svelte.config.js b/svelte.config.js index 5a210d080..07fa5a96b 100644 --- a/svelte.config.js +++ b/svelte.config.js @@ -1,3 +1,4 @@ +import adapter from '@sveltejs/adapter-static'; import {vitePreprocess} from '@sveltejs/vite-plugin-svelte'; import {svelte_preprocess_mdz} from '@fuzdev/fuz_ui/svelte_preprocess_mdz.js'; import {svelte_preprocess_fuz_code} from '@fuzdev/fuz_code/svelte_preprocess_fuz_code.js'; @@ -5,19 +6,6 @@ import {svelte_preprocess_fuz_code} from '@fuzdev/fuz_code/svelte_preprocess_fuz // import {create_csp_directives} from '@fuzdev/fuz_ui/csp.js'; // import {csp_trusted_sources_of_fuzdev} from '@fuzdev/fuz_ui/csp_of_fuzdev.js'; -// Dynamically import adapter based on the ZZZ_BUILD env var. -// ZZZ_BUILD=node for production Node server, otherwise static for GitHub Pages. -const build_mode_raw = process.env.ZZZ_BUILD; -// 'static' | 'node' -const build_mode = build_mode_raw === 'node' ? 'node' : 'static'; - -const adapter_module = - build_mode === 'node' - ? 
await import('@sveltejs/adapter-node') - : await import('@sveltejs/adapter-static'); - -const adapter = adapter_module.default; - /** @type {import('@sveltejs/kit').Config} */ export default { preprocess: [svelte_preprocess_mdz(), svelte_preprocess_fuz_code(), vitePreprocess()], @@ -25,7 +13,7 @@ export default { vitePlugin: {inspector: true}, kit: { adapter: adapter(), - paths: {relative: false}, // use root-absolute paths for SSR path comparison: https://kit.svelte.dev/docs/configuration#paths + paths: {relative: false}, // use root-absolute paths for SSR path comparison: https://svelte.dev/docs/kit/configuration#paths alias: {$routes: 'src/routes', '@fuzdev/zzz': 'src/lib'}, // csp: { // directives: create_csp_directives({ diff --git a/test/integration/account_tests.ts b/test/integration/account_tests.ts new file mode 100644 index 000000000..36657dca3 --- /dev/null +++ b/test/integration/account_tests.ts @@ -0,0 +1,415 @@ +/** + * Account management integration tests. + * + * Tests login, logout, password change, session list, and session revocation + * routes. Separated from tests.ts and bearer_tests.ts to keep modules focused. + * + * These tests create dedicated users and sessions to avoid interfering with + * the main test admin account. Most tests are cross-backend — route paths + * differ but behavior is the same. + */ + +import {type BackendConfig, TEST_DATABASE_URL} from './config.ts'; +import {assert_equal, post_rpc, sql_escape} from './test_helpers.ts'; +import type {TestResult} from './tests.ts'; + +/** POST JSON to an account route. 
*/ +const post_account = async ( + config: BackendConfig, + path: string, + body: unknown, + options?: {cookie?: string}, +): Promise<{status: number; body: unknown; set_cookies: string[]}> => { + const headers: Record = {'Content-Type': 'application/json'}; + if (options?.cookie) headers['Cookie'] = options.cookie; + const res = await fetch(`${config.base_url}${path}`, { + method: 'POST', + headers, + body: JSON.stringify(body), + }); + const json = await res.json(); + const set_cookies = res.headers.getSetCookie(); + return {status: res.status, body: json, set_cookies}; +}; + +/** GET an account route. */ +const get_account = async ( + config: BackendConfig, + path: string, + options?: {cookie?: string}, +): Promise<{status: number; body: unknown}> => { + const headers: Record = {}; + if (options?.cookie) headers['Cookie'] = options.cookie; + const res = await fetch(`${config.base_url}${path}`, { + method: 'GET', + headers, + }); + const json = await res.json(); + return {status: res.status, body: json}; +}; + +/** + * Create a test user via psql with a known password. + * + * Returns the account ID. Uses argon2 hash from the Rust bootstrap + * (the password is 'test-login-password-123'). 
+ */ +const create_test_user = async ( + username: string, + password_hash: string, +): Promise => { + const account_id = crypto.randomUUID(); + const actor_id = crypto.randomUUID(); + const sql = ` + INSERT INTO account (id, username, password_hash) + VALUES ('${sql_escape(account_id)}', '${sql_escape(username)}', '${sql_escape(password_hash)}') + ON CONFLICT DO NOTHING; + + INSERT INTO actor (id, account_id, name) + VALUES ('${sql_escape(actor_id)}', '${sql_escape(account_id)}', '${sql_escape(username)}') + ON CONFLICT DO NOTHING; + `; + const cmd = new Deno.Command('psql', { + args: [TEST_DATABASE_URL, '-c', sql], + stdout: 'null', + stderr: 'piped', + }); + const child = cmd.spawn(); + const status = await child.status; + if (!status.success) { + const stderr_text = (await new Response(child.stderr).text()).trim(); + throw new Error(`create_test_user failed: ${stderr_text}`); + } + await child.stderr.cancel(); + return account_id; +}; + +/** + * Hash a password with argon2 via the `argon2` CLI tool. + * + * Falls back to a pre-computed hash if the CLI is not available. + * For test determinism, we use the Rust backend's own argon2 by + * logging in and trusting the hash from bootstrap. + */ + +// Pre-computed argon2id hash for 'test-login-password-123' — only used if +// we need to create accounts directly via SQL. The hash is valid argon2id. +// Generated offline with: echo -n 'test-login-password-123' | argon2 ... +// Actually we can't pre-compute because salt varies. Instead, we'll use +// the login test to verify the bootstrap admin account which already has +// a known password. 
+ +// -- Test definitions --------------------------------------------------------- + +type TestFn = (config: BackendConfig) => Promise; + +const account_test_list: ReadonlyArray<{ + name: string; + fn: TestFn; + skip?: readonly string[]; +}> = [ + { + name: 'login_success', + fn: async (config) => { + // The bootstrap admin account has a known password — use it to test login + const paths = config.account_paths; + if (!paths) throw new Error('account_paths not configured'); + + const {status, body, set_cookies} = await post_account(config, paths.login, { + username: config.auth!.username, + password: config.auth!.password, + }); + assert_equal(status, 200, 'status'); + const r = body as Record; + assert_equal(r.ok, true, 'ok'); + // Should set a session cookie + assert_equal(set_cookies.length > 0, true, 'set session cookie'); + const has_session_cookie = set_cookies.some( + (c) => c.startsWith('fuz_session=') || c.startsWith('zzz_session='), + ); + assert_equal(has_session_cookie, true, 'session cookie present'); + }, + }, + { + name: 'login_invalid_password', + fn: async (config) => { + const paths = config.account_paths; + if (!paths) throw new Error('account_paths not configured'); + + const {status, body} = await post_account(config, paths.login, { + username: config.auth!.username, + password: 'wrong-password-definitely', + }); + assert_equal(status, 401, 'status'); + const r = body as Record; + assert_equal(r.error, 'invalid_credentials', 'error'); + }, + }, + { + name: 'login_nonexistent_user', + fn: async (config) => { + const paths = config.account_paths; + if (!paths) throw new Error('account_paths not configured'); + + const {status, body} = await post_account(config, paths.login, { + username: `nonexistent_user_${Date.now()}`, + password: 'some-password-here-123', + }); + assert_equal(status, 401, 'status'); + const r = body as Record; + assert_equal(r.error, 'invalid_credentials', 'error'); + }, + }, + { + name: 'logout_clears_session', + fn: async 
(config) => { + const paths = config.account_paths; + if (!paths) throw new Error('account_paths not configured'); + + // Login first to get a session cookie + const login_res = await post_account(config, paths.login, { + username: config.auth!.username, + password: config.auth!.password, + }); + assert_equal(login_res.status, 200, 'login status'); + const cookie = login_res.set_cookies.map((c) => c.split(';')[0]).join('; '); + + // Verify cookie works + const verify_res = await post_rpc( + config, + JSON.stringify({jsonrpc: '2.0', id: 'lo-v1', method: 'ping'}), + {cookie}, + ); + assert_equal(verify_res.status, 200, 'cookie works before logout'); + + // Logout + const logout_res = await post_account(config, paths.logout, {}, {cookie}); + assert_equal(logout_res.status, 200, 'logout status'); + const lr = logout_res.body as Record; + assert_equal(lr.ok, true, 'logout ok'); + + // Verify cookie no longer works for authenticated actions + const post_logout = await post_rpc( + config, + JSON.stringify({jsonrpc: '2.0', id: 'lo-v2', method: 'workspace_list'}), + {cookie}, + ); + assert_equal(post_logout.status, 401, 'cookie fails after logout'); + }, + }, + { + name: 'logout_unauthenticated', + fn: async (config) => { + const paths = config.account_paths; + if (!paths) throw new Error('account_paths not configured'); + + const {status} = await post_account(config, paths.logout, {}); + assert_equal(status, 401, 'unauthenticated logout → 401'); + }, + }, + { + name: 'password_change_revokes_all', + fn: async (config) => { + const paths = config.account_paths; + if (!paths) throw new Error('account_paths not configured'); + + // Login to get a session + const login_res = await post_account(config, paths.login, { + username: config.auth!.username, + password: config.auth!.password, + }); + assert_equal(login_res.status, 200, 'login status'); + const cookie = login_res.set_cookies.map((c) => c.split(';')[0]).join('; '); + + // Change password + const new_password = 
'new-password-integration-456'; + const pw_res = await post_account( + config, + paths.password, + { + current_password: config.auth!.password, + new_password, + }, + {cookie}, + ); + assert_equal(pw_res.status, 200, 'password change status'); + const pr = pw_res.body as Record; + assert_equal(pr.ok, true, 'password change ok'); + + // Old cookie should no longer work + const post_change = await post_rpc( + config, + JSON.stringify({jsonrpc: '2.0', id: 'pw-v1', method: 'workspace_list'}), + {cookie}, + ); + assert_equal(post_change.status, 401, 'old cookie fails after password change'); + + // Login with new password should work + const relogin = await post_account(config, paths.login, { + username: config.auth!.username, + password: new_password, + }); + assert_equal(relogin.status, 200, 'relogin with new password'); + + // Restore original password so other tests aren't affected + const restore_cookie = relogin.set_cookies.map((c) => c.split(';')[0]).join('; '); + const restore_res = await post_account( + config, + paths.password, + { + current_password: new_password, + new_password: config.auth!.password, + }, + {cookie: restore_cookie}, + ); + assert_equal(restore_res.status, 200, 'password restore status'); + }, + }, + { + name: 'password_wrong_current', + fn: async (config) => { + const paths = config.account_paths; + if (!paths) throw new Error('account_paths not configured'); + + // Login to get a session + const login_res = await post_account(config, paths.login, { + username: config.auth!.username, + password: config.auth!.password, + }); + assert_equal(login_res.status, 200, 'login status'); + const cookie = login_res.set_cookies.map((c) => c.split(';')[0]).join('; '); + + // Try to change password with wrong current password + const pw_res = await post_account( + config, + paths.password, + { + current_password: 'wrong-current-password-123', + new_password: 'doesnt-matter-at-all-123', + }, + {cookie}, + ); + assert_equal(pw_res.status, 401, 'wrong 
current password → 401'); + }, + }, + { + name: 'session_list', + fn: async (config) => { + const paths = config.account_paths; + if (!paths) throw new Error('account_paths not configured'); + + // Login to get a session + const login_res = await post_account(config, paths.login, { + username: config.auth!.username, + password: config.auth!.password, + }); + assert_equal(login_res.status, 200, 'login status'); + const cookie = login_res.set_cookies.map((c) => c.split(';')[0]).join('; '); + + // List sessions + const {status, body} = await get_account(config, paths.sessions, {cookie}); + assert_equal(status, 200, 'status'); + const r = body as Record; + const sessions = r.sessions as Array>; + assert_equal(Array.isArray(sessions), true, 'sessions is array'); + assert_equal(sessions.length > 0, true, 'at least one session'); + // Check session shape (matches fuz_app AuthSessionJson) + const s = sessions[0]; + assert_equal(typeof s.id, 'string', 'session has id'); + assert_equal(typeof s.account_id, 'string', 'session has account_id'); + assert_equal(typeof s.created_at, 'string', 'session has created_at'); + assert_equal(typeof s.last_seen_at, 'string', 'session has last_seen_at'); + assert_equal(typeof s.expires_at, 'string', 'session has expires_at'); + }, + }, + { + name: 'session_revoke', + fn: async (config) => { + const paths = config.account_paths; + if (!paths) throw new Error('account_paths not configured'); + + // Login twice to get two sessions + const login1 = await post_account(config, paths.login, { + username: config.auth!.username, + password: config.auth!.password, + }); + assert_equal(login1.status, 200, 'login 1 status'); + const cookie1 = login1.set_cookies.map((c) => c.split(';')[0]).join('; '); + + const login2 = await post_account(config, paths.login, { + username: config.auth!.username, + password: config.auth!.password, + }); + assert_equal(login2.status, 200, 'login 2 status'); + const cookie2 = login2.set_cookies.map((c) => 
c.split(';')[0]).join('; '); + + // List sessions from cookie1 + const {body: list_body} = await get_account(config, paths.sessions, {cookie: cookie1}); + const sessions = (list_body as Record).sessions as Array< + Record + >; + assert_equal(sessions.length >= 2, true, 'at least 2 sessions'); + + // Revoke the first session in the list and verify the other still works + const session_to_revoke = sessions[0]; + const revoke_path = paths.session_revoke.replace(':id', session_to_revoke.id as string); + const revoke_res = await post_account(config, revoke_path, {}, {cookie: cookie1}); + assert_equal(revoke_res.status, 200, 'revoke status'); + const rr = revoke_res.body as Record; + assert_equal(rr.ok, true, 'revoke ok'); + assert_equal(rr.revoked, true, 'revoke revoked'); + + // Verify at least one cookie still works (we might have revoked our own, + // but the other should still be valid) + const check1 = await post_rpc( + config, + JSON.stringify({jsonrpc: '2.0', id: 'sr-1', method: 'ping'}), + {cookie: cookie1}, + ); + const check2 = await post_rpc( + config, + JSON.stringify({jsonrpc: '2.0', id: 'sr-2', method: 'ping'}), + {cookie: cookie2}, + ); + // At least one should work + assert_equal( + check1.status === 200 || check2.status === 200, + true, + 'at least one session still works after revoking one', + ); + }, + }, +]; + +// -- Test runner -------------------------------------------------------------- + +export const run_account_tests = async ( + config: BackendConfig, + filter?: string, +): Promise => { + const results: TestResult[] = []; + + if (!config.account_paths) { + return results; + } + + for (const test of account_test_list) { + if (filter && !test.name.includes(filter)) continue; + if (test.skip?.includes(config.name)) continue; + const start = performance.now(); + try { + await test.fn(config); + results.push({name: test.name, passed: true, duration_ms: performance.now() - start}); + } catch (e) { + const message = e instanceof Error ? 
e.message : String(e); + results.push({ + name: test.name, + passed: false, + duration_ms: performance.now() - start, + error: message, + }); + } + } + + return results; +}; diff --git a/test/integration/bearer_tests.ts b/test/integration/bearer_tests.ts new file mode 100644 index 000000000..91d5a0993 --- /dev/null +++ b/test/integration/bearer_tests.ts @@ -0,0 +1,467 @@ +/** + * Bearer token auth integration tests. + * + * Tests API token authentication via `Authorization: Bearer `, + * keeper credential enforcement, and WebSocket session revocation. + * + * Separated from tests.ts to keep test modules focused. + */ + +import {type BackendConfig, TEST_DATABASE_URL} from './config.ts'; +import {assert_equal, hmac_sign, open_ws, post_rpc, sql_escape} from './test_helpers.ts'; +import type {TestResult} from './tests.ts'; +// @ts-ignore — npm specifier, resolved at runtime by Deno +import {hash as blake3_hash} from 'npm:@fuzdev/blake3_wasm'; +// @ts-ignore — npm specifier, resolved at runtime by Deno +import {to_hex} from 'npm:@fuzdev/fuz_util/hex.js'; + +// -- Token setup helpers ------------------------------------------------------ + +/** Raw token value used in integration tests. */ +const BEARER_TOKEN_RAW = 'zzz-integration-test-api-token-value'; +const BEARER_TOKEN_HASH = to_hex( + blake3_hash(new TextEncoder().encode(BEARER_TOKEN_RAW)), +); + +/** Expired token for negative tests. */ +const EXPIRED_TOKEN_RAW = 'zzz-integration-test-expired-token'; +const EXPIRED_TOKEN_HASH = to_hex( + blake3_hash(new TextEncoder().encode(EXPIRED_TOKEN_RAW)), +); + +/** + * Insert API tokens into the test database for the bootstrapped admin account. + * + * Must be called after bootstrap (admin account exists). Uses the admin + * account's UUID from the account table. 
+ */ +export const setup_bearer_tokens = async (): Promise => { + const sql = ` + DO $$ + DECLARE + admin_id UUID; + BEGIN + SELECT id INTO admin_id FROM account WHERE username = 'testadmin'; + IF admin_id IS NULL THEN + RAISE EXCEPTION 'testadmin account not found'; + END IF; + + -- Valid API token (no expiry) + INSERT INTO api_token (id, account_id, name, token_hash) + VALUES ('test-api-token-1', admin_id, 'integration-test-token', '${sql_escape(BEARER_TOKEN_HASH)}') + ON CONFLICT DO NOTHING; + + -- Expired API token + INSERT INTO api_token (id, account_id, name, token_hash, expires_at) + VALUES ('test-api-token-expired', admin_id, 'expired-token', '${sql_escape(EXPIRED_TOKEN_HASH)}', NOW() - INTERVAL '1 day') + ON CONFLICT DO NOTHING; + END $$; + `; + + const cmd = new Deno.Command('psql', { + args: [TEST_DATABASE_URL, '-c', sql], + stdout: 'null', + stderr: 'piped', + }); + const child = cmd.spawn(); + const status = await child.status; + if (!status.success) { + const stderr_text = (await new Response(child.stderr).text()).trim(); + throw new Error(`Bearer token setup failed: ${stderr_text}`); + } + await child.stderr.cancel(); + console.log(' Bearer tokens created'); +}; + +// -- Test definitions --------------------------------------------------------- + +type TestFn = (config: BackendConfig) => Promise; + +const bearer_test_list: ReadonlyArray<{ + name: string; + fn: TestFn; + skip?: readonly string[]; +}> = [ + { + name: 'bearer_token_auth', + fn: async (config) => { + // Valid bearer token → authenticated action succeeds + const {status, body} = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'bt-1', + method: 'workspace_list', + }), + {bearer: BEARER_TOKEN_RAW}, + ); + assert_equal(status, 200, 'status'); + const r = body as Record; + assert_equal(r.id, 'bt-1', 'id'); + const result = r.result as Record; + assert_equal(Array.isArray(result.workspaces), true, 'has workspaces array'); + }, + }, + { + name: 'bearer_token_invalid', + fn: 
async (config) => { + // Invalid bearer token → 401 with JSON-RPC envelope. + // Both backends now soft-fail invalid bearer tokens, so the RPC + // layer produces a consistent JSON-RPC unauthenticated error. + const {status, body} = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'bt-inv-1', + method: 'workspace_list', + }), + {bearer: 'not-a-real-token'}, + ); + assert_equal(status, 401, 'status'); + const r = body as Record; + assert_equal(r.id, 'bt-inv-1', 'id'); + const error = r.error as Record; + assert_equal(error.code, -32001, 'error code'); + assert_equal(error.message, 'unauthenticated', 'error message'); + }, + }, + { + name: 'bearer_token_expired', + fn: async (config) => { + // Expired bearer token → 401 with JSON-RPC envelope (same as invalid) + const {status, body} = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'bt-exp-1', + method: 'workspace_list', + }), + {bearer: EXPIRED_TOKEN_RAW}, + ); + assert_equal(status, 401, 'status'); + const r = body as Record; + assert_equal(r.id, 'bt-exp-1', 'id'); + const error = r.error as Record; + assert_equal(error.code, -32001, 'error code'); + assert_equal(error.message, 'unauthenticated', 'error message'); + }, + }, + { + name: 'bearer_token_public_action', + fn: async (config) => { + // Bearer token on a public action → success (auth is optional) + const {status, body} = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'bt-pub-1', + method: 'ping', + }), + {bearer: BEARER_TOKEN_RAW}, + ); + assert_equal(status, 200, 'status'); + const r = body as Record; + assert_equal(r.id, 'bt-pub-1', 'id'); + const result = r.result as Record; + assert_equal(result.ping_id, 'bt-pub-1', 'ping_id'); + }, + }, + { + name: 'bearer_token_ws', + fn: async (config) => { + // Bearer token on WebSocket upgrade → authenticated WS actions work + const conn = await open_ws(config, {bearer: BEARER_TOKEN_RAW}); + try { + conn.send( + JSON.stringify({jsonrpc: '2.0', id: 
'bt-ws-1', method: 'workspace_list'}), + ); + const r = (await conn.receive()) as Record; + assert_equal(r.id, 'bt-ws-1', 'id'); + const result = r.result as Record; + assert_equal(Array.isArray(result.workspaces), true, 'workspaces is array'); + } finally { + conn.close(); + } + }, + }, + { + name: 'bearer_token_ws_rejected_invalid', + fn: async (config) => { + // Invalid bearer token on WebSocket → connection rejected + try { + const conn = await open_ws(config, {bearer: 'invalid-token'}); + conn.close(); + throw new Error('WebSocket connected with invalid bearer — expected rejection'); + } catch (e) { + // Expected: connection rejected + if (e instanceof Error && e.message.includes('expected rejection')) { + throw e; + } + // Any other error = rejection, which is correct + } + }, + }, + { + name: 'keeper_requires_daemon_token', + // Both backends enforce daemon_token credential type for keeper actions. + fn: async (config) => { + // API token (bearer) with keeper role account calling keeper action → 403 + // The admin account has keeper permit, but bearer credential type is + // api_token, not daemon_token — keeper actions must be rejected. + const {status, body} = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'bt-keeper-1', + method: 'provider_update_api_key', + params: {provider_name: 'claude', api_key: 'sk-test'}, + }), + {bearer: BEARER_TOKEN_RAW}, + ); + assert_equal(status, 403, 'status'); + const r = body as Record; + assert_equal(r.id, 'bt-keeper-1', 'id'); + const error = r.error as Record; + assert_equal(error.code, -32002, 'error code'); + assert_equal(error.message, 'forbidden', 'error message'); + }, + }, + { + name: 'ws_revocation_on_session_delete', + fn: async (config) => { + // Open a WS connection with a session cookie, delete the session + // from the DB, call close_sockets_for_session, verify WS drops. 
+ // + // Since we can't call close_sockets_for_session directly from + // the test, we delete the session and verify the next WS action + // after a re-auth attempt fails. Instead, we test the simpler + // case: open WS, verify it works, then verify a new WS with a + // deleted session can't connect. + // + // Actually test the infrastructure: create a dedicated session, + // open WS with it, delete the session from DB, then verify the + // connection still works for existing messages (no per-message + // revalidation) but new connections fail. + const dedicated_token = 'zzz-revocation-test-session-token'; + const token_hash = to_hex( + blake3_hash(new TextEncoder().encode(dedicated_token)), + ); + + // Create a dedicated session in the DB + const create_sql = ` + INSERT INTO auth_session (id, account_id, expires_at) + SELECT '${sql_escape(token_hash)}', id, NOW() + INTERVAL '30 days' + FROM account WHERE username = 'testadmin' + ON CONFLICT DO NOTHING; + `; + const create_cmd = new Deno.Command('psql', { + args: [TEST_DATABASE_URL, '-c', create_sql], + stdout: 'null', + stderr: 'null', + }); + const create_status = await (await create_cmd.spawn()).status; + assert_equal(create_status.success, true, 'session created'); + + // Sign the cookie + const expires_at = Math.floor(Date.now() / 1000) + 60 * 60 * 24 * 30; + const cookie_key = config.env?.SECRET_COOKIE_KEYS; + if (!cookie_key) throw new Error('SECRET_COOKIE_KEYS not configured'); + const cookie_value = await hmac_sign( + `${dedicated_token}:${expires_at}`, + cookie_key, + ); + // Both cookie names: Rust uses fuz_session, Deno uses zzz_session + const cookie = `fuz_session=${cookie_value}; zzz_session=${cookie_value}`; + + // Verify the session works + const {status} = await post_rpc( + config, + JSON.stringify({jsonrpc: '2.0', id: 'rev-1', method: 'ping'}), + {cookie}, + ); + assert_equal(status, 200, 'session works before delete'); + + // Delete the session from DB + const delete_sql = `DELETE FROM 
auth_session WHERE id = '${sql_escape(token_hash)}';`; + const delete_cmd = new Deno.Command('psql', { + args: [TEST_DATABASE_URL, '-c', delete_sql], + stdout: 'null', + stderr: 'null', + }); + await (await delete_cmd.spawn()).status; + + // New request with deleted session → 401 + const {status: post_delete_status, body: post_delete_body} = await post_rpc( + config, + JSON.stringify({jsonrpc: '2.0', id: 'rev-2', method: 'workspace_list'}), + {cookie}, + ); + assert_equal(post_delete_status, 401, 'deleted session → 401'); + const error = (post_delete_body as Record).error as Record< + string, + unknown + >; + assert_equal(error.code, -32001, 'error code'); + }, + }, + { + name: 'bearer_rejects_browser_context_origin', + // Both backends silently discard bearer in browser context (Origin present). + // Bearer is ignored → no auth → unauthenticated 401. + fn: async (config) => { + const headers: Record = { + 'Content-Type': 'application/json', + Authorization: `Bearer ${BEARER_TOKEN_RAW}`, + Origin: 'http://localhost:5173', + }; + const res = await fetch(`${config.base_url}${config.rpc_path}`, { + method: 'POST', + headers, + body: JSON.stringify({ + jsonrpc: '2.0', + id: 'bt-browser-1', + method: 'workspace_list', + }), + }); + const body = (await res.json()) as Record; + assert_equal(res.status, 401, 'status'); + const error = body.error as Record; + assert_equal(error.code, -32001, 'error code'); + }, + }, + { + name: 'bearer_rejects_browser_context_referer', + // Same defense-in-depth but triggered by Referer instead of Origin. 
+ fn: async (config) => { + const headers: Record = { + 'Content-Type': 'application/json', + Authorization: `Bearer ${BEARER_TOKEN_RAW}`, + Referer: 'http://localhost:5173/chats', + }; + const res = await fetch(`${config.base_url}${config.rpc_path}`, { + method: 'POST', + headers, + body: JSON.stringify({ + jsonrpc: '2.0', + id: 'bt-referer-1', + method: 'workspace_list', + }), + }); + const body = (await res.json()) as Record; + assert_equal(res.status, 401, 'status'); + const error = body.error as Record; + assert_equal(error.code, -32001, 'error code'); + }, + }, + { + name: 'bearer_empty_value', + fn: async (config) => { + // "Authorization: Bearer " with nothing after → treated as no auth. + // Both backends soft-fail → JSON-RPC unauthenticated error. + const {status, body} = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'bt-empty-1', + method: 'workspace_list', + }), + {bearer: ''}, + ); + assert_equal(status, 401, 'status'); + const r = body as Record; + assert_equal(r.id, 'bt-empty-1', 'id'); + const error = r.error as Record; + assert_equal(error.code, -32001, 'error code'); + assert_equal(error.message, 'unauthenticated', 'error message'); + }, + }, + { + name: 'bearer_cookie_priority', + // Both backends try cookie auth first. If cookie succeeds, bearer + // is not checked — cookie wins even when bearer is invalid. + fn: async (config) => { + // When both cookie and bearer are present, cookie should win. + // Use a valid cookie + invalid bearer — if cookie wins, request succeeds. + // We need the session cookie, so create a dedicated session. 
+ const dedicated_token = 'zzz-priority-test-session-token'; + const token_hash = to_hex( + blake3_hash(new TextEncoder().encode(dedicated_token)), + ); + + const create_sql = ` + INSERT INTO auth_session (id, account_id, expires_at) + SELECT '${sql_escape(token_hash)}', id, NOW() + INTERVAL '30 days' + FROM account WHERE username = 'testadmin' + ON CONFLICT DO NOTHING; + `; + const create_cmd = new Deno.Command('psql', { + args: [TEST_DATABASE_URL, '-c', create_sql], + stdout: 'null', + stderr: 'null', + }); + await (await create_cmd.spawn()).status; + + const expires_at = Math.floor(Date.now() / 1000) + 60 * 60 * 24 * 30; + const cookie_key = config.env?.SECRET_COOKIE_KEYS; + if (!cookie_key) throw new Error('SECRET_COOKIE_KEYS not configured'); + const cookie_value = await hmac_sign( + `${dedicated_token}:${expires_at}`, + cookie_key, + ); + // Both cookie names: Rust uses fuz_session, Deno uses zzz_session + const cookie = `fuz_session=${cookie_value}; zzz_session=${cookie_value}`; + + // Send both valid cookie AND invalid bearer + const headers: Record = { + 'Content-Type': 'application/json', + Cookie: cookie, + Authorization: 'Bearer totally-invalid-token', + }; + const res = await fetch(`${config.base_url}${config.rpc_path}`, { + method: 'POST', + headers, + body: JSON.stringify({ + jsonrpc: '2.0', + id: 'bt-prio-1', + method: 'workspace_list', + }), + }); + const body = await res.json(); + // Cookie should win → 200 success + assert_equal(res.status, 200, 'status (cookie wins over invalid bearer)'); + const r = body as Record; + assert_equal(r.id, 'bt-prio-1', 'id'); + const result = r.result as Record; + assert_equal(Array.isArray(result.workspaces), true, 'has workspaces array'); + }, + }, +]; + +// -- Test runner -------------------------------------------------------------- + +export const run_bearer_tests = async ( + config: BackendConfig, + filter?: string, +): Promise => { + const results: TestResult[] = []; + + for (const test of bearer_test_list) 
{ + if (filter && !test.name.includes(filter)) continue; + if (test.skip?.includes(config.name)) continue; + const start = performance.now(); + try { + await test.fn(config); + results.push({name: test.name, passed: true, duration_ms: performance.now() - start}); + } catch (e) { + const message = e instanceof Error ? e.message : String(e); + results.push({ + name: test.name, + passed: false, + duration_ms: performance.now() - start, + error: message, + }); + } + } + + return results; +}; diff --git a/test/integration/config.ts b/test/integration/config.ts new file mode 100644 index 000000000..250e58445 --- /dev/null +++ b/test/integration/config.ts @@ -0,0 +1,127 @@ +/** + * Backend configurations for integration tests. + * + * Each backend defines how to start/stop it and which endpoints to hit. + */ + +export interface AuthConfig { + /** Path to the bootstrap endpoint. */ + readonly bootstrap_path: string; + /** Token value to write to the token file and send in the bootstrap request. */ + readonly token: string; + /** Filesystem path where the token file is written before server start. */ + readonly token_file: string; + /** Username for the bootstrapped admin account. */ + readonly username: string; + /** Password for the bootstrapped admin account. */ + readonly password: string; +} + +/** Account management route paths (differ between backends). */ +export interface AccountPaths { + readonly login: string; + readonly logout: string; + readonly password: string; + readonly sessions: string; + /** Template with `:id` placeholder for session revocation. */ + readonly session_revoke: string; +} + +export interface BackendConfig { + readonly name: string; + readonly start_command: readonly string[]; + readonly base_url: string; + readonly rpc_path: string; + readonly ws_path: string; + readonly health_path: string; + readonly startup_timeout_ms: number; + /** Extra env vars merged into the child process environment. 
*/ + readonly env?: Readonly>; + /** Auth setup — if present, the runner bootstraps an admin account before tests. */ + readonly auth?: AuthConfig; + /** Account management route paths (differ between backends). */ + readonly account_paths?: AccountPaths; +} + +const INTEGRATION_BOOTSTRAP_TOKEN = 'zzz-integration-test-token'; +const INTEGRATION_TOKEN_FILE = '/tmp/zzz_integration_bootstrap_token'; + +/** Scoped filesystem directory for filesystem integration tests. */ +export const INTEGRATION_SCOPED_DIR = '/tmp/zzz_integration_scoped'; + +/** Zzz app directory for integration tests. */ +export const INTEGRATION_ZZZ_DIR = '/tmp/zzz_integration_zzz_dir'; + +/** Test database URL — defaults to postgres://localhost/zzz_test. */ +export const TEST_DATABASE_URL = + Deno.env.get('TEST_DATABASE_URL') ?? 'postgres://localhost/zzz_test'; + +export const backends: Record = { + deno: { + name: 'deno', + start_command: ['deno', 'task', 'dev:start'], + base_url: 'http://localhost:4460', + rpc_path: '/api/rpc', + ws_path: '/api/ws', + health_path: '/health', + startup_timeout_ms: 15_000, + // Override port so .env.development values don't conflict with test expectations. + // PORT is the server bind var (BaseServerEnv); PUBLIC_SERVER_PROXIED_PORT + // is the SvelteKit frontend var. Both need to agree. 
+ env: { + PORT: '4460', + PUBLIC_SERVER_PROXIED_PORT: '4460', + BOOTSTRAP_TOKEN_PATH: INTEGRATION_TOKEN_FILE, + DATABASE_URL: TEST_DATABASE_URL, + SECRET_COOKIE_KEYS: 'integration-test-cookie-key-min-32-chars', + ALLOWED_ORIGINS: 'http://localhost:*', + PUBLIC_ZZZ_SCOPED_DIRS: INTEGRATION_SCOPED_DIR, + PUBLIC_ZZZ_DIR: INTEGRATION_ZZZ_DIR, + }, + auth: { + bootstrap_path: '/api/account/bootstrap', + token: INTEGRATION_BOOTSTRAP_TOKEN, + token_file: INTEGRATION_TOKEN_FILE, + username: 'testadmin', + password: 'test-password-integration-123', + }, + account_paths: { + login: '/api/account/login', + logout: '/api/account/logout', + password: '/api/account/password', + sessions: '/api/account/sessions', + session_revoke: '/api/account/sessions/:id/revoke', + }, + }, + rust: { + name: 'rust', + start_command: ['cargo', 'run', '--release', '-p', 'zzz_server', '--', '--port', '1174'], + base_url: 'http://localhost:1174', + rpc_path: '/api/rpc', + ws_path: '/api/ws', + health_path: '/health', + startup_timeout_ms: 60_000, // includes compile time on first run + env: { + DATABASE_URL: TEST_DATABASE_URL, + SECRET_COOKIE_KEYS: 'integration-test-cookie-key-min-32-chars', + BOOTSTRAP_TOKEN_PATH: INTEGRATION_TOKEN_FILE, + ALLOWED_ORIGINS: 'http://localhost:*', + PUBLIC_ZZZ_SCOPED_DIRS: INTEGRATION_SCOPED_DIR, + PUBLIC_ZZZ_DIR: INTEGRATION_ZZZ_DIR, + }, + auth: { + bootstrap_path: '/api/account/bootstrap', + token: INTEGRATION_BOOTSTRAP_TOKEN, + token_file: INTEGRATION_TOKEN_FILE, + username: 'testadmin', + password: 'test-password-integration-123', + }, + account_paths: { + login: '/api/account/login', + logout: '/api/account/logout', + password: '/api/account/password', + sessions: '/api/account/sessions', + session_revoke: '/api/account/sessions/:id/revoke', + }, + }, +}; diff --git a/test/integration/run.ts b/test/integration/run.ts new file mode 100644 index 000000000..a255dd895 --- /dev/null +++ b/test/integration/run.ts @@ -0,0 +1,507 @@ +#!/usr/bin/env -S deno run 
--allow-net --allow-run --allow-read --allow-write --allow-env + +/** + * Integration test runner for zzz backends. + * + * Usage: + * deno task test:integration --backend=rust + * deno task test:integration --backend=deno + * deno task test:integration --backend=both (default) + * deno task test:integration --filter=ping (substring match on test name) + * + * Starts a backend, runs the test suite against it, stops it, reports results. + * When running both backends, prints a comparison table at the end. + */ + +import {backends, type BackendConfig, INTEGRATION_SCOPED_DIR, INTEGRATION_ZZZ_DIR, TEST_DATABASE_URL} from './config.ts'; +import {run_tests, type TestResult} from './tests.ts'; +import {run_bearer_tests, setup_bearer_tokens} from './bearer_tests.ts'; +import {run_account_tests} from './account_tests.ts'; +import {hmac_sign, sql_escape} from './test_helpers.ts'; +// @ts-ignore — npm specifier, resolved at runtime by Deno +import {hash as blake3_hash} from 'npm:@fuzdev/blake3_wasm'; +// @ts-ignore — npm specifier, resolved at runtime by Deno +import {to_hex} from 'npm:@fuzdev/fuz_util/hex.js'; + +// -- Child process tracking --------------------------------------------------- + +/** Active backend processes — killed on SIGINT so Ctrl+C doesn't leak them. */ +const active_children: Set = new Set(); + +Deno.addSignalListener('SIGINT', () => { + console.log('\n Interrupted — stopping backends...'); + for (const child of active_children) { + try { + child.kill('SIGTERM'); + } catch { + // Already exited + } + } + Deno.exit(130); // 128 + SIGINT(2) +}); + +// -- Formatting --------------------------------------------------------------- + +const fmt_ms = (ms: number): string => (ms < 10 ? 
`${ms.toFixed(1)}ms` : `${Math.round(ms)}ms`); + +// -- Backend lifecycle -------------------------------------------------------- + +const parse_args = (): {backend: string; filter: string | undefined} => { + let backend = 'both'; + let filter: string | undefined; + + for (const arg of Deno.args) { + if (arg.startsWith('--backend=')) { + backend = arg.slice('--backend='.length); + } else if (arg.startsWith('--filter=')) { + filter = arg.slice('--filter='.length); + } + } + + return {backend, filter}; +}; + +const wait_for_health = async (config: BackendConfig): Promise => { + const url = `${config.base_url}${config.health_path}`; + const deadline = Date.now() + config.startup_timeout_ms; + const poll_interval = 250; + + while (Date.now() < deadline) { + try { + const res = await fetch(url); + if (res.ok) { + await res.body?.cancel(); + return true; + } + await res.body?.cancel(); + } catch { + // Server not ready yet + } + await new Promise((r) => setTimeout(r, poll_interval)); + } + return false; +}; + +const start_backend = async (config: BackendConfig): Promise => { + console.log(`\n Starting ${config.name} backend: ${config.start_command.join(' ')}`); + + const [cmd, ...args] = config.start_command; + const child = new Deno.Command(cmd, { + args, + stdout: 'null', + stderr: 'piped', + env: config.env ? 
{...Deno.env.toObject(), ...config.env} : undefined, + }).spawn(); + + const healthy = await wait_for_health(config); + if (!healthy) { + child.kill('SIGTERM'); + // Drain stderr for diagnostic output before throwing + try { + const err_text = (await new Response(child.stderr).text()).trim(); + if (err_text) { + console.error( + `\n ${config.name} stderr:\n${err_text.split('\n').map((l) => ' ' + l).join('\n')}`, + ); + } + } catch { + // Process already collected + } + throw new Error(`${config.name} backend failed to start within ${config.startup_timeout_ms}ms`); + } + + console.log(` ${config.name} backend ready at ${config.base_url}`); + active_children.add(child); + return child; +}; + +const stop_backend = async (name: string, child: Deno.ChildProcess): Promise => { + console.log(` Stopping ${name} backend`); + active_children.delete(child); + try { + child.kill('SIGTERM'); + } catch { + // Already exited + } + // Drain stderr so the process isn't blocked on a full pipe + try { + await child.stderr.cancel(); + } catch { + // Already consumed or closed + } + // Wait for the process to actually exit to avoid port conflicts + try { + await child.status; + } catch { + // Process already collected + } +}; + +// -- Auth setup --------------------------------------------------------------- + +/** + * Write the bootstrap token file before the server starts. + * The server reads this path at startup to determine bootstrap availability. + */ +const write_bootstrap_token = async (config: BackendConfig): Promise => { + if (!config.auth) return; + await Deno.writeTextFile(config.auth.token_file, config.auth.token); +}; + +/** + * Bootstrap an admin account and return the session cookie. + * Must be called after the server is healthy. Token file must already exist. 
+ */ +const setup_auth = async (config: BackendConfig): Promise => { + if (!config.auth) return undefined; + + const {auth} = config; + + // Bootstrap: create admin account + get session cookie + const res = await fetch(`${config.base_url}${auth.bootstrap_path}`, { + method: 'POST', + headers: {'Content-Type': 'application/json'}, + body: JSON.stringify({ + token: auth.token, + username: auth.username, + password: auth.password, + }), + }); + + if (!res.ok) { + const body = await res.text(); + throw new Error(`Bootstrap failed (${res.status}): ${body}`); + } + + await res.json(); // consume body + + // Extract all Set-Cookie values (session + signature cookies) + const set_cookies = res.headers.getSetCookie(); + if (set_cookies.length === 0) { + throw new Error('Bootstrap succeeded but no session cookie in response'); + } + + // Build Cookie header: "name=value; name2=value2" + const cookie = set_cookies.map((c) => c.split(';')[0]).join('; '); + console.log(` Auth bootstrapped (${set_cookies.length} cookie(s))`); + return cookie; +}; + +/** Clean up the bootstrap token file if it still exists. */ +const cleanup_auth = async (config: BackendConfig): Promise => { + if (!config.auth) return; + try { + await Deno.remove(config.auth.token_file); + } catch { + // Already deleted by bootstrap or doesn't exist + } +}; + +// -- Non-keeper user setup ---------------------------------------------------- + +/** + * Create a non-keeper authenticated user directly in the test database. + * + * Inserts account + actor (no keeper permit) + session via psql, + * then signs a session cookie using HMAC-SHA256. 
+ */ +const setup_non_keeper_user = async (config: BackendConfig): Promise => { + if (!config.auth || !config.env) return undefined; + + const cookie_key = config.env.SECRET_COOKIE_KEYS; + if (!cookie_key) return undefined; + + const session_token = 'test-non-keeper-session-token'; + const token_hash = to_hex(blake3_hash(new TextEncoder().encode(session_token))); + + // expires_at: 30 days from now (seconds since epoch) + const expires_at = Math.floor(Date.now() / 1000) + 60 * 60 * 24 * 30; + + // Insert account, actor (no keeper permit), and session via psql + const sql = ` + INSERT INTO account (id, username, password_hash) + VALUES ('00000000-0000-0000-0000-000000000002', 'testuser', '$argon2id$v=19$m=19456,t=2,p=1$dummy$dummyhash000000000000000000000000000') + ON CONFLICT DO NOTHING; + + INSERT INTO actor (id, account_id, name) + VALUES ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002', 'testuser') + ON CONFLICT DO NOTHING; + + INSERT INTO auth_session (id, account_id, expires_at) + VALUES ('${sql_escape(token_hash)}', '00000000-0000-0000-0000-000000000002', NOW() + INTERVAL '30 days') + ON CONFLICT DO NOTHING; + `; + + const cmd = new Deno.Command('psql', { + args: [TEST_DATABASE_URL, '-c', sql], + stdout: 'null', + stderr: 'piped', + }); + const child = cmd.spawn(); + const status = await child.status; + if (!status.success) { + const stderr_text = (await new Response(child.stderr).text()).trim(); + console.warn(` Non-keeper user setup warning: ${stderr_text}`); + return undefined; + } + await child.stderr.cancel(); + + // Sign the cookie: {session_token}:{expires_at}.{signature} + const cookie_value = await hmac_sign(`${session_token}:${expires_at}`, cookie_key); + // Set both cookie names: Rust uses fuz_session, Deno uses zzz_session + const cookie = `fuz_session=${cookie_value}; zzz_session=${cookie_value}`; + console.log(' Non-keeper user created'); + return cookie; +}; + +/** + * Clean auth tables in the test database before 
a backend run. + * + * Uses TRUNCATE CASCADE to reset all auth state. Runs directly via + * `psql` since we don't want a Postgres client library in the test runner. + */ +const clean_database = async (): Promise => { + const cmd = new Deno.Command('psql', { + args: [ + TEST_DATABASE_URL, + '-c', + `TRUNCATE api_token, auth_session, permit, actor, account, bootstrap_lock, app_settings CASCADE; + INSERT INTO bootstrap_lock (id, bootstrapped) VALUES (1, false) ON CONFLICT (id) DO UPDATE SET bootstrapped = false; + INSERT INTO app_settings (id) VALUES (1) ON CONFLICT DO NOTHING;`, + ], + stdout: 'null', + stderr: 'piped', + }); + const child = cmd.spawn(); + const status = await child.status; + if (!status.success) { + // On first run, tables may not exist yet — that's fine, migrations will create them + const stderr_text = (await new Response(child.stderr).text()).trim(); + if (stderr_text.includes('does not exist')) { + console.log(' DB cleanup skipped (tables not yet created)'); + } else { + console.warn(` DB cleanup warning: ${stderr_text}`); + } + } else { + // Drain stderr + try { + await child.stderr.cancel(); + } catch { + // Already consumed + } + console.log(' DB cleaned'); + } +}; + +// -- Scoped filesystem setup -------------------------------------------------- + +/** Create (or recreate) the scoped directory for filesystem tests. */ +const setup_scoped_dir = async (): Promise => { + try { + await Deno.remove(INTEGRATION_SCOPED_DIR, {recursive: true}); + } catch { + // Didn't exist + } + await Deno.mkdir(INTEGRATION_SCOPED_DIR, {recursive: true}); + console.log(` Scoped dir ready: ${INTEGRATION_SCOPED_DIR}`); +}; + +/** Clean up the scoped directory after a backend run. 
*/ +const cleanup_scoped_dir = async (): Promise => { + try { + await Deno.remove(INTEGRATION_SCOPED_DIR, {recursive: true}); + } catch { + // Already gone + } +}; + +// -- Zzz dir setup ------------------------------------------------------------ + +/** Create (or recreate) the zzz directory for session_load tests. */ +const setup_zzz_dir = async (): Promise => { + try { + await Deno.remove(INTEGRATION_ZZZ_DIR, {recursive: true}); + } catch { + // Didn't exist + } + await Deno.mkdir(INTEGRATION_ZZZ_DIR, {recursive: true}); + console.log(` Zzz dir ready: ${INTEGRATION_ZZZ_DIR}`); +}; + +/** Clean up the zzz directory after a backend run. */ +const cleanup_zzz_dir = async (): Promise => { + try { + await Deno.remove(INTEGRATION_ZZZ_DIR, {recursive: true}); + } catch { + // Already gone + } +}; + +// -- Per-backend run ---------------------------------------------------------- + +interface BackendRun { + name: string; + results: TestResult[]; + passed: number; + failed: number; + total_ms: number; +} + +const run_for_backend = async (config: BackendConfig, filter?: string): Promise => { + console.log(`\n${'='.repeat(60)}`); + console.log(` Backend: ${config.name}`); + console.log(`${'='.repeat(60)}`); + + let child: Deno.ChildProcess | null = null; + try { + await clean_database(); + await setup_scoped_dir(); + await setup_zzz_dir(); + await write_bootstrap_token(config); + child = await start_backend(config); + const session_cookie = await setup_auth(config); + const non_keeper_cookie = await setup_non_keeper_user(config); + await setup_bearer_tokens(); + const results = await run_tests(config, filter, session_cookie, non_keeper_cookie); + const bearer_results = await run_bearer_tests(config, filter); + results.push(...bearer_results); + const account_results = await run_account_tests(config, filter); + results.push(...account_results); + + let passed = 0; + let failed = 0; + + for (const r of results) { + const time = fmt_ms(r.duration_ms).padStart(8); + if 
(r.passed) { + console.log(` PASS ${time} ${r.name}`); + passed++; + } else { + console.log(` FAIL ${time} ${r.name}`); + console.log(` ${r.error}`); + failed++; + } + } + + const total_ms = results.reduce((sum, r) => sum + r.duration_ms, 0); + console.log(`\n ${passed} passed, ${failed} failed in ${fmt_ms(total_ms)}`); + return {name: config.name, results, passed, failed, total_ms}; + } finally { + await cleanup_auth(config); + await cleanup_scoped_dir(); + await cleanup_zzz_dir(); + if (child) await stop_backend(config.name, child); + } +}; + +// -- Comparison table --------------------------------------------------------- + +/** Tests with a fixed wait floor that skews timing comparison. */ +const SILENCE_TESTS = new Set(['notification_ws']); + +/** Format speedup ratio: >= 10 → 1 decimal, < 10 → 2 decimals. */ +const fmt_ratio = (r: number): string => (r >= 10 ? `${r.toFixed(1)}x` : `${r.toFixed(2)}x`); + +/** Format speedup/slowdown comparison (baseline / current). */ +const fmt_comparison = (baseline: number, current: number): string => { + const ratio = baseline / current; + if (ratio >= 1) return `${fmt_ratio(ratio)} faster`; + return `${fmt_ratio(1 / ratio)} slower`; +}; + +const print_comparison = (runs: BackendRun[]): void => { + if (runs.length < 2) return; + + // Build lookup: test name → duration per backend + const by_test = new Map>(); + for (const run of runs) { + for (const r of run.results) { + if (!by_test.has(r.name)) by_test.set(r.name, new Map()); + by_test.get(r.name)!.set(run.name, r.duration_ms); + } + } + + const names = runs.map((r) => r.name); + const col_w = 10; + + console.log(`\n${'='.repeat(60)}`); + console.log(` Comparison (${names[1]} vs ${names[0]})`); + console.log(`${'='.repeat(60)}\n`); + + const header = ' ' + 'test'.padEnd(36) + names.map((n) => n.padStart(col_w)).join(''); + console.log(header); + console.log(' ' + '-'.repeat(header.length - 2)); + + const totals = names.map(() => 0); + const totals_excl = names.map(() => 
0); + + for (const [test_name, timings] of by_test) { + const is_silence = SILENCE_TESTS.has(test_name); + const times = names.map((n) => timings.get(n) ?? 0); + + times.forEach((t, i) => { + totals[i] += t; + if (!is_silence) totals_excl[i] += t; + }); + + const time_cols = times.map((t) => fmt_ms(t).padStart(col_w)).join(''); + + let cmp_str = ''; + if (times.length >= 2 && times[0] > 0 && times[1] > 0) { + cmp_str = is_silence ? ' (silence)' : ` ${fmt_comparison(times[0], times[1])}`; + } + + const label = is_silence ? `${test_name} *` : test_name; + console.log(` ${label.padEnd(36)}${time_cols}${cmp_str}`); + } + + // Totals + console.log(' ' + '-'.repeat(header.length - 2)); + const total_cols = totals.map((t) => fmt_ms(t).padStart(col_w)).join(''); + console.log(` ${'total'.padEnd(36)}${total_cols}`); + + const excl_cols = totals_excl.map((t) => fmt_ms(t).padStart(col_w)).join(''); + const excl_cmp = + totals_excl[0] > 0 && totals_excl[1] > 0 + ? ` ${fmt_comparison(totals_excl[0], totals_excl[1])}` + : ''; + console.log(` ${'total (excl silence)'.padEnd(36)}${excl_cols}${excl_cmp}`); + + console.log('\n * silence tests have a fixed wait floor — excluded from comparison'); +}; + +// -- Main --------------------------------------------------------------------- + +const main = async (): Promise => { + const {backend: backend_arg, filter} = parse_args(); + const targets: BackendConfig[] = []; + + if (backend_arg === 'both') { + targets.push(backends.deno, backends.rust); + } else if (backends[backend_arg]) { + targets.push(backends[backend_arg]); + } else { + console.error(`Unknown backend: ${backend_arg}. 
Use: deno, rust, or both`); + Deno.exit(1); + } + + const runs: BackendRun[] = []; + let all_passed = true; + for (const config of targets) { + const run = await run_for_backend(config, filter); + runs.push(run); + if (run.failed > 0) all_passed = false; + } + + print_comparison(runs); + + console.log(`\n${'='.repeat(60)}`); + if (all_passed) { + console.log(' All backends passed'); + } else { + console.log(' Some tests failed'); + Deno.exit(1); + } +}; + +await main(); diff --git a/test/integration/test_helpers.ts b/test/integration/test_helpers.ts new file mode 100644 index 000000000..f46998599 --- /dev/null +++ b/test/integration/test_helpers.ts @@ -0,0 +1,191 @@ +/** + * Shared helpers for integration tests. + * + * Assertion utilities, HTTP/WebSocket helpers, crypto helpers, + * and common types used across test modules. + */ + +import {type BackendConfig} from './config.ts'; + +// -- Crypto helpers ----------------------------------------------------------- + +/** + * HMAC-SHA256 sign a value. + * + * Returns `{value}.{base64(signature)}` — same format as auth.rs `Keyring::sign` + * and fuz_app's `sign_with_crypto_key`. + */ +export const hmac_sign = async (value: string, key_str: string): Promise => { + const encoder = new TextEncoder(); + const key = await crypto.subtle.importKey( + 'raw', + encoder.encode(key_str), + {name: 'HMAC', hash: 'SHA-256'}, + false, + ['sign'], + ); + const signature = await crypto.subtle.sign('HMAC', key, encoder.encode(value)); + const sig_b64 = btoa(String.fromCharCode(...new Uint8Array(signature))); + return `${value}.${sig_b64}`; +}; + +// -- SQL helpers -------------------------------------------------------------- + +/** + * Escape a string for safe SQL single-quote interpolation. + * + * Doubles single quotes per the SQL standard. Use at every `'${...}'` + * interpolation site when building SQL for psql. 
+ */ +export const sql_escape = (value: string): string => value.replaceAll("'", "''"); + +// -- URL helpers -------------------------------------------------------------- + +export const rpc_url = (config: BackendConfig): string => `${config.base_url}${config.rpc_path}`; +export const ws_url = (config: BackendConfig): string => { + const url = new URL(config.ws_path, config.base_url); + url.protocol = url.protocol === 'https:' ? 'wss:' : 'ws:'; + return url.href; +}; + +// -- HTTP helpers ------------------------------------------------------------- + +/** POST a raw string body to the RPC endpoint. */ +export const post_rpc = async ( + config: BackendConfig, + body: string, + options?: {cookie?: string; bearer?: string}, +): Promise<{status: number; body: unknown}> => { + const headers: Record = {'Content-Type': 'application/json'}; + if (options?.cookie) headers['Cookie'] = options.cookie; + if (options?.bearer !== undefined) headers['Authorization'] = `Bearer ${options.bearer}`; + const res = await fetch(rpc_url(config), { + method: 'POST', + headers, + body, + }); + const json = await res.json(); + return {status: res.status, body: json}; +}; + +// -- WebSocket helpers -------------------------------------------------------- + +/** Persistent WebSocket connection handle for multi-message tests. */ +export interface WsConnection { + send(message: string): void; + receive(timeout_ms?: number): Promise; + expect_silence(timeout_ms?: number): Promise; + close(): void; +} + +/** Open a WebSocket connection, resolves once connected. */ +export const open_ws = ( + config: BackendConfig, + options?: {cookie?: string; bearer?: string}, +): Promise => + new Promise((resolve, reject) => { + const ws_headers: Record = {}; + if (options?.cookie) ws_headers['Cookie'] = options.cookie; + if (options?.bearer !== undefined) ws_headers['Authorization'] = `Bearer ${options.bearer}`; + const ws_options = Object.keys(ws_headers).length > 0 ? 
{headers: ws_headers} : undefined; + const ws = new WebSocket(ws_url(config), ws_options as unknown as string[]); + const pending: Array<{ + resolve: (value: unknown) => void; + reject: (error: Error) => void; + timer: ReturnType; + silent: boolean; + }> = []; + + ws.onmessage = (event) => { + const data = JSON.parse(String(event.data)); + const waiter = pending.shift(); + if (!waiter) return; + clearTimeout(waiter.timer); + if (waiter.silent) { + waiter.reject(new Error(`expected no response, got: ${JSON.stringify(data)}`)); + } else { + waiter.resolve(data); + } + }; + + ws.onerror = (event) => { + const err = new Error(`WebSocket error: ${event}`); + if (pending.length > 0) { + const waiter = pending.shift()!; + clearTimeout(waiter.timer); + waiter.reject(err); + } else { + reject(err); + } + }; + + ws.onopen = () => + resolve({ + send: (message) => ws.send(message), + receive: (timeout_ms = 5_000) => + new Promise((res, rej) => { + const timer = setTimeout(() => { + pending.shift(); + rej(new Error('WebSocket response timeout')); + }, timeout_ms); + pending.push({resolve: res, reject: rej, timer, silent: false}); + }), + expect_silence: (timeout_ms = 1_000) => + new Promise((res, rej) => { + const timer = setTimeout(() => { + pending.shift(); + res(); + }, timeout_ms); + pending.push({resolve: res, reject: rej, timer, silent: true}); + }), + close: () => ws.close(), + }); + + // Handle connection rejection (e.g. 401 at upgrade) + ws.onclose = (event) => { + if (event.code !== 1000) { + const err = new Error(`WebSocket closed: code=${event.code} reason=${event.reason}`); + reject(err); + } + }; + }); + +/** + * Ensure a WebSocket connection is fully registered on the server. + * + * After `open_ws` resolves (onopen), the server's `handle_connection` task + * may not have called `add_connection` yet. A round-trip RPC proves the + * connection loop is running and the connection is in `app.connections`. 
+ */ +export const ensure_ws_registered = async (conn: WsConnection): Promise => { + conn.send(JSON.stringify({jsonrpc: '2.0', id: '_warmup', method: 'ping'})); + await conn.receive(); +}; + +// -- Assertion helpers -------------------------------------------------------- + +export const assert_equal = (actual: unknown, expected: unknown, label: string): void => { + if (actual !== expected) { + throw new Error(`${label}: expected ${JSON.stringify(expected)}, got ${JSON.stringify(actual)}`); + } +}; + +/** Recursively sort object keys so key order doesn't affect comparison. */ +export const sort_keys = (v: unknown): unknown => { + if (v === null || typeof v !== 'object') return v; + if (Array.isArray(v)) return v.map(sort_keys); + const sorted: Record = {}; + for (const k of Object.keys(v as Record).sort()) { + sorted[k] = sort_keys((v as Record)[k]); + } + return sorted; +}; + +/** Exact deep equality (key-order-independent). */ +export const assert_deep_equal = (actual: unknown, expected: unknown, label: string): void => { + const a = JSON.stringify(sort_keys(actual)); + const e = JSON.stringify(sort_keys(expected)); + if (a !== e) { + throw new Error(`${label}\n expected: ${e}\n actual: ${a}`); + } +}; diff --git a/test/integration/tests.ts b/test/integration/tests.ts new file mode 100644 index 000000000..d1bcb76e0 --- /dev/null +++ b/test/integration/tests.ts @@ -0,0 +1,1918 @@ +/** + * Integration test suite for zzz backends. + * + * Tests JSON-RPC 2.0 over HTTP and WebSocket, asserting identical behaviour + * between the Deno reference backend and the Rust backend. + * + * Most tests are data-driven tables (http_cases, ws_cases) — adding a test + * case is just adding a row. Special tests that need unique control flow + * (silence assertions, persistent connections, non-RPC endpoints) are + * separate functions in `special_tests`. Tests requiring a non-keeper + * authenticated cookie are in `non_keeper_tests`. 
+ */ + +import {INTEGRATION_SCOPED_DIR, INTEGRATION_ZZZ_DIR, type BackendConfig} from './config.ts'; +import { + post_rpc, + open_ws, + ensure_ws_registered, + assert_equal, + assert_deep_equal, + ws_url, +} from './test_helpers.ts'; + +export interface TestResult { + name: string; + passed: boolean; + duration_ms: number; + error?: string; +} + +/** + * Omit `error.data` from a JSON-RPC error response ONLY when the expected + * response doesn't specify it. This handles a known asymmetry: Deno (fuz_app) + * includes Zod validation issues in `error.data`, Rust omits it pending + * Phase 2 validation detail support. See TODO in `crates/zzz_server/src/rpc.rs`. + * + * NOT a general tolerance — if expected specifies `error.data`, exact match + * is enforced. If actual has unexpected non-data fields, the comparison fails. + */ +const normalize_error_data = ( + actual: unknown, + expected: unknown, +): {actual: unknown; expected: unknown} => { + if ( + actual !== null && + typeof actual === 'object' && + !Array.isArray(actual) && + expected !== null && + typeof expected === 'object' && + !Array.isArray(expected) + ) { + const a = actual as Record; + const e = expected as Record; + if ( + 'error' in a && + typeof a.error === 'object' && + a.error !== null && + 'error' in e && + typeof e.error === 'object' && + e.error !== null + ) { + const a_err = a.error as Record; + const e_err = e.error as Record; + // Only omit if actual has data but expected doesn't mention it + if ('data' in a_err && !('data' in e_err)) { + const {data: _, ...a_err_rest} = a_err; + return {actual: {...a, error: a_err_rest}, expected}; + } + } + } + return {actual, expected}; +}; + +// == Table-driven test cases ================================================== +// +// Each row is a test. The runner posts the body, asserts the status and +// response. To add a test case, add a row — no function needed. + +/** HTTP test case: POST body → assert status + response. 
*/ +interface HttpCase { + readonly name: string; + /** Object → JSON.stringify'd, string → sent raw. */ + readonly body: unknown; + readonly status: number; + /** Expected response body. Use `assert_equal` for primitives, `assert_deep_equal` for objects. */ + readonly expected: unknown; + /** Optional comment shown in test output on failure. */ + readonly comment?: string; + /** Skip for specific backends. */ + readonly skip?: readonly string[]; +} + +/** WebSocket test case: send message → assert response. */ +interface WsCase { + readonly name: string; + /** Always sent as a string (raw text frame). Object bodies need JSON.stringify in the value. */ + readonly message: string; + readonly expected: unknown; + readonly comment?: string; + readonly skip?: readonly string[]; +} + +// -- HTTP cases --------------------------------------------------------------- + +// Rust backend wire format aligned with fuz_app's create_rpc_endpoint (2026-04-11). +// HTTP status mapping, parse error envelopes, notification rejection, and id +// validation now match Deno. All tests pass on both backends with 0 skips. 
+const http_cases: readonly HttpCase[] = [ + // Ping — happy path + { + name: 'ping_http', + body: {jsonrpc: '2.0', id: 'test-1', method: 'ping'}, + status: 200, + expected: {jsonrpc: '2.0', id: 'test-1', result: {ping_id: 'test-1'}}, + }, + { + name: 'ping_numeric_id', + body: {jsonrpc: '2.0', id: 42, method: 'ping'}, + status: 200, + expected: {jsonrpc: '2.0', id: 42, result: {ping_id: 42}}, + }, + { + name: 'null_id_is_invalid', + body: {jsonrpc: '2.0', id: null, method: 'nonexistent'}, + status: 400, + expected: { + jsonrpc: '2.0', + id: null, + error: {code: -32600, message: 'invalid request'}, + }, + comment: 'id:null is not a valid JsonrpcRequestId (string|number only, per MCP)', + }, + + // Parse errors — full JSON-RPC envelope, HTTP 400 + { + name: 'parse_error_http', + body: 'not json at all', + status: 400, + expected: {jsonrpc: '2.0', id: null, error: {code: -32700, message: 'parse error'}}, + }, + { + name: 'parse_error_empty_body', + body: '', + status: 400, + expected: {jsonrpc: '2.0', id: null, error: {code: -32700, message: 'parse error'}}, + }, + + // Method not found — HTTP 404 + { + name: 'method_not_found_http', + body: {jsonrpc: '2.0', id: 'mnf-1', method: 'nonexistent'}, + status: 404, + expected: { + jsonrpc: '2.0', + id: 'mnf-1', + error: {code: -32601, message: 'method not found: nonexistent'}, + }, + }, + + // Invalid requests — HTTP 400 + { + name: 'invalid_request_missing_method', + body: {jsonrpc: '2.0', id: 'ir-1'}, + status: 400, + expected: {jsonrpc: '2.0', id: 'ir-1', error: {code: -32600, message: 'invalid request'}}, + comment: 'valid JSON-RPC object with id but no method', + }, + { + name: 'invalid_request_not_object', + body: '"just a string"', + status: 400, + expected: { + jsonrpc: '2.0', + id: null, + error: {code: -32600, message: 'invalid request'}, + }, + comment: 'non-object body — fuz_app safeParse returns id: null', + }, + { + name: 'invalid_request_bad_version', + body: {jsonrpc: '1.0', id: 'bv-1', method: 'ping'}, + 
status: 400, + expected: {jsonrpc: '2.0', id: 'bv-1', error: {code: -32600, message: 'invalid request'}}, + comment: 'wrong jsonrpc version', + }, + { + name: 'invalid_request_missing_version', + body: {id: 'mv-1', method: 'ping'}, + status: 400, + expected: {jsonrpc: '2.0', id: 'mv-1', error: {code: -32600, message: 'invalid request'}}, + comment: 'missing jsonrpc field entirely', + }, + + // Notifications — has method but no id → rejected on HTTP + { + name: 'notification_http', + body: {jsonrpc: '2.0', method: 'ping'}, + status: 400, + expected: { + jsonrpc: '2.0', + id: null, + error: {code: -32600, message: 'invalid request'}, + }, + comment: 'HTTP requires id — notifications rejected until WS Phase 5', + }, +]; + +// -- WebSocket cases ---------------------------------------------------------- + +const ws_cases: readonly WsCase[] = [ + { + name: 'ping_ws', + message: JSON.stringify({jsonrpc: '2.0', id: 'ws-1', method: 'ping'}), + expected: {jsonrpc: '2.0', id: 'ws-1', result: {ping_id: 'ws-1'}}, + }, + { + name: 'parse_error_ws', + message: 'not json at all', + expected: {jsonrpc: '2.0', id: null, error: {code: -32700, message: 'parse error'}}, + }, + { + name: 'method_not_found_ws', + message: JSON.stringify({jsonrpc: '2.0', id: 'mnf-ws-1', method: 'nonexistent'}), + expected: { + jsonrpc: '2.0', + id: 'mnf-ws-1', + error: {code: -32601, message: 'method not found: nonexistent'}, + }, + }, + { + name: 'invalid_request_ws', + message: JSON.stringify({jsonrpc: '2.0', id: 'ir-ws-1'}), + expected: { + jsonrpc: '2.0', + id: 'ir-ws-1', + error: {code: -32600, message: 'invalid request'}, + }, + comment: 'missing method over WS', + }, +]; + +// == Special tests ============================================================ +// +// Tests that need unique control flow: silence assertions, persistent +// connections, non-RPC endpoints. 
+ +type TestFn = (config: BackendConfig, session_cookie?: string) => Promise; + +const special_tests: ReadonlyArray<{name: string; fn: TestFn}> = [ + { + name: 'notification_ws', + fn: async (config, session_cookie) => { + // Notification over WS → no response sent + const conn = await open_ws(config, {cookie: session_cookie}); + try { + conn.send(JSON.stringify({jsonrpc: '2.0', method: 'ping'})); + await conn.expect_silence(); + } finally { + conn.close(); + } + }, + }, + { + name: 'multi_message_ws', + fn: async (config, session_cookie) => { + // Multiple messages on one connection — verify it stays alive + const conn = await open_ws(config, {cookie: session_cookie}); + try { + conn.send(JSON.stringify({jsonrpc: '2.0', id: 'multi-1', method: 'ping'})); + const r1 = await conn.receive(); + assert_deep_equal( + r1, + {jsonrpc: '2.0', id: 'multi-1', result: {ping_id: 'multi-1'}}, + 'first', + ); + + conn.send(JSON.stringify({jsonrpc: '2.0', id: 'multi-2', method: 'ping'})); + const r2 = await conn.receive(); + assert_deep_equal( + r2, + {jsonrpc: '2.0', id: 'multi-2', result: {ping_id: 'multi-2'}}, + 'second', + ); + } finally { + conn.close(); + } + }, + }, + { + name: 'health_check', + fn: async (config) => { + const res = await fetch(`${config.base_url}${config.health_path}`); + assert_equal(res.status, 200, 'status'); + const body = await res.json(); + assert_equal(body.status, 'ok', 'health status'); + }, + }, + { + name: 'workspace_open_and_list', + fn: async (config, session_cookie) => { + const tmp_dir = await Deno.makeTempDir({prefix: 'zzz_test_'}); + try { + // 1. 
Open workspace + const open_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wo-1', + method: 'workspace_open', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + assert_equal(open_res.status, 200, 'open status'); + const open_rpc = open_res.body as Record; + assert_equal(open_rpc.id, 'wo-1', 'open id'); + const open_result = open_rpc.result as Record; + const workspace = open_result.workspace as Record; + + // WorkspaceInfoJson shape (path, name, opened_at) + assert_equal(typeof workspace.path, 'string', 'path is string'); + assert_equal((workspace.path as string).endsWith('/'), true, 'path ends with /'); + assert_equal(typeof workspace.name, 'string', 'name is string'); + assert_equal(typeof workspace.opened_at, 'string', 'opened_at is string'); + assert_equal(Array.isArray(open_result.files), true, 'files is array'); + + // 2. List workspaces — opened workspace must appear + const list_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wl-1', + method: 'workspace_list', + }), + {cookie: session_cookie}, + ); + assert_equal(list_res.status, 200, 'list status'); + const list_rpc = list_res.body as Record; + const list_result = list_rpc.result as Record; + const workspaces = list_result.workspaces as Array>; + assert_equal(Array.isArray(workspaces), true, 'workspaces is array'); + const found = workspaces.some((w) => w.path === workspace.path); + assert_equal(found, true, 'opened workspace in list'); + } finally { + await Deno.remove(tmp_dir, {recursive: true}); + } + }, + }, + { + name: 'workspace_open_idempotent', + fn: async (config, session_cookie) => { + const tmp_dir = await Deno.makeTempDir({prefix: 'zzz_test_'}); + try { + // Open same path twice + const r1 = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wi-1', + method: 'workspace_open', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + assert_equal(r1.status, 200, 'first open status'); + const w1 
= ((r1.body as Record).result as Record) + .workspace as Record; + + const r2 = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wi-2', + method: 'workspace_open', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + assert_equal(r2.status, 200, 'second open status'); + const w2 = ((r2.body as Record).result as Record) + .workspace as Record; + + // Same opened_at — workspace was not re-created + assert_equal(w1.opened_at, w2.opened_at, 'same opened_at'); + assert_equal(w1.path, w2.path, 'same path'); + } finally { + await Deno.remove(tmp_dir, {recursive: true}); + } + }, + }, + { + name: 'workspace_open_nonexistent', + fn: async (config, session_cookie) => { + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wne-1', + method: 'workspace_open', + params: {path: `/tmp/zzz_nonexistent_${Date.now()}`}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 500, 'status'); + const r = res.body as Record; + assert_equal(r.id, 'wne-1', 'id'); + const error = r.error as Record; + assert_equal(error.code, -32603, 'error code'); + assert_equal( + (error.message as string).startsWith( + 'failed to open workspace: directory does not exist:', + ), + true, + 'error message format', + ); + }, + }, + { + name: 'auth_required_without_cookie', + fn: async (config) => { + // Authenticated action without any Cookie header → 401 + const {status, body} = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'auth-1', + method: 'workspace_list', + }), + // no session_cookie + ); + assert_equal(status, 401, 'status'); + const r = body as Record; + assert_equal(r.id, 'auth-1', 'id'); + const error = r.error as Record; + assert_equal(error.code, -32001, 'error code'); + assert_equal(error.message, 'unauthenticated', 'error message'); + }, + }, + { + name: 'auth_required_invalid_cookie', + fn: async (config) => { + // Authenticated action with garbage cookie → 401 + const {status, body} = await 
post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'auth-2', + method: 'workspace_list', + }), + {cookie: 'fuz_session=garbage-invalid-cookie-value'}, + ); + assert_equal(status, 401, 'status'); + const r = body as Record; + assert_equal(r.id, 'auth-2', 'id'); + const error = r.error as Record; + assert_equal(error.code, -32001, 'error code'); + assert_equal(error.message, 'unauthenticated', 'error message'); + }, + }, + { + name: 'auth_public_no_cookie', + fn: async (config) => { + // Public action without any Cookie header → 200 success + const {status, body} = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'auth-3', + method: 'ping', + }), + // no session_cookie + ); + assert_equal(status, 200, 'status'); + const r = body as Record; + assert_equal(r.id, 'auth-3', 'id'); + const result = r.result as Record; + assert_equal(result.ping_id, 'auth-3', 'ping_id'); + }, + }, + { + name: 'workspace_close', + fn: async (config, session_cookie) => { + const tmp_dir = await Deno.makeTempDir({prefix: 'zzz_test_'}); + try { + // 1. Open workspace + const open_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wc-open', + method: 'workspace_open', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + assert_equal(open_res.status, 200, 'open status'); + const workspace = ( + (open_res.body as Record).result as Record + ).workspace as Record; + + // 2. Close workspace — use the normalized path from open response + const close_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wc-close', + method: 'workspace_close', + params: {path: workspace.path}, + }), + {cookie: session_cookie}, + ); + assert_equal(close_res.status, 200, 'close status'); + const close_rpc = close_res.body as Record; + assert_equal(close_rpc.result, null, 'close result is null'); + + // 3. 
List — workspace should be gone + const list_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wc-list', + method: 'workspace_list', + }), + {cookie: session_cookie}, + ); + assert_equal(list_res.status, 200, 'list status'); + const list_result = (list_res.body as Record).result as Record< + string, + unknown + >; + const workspaces = list_result.workspaces as Array>; + const found = workspaces.some((w) => w.path === workspace.path); + assert_equal(found, false, 'closed workspace not in list'); + + // 4. Close again — should error (not open) + // Both backends return -32602 (invalid_params, 400). + const close2_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wc-close2', + method: 'workspace_close', + params: {path: workspace.path}, + }), + {cookie: session_cookie}, + ); + assert_equal(close2_res.status, 400, 'double close status'); + const close2_rpc = close2_res.body as Record; + const error = close2_rpc.error as Record; + assert_equal(error.code, -32602, 'double close error code'); + assert_equal( + (error.message as string).startsWith('workspace not open:'), + true, + 'double close error message format', + ); + } finally { + await Deno.remove(tmp_dir, {recursive: true}); + } + }, + }, + + // -- WebSocket auth tests ----------------------------------------------------- + { + name: 'ws_auth_required', + fn: async (config) => { + // Attempt WebSocket connect without cookies → should be rejected + const url = ws_url(config); + await new Promise((resolve, reject) => { + const ws = new WebSocket(url); + const timer = setTimeout(() => { + ws.close(); + reject(new Error('WebSocket timeout — expected rejection')); + }, 5_000); + + ws.onopen = () => { + clearTimeout(timer); + ws.close(); + reject(new Error('WebSocket connected without auth — expected rejection')); + }; + + ws.onerror = () => { + clearTimeout(timer); + // Error before open = connection rejected (401 at upgrade) + resolve(); + }; + + ws.onclose = (event) 
=> { + clearTimeout(timer); + // Closed without ever opening = rejection + if (event.code !== 1000) { + resolve(); + } else { + reject(new Error('WebSocket closed normally — expected rejection')); + } + }; + }); + }, + }, + + // -- Session load + provider status ------------------------------------------- + { + name: 'session_load_basic', + fn: async (config, session_cookie) => { + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'sl-1', + method: 'session_load', + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + assert_equal(rpc.id, 'sl-1', 'id'); + const result = rpc.result as Record; + const data = result.data as Record; + + // zzz_dir is the canonicalized INTEGRATION_ZZZ_DIR with trailing slash + const zzz_dir = data.zzz_dir as string; + assert_equal(zzz_dir.startsWith('/'), true, 'zzz_dir is absolute'); + assert_equal(zzz_dir.endsWith('/'), true, 'zzz_dir has trailing slash'); + + // scoped_dirs contains the integration scoped dir, absolute with trailing slash + const scoped_dirs = data.scoped_dirs as Array; + assert_equal(scoped_dirs.length >= 1, true, `scoped_dirs has entries (got ${scoped_dirs.length})`); + assert_equal(scoped_dirs[0].startsWith('/'), true, 'scoped_dirs[0] is absolute'); + assert_equal(scoped_dirs[0].endsWith('/'), true, 'scoped_dirs[0] has trailing slash'); + + assert_equal(Array.isArray(data.files), true, 'files is array'); + assert_equal(Array.isArray(data.provider_status), true, 'provider_status is array'); + assert_equal(Array.isArray(data.workspaces), true, 'workspaces is array'); + }, + }, + { + name: 'session_load_returns_zzz_dir_files', + fn: async (config, session_cookie) => { + // Create a test file in zzz_dir before loading session + const test_content = 'session load file test'; + await Deno.writeTextFile(`${INTEGRATION_ZZZ_DIR}/test_session.txt`, test_content); + + const res = await post_rpc( + config, + JSON.stringify({ + 
jsonrpc: '2.0', + id: 'slf-1', + method: 'session_load', + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + const result = rpc.result as Record; + const data = result.data as Record; + const files = data.files as Array>; + + // Find our test file — fail with useful context if missing + const test_file = files.find((f) => (f.id as string).endsWith('/test_session.txt')); + if (!test_file) { + const ids = files.map((f) => f.id); + throw new Error(`test file not found in ${files.length} files: ${JSON.stringify(ids)}`); + } + + assert_equal(test_file.contents, test_content, 'file contents match'); + assert_equal((test_file.source_dir as string).startsWith('/'), true, 'source_dir is absolute'); + assert_equal((test_file.source_dir as string).endsWith('/'), true, 'source_dir has trailing slash'); + assert_equal((test_file.id as string).startsWith('/'), true, 'file id is absolute path'); + assert_deep_equal(test_file.dependents, [], 'dependents is empty array'); + assert_deep_equal(test_file.dependencies, [], 'dependencies is empty array'); + assert_equal(typeof test_file.mtime, 'number', 'mtime is number'); + + // Clean up + await Deno.remove(`${INTEGRATION_ZZZ_DIR}/test_session.txt`); + }, + }, + { + name: 'diskfile_update_in_zzz_dir', + fn: async (config, session_cookie) => { + // ScopedFs should allow writes to zzz_dir + const file_path = `${INTEGRATION_ZZZ_DIR}/test_scoped_write.txt`; + const content = 'write to zzz_dir'; + + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dfu-zzz-1', + method: 'diskfile_update', + params: {path: file_path, content}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + assert_equal(rpc.result, null, 'result is null'); + + // Verify the file exists and has the right content + const actual = await Deno.readTextFile(file_path); + assert_equal(actual, content, 'file 
content'); + + // Clean up + await Deno.remove(file_path); + }, + }, + { + name: 'session_load_returns_nested_files', + fn: async (config, session_cookie) => { + // Create a file in a subdirectory of zzz_dir + await Deno.mkdir(`${INTEGRATION_ZZZ_DIR}/state/nested`, {recursive: true}); + await Deno.writeTextFile(`${INTEGRATION_ZZZ_DIR}/state/nested/deep.txt`, 'nested content'); + + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'sln-1', + method: 'session_load', + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + const result = rpc.result as Record; + const data = result.data as Record; + const files = data.files as Array>; + + const nested_file = files.find((f) => (f.id as string).endsWith('/state/nested/deep.txt')); + if (!nested_file) { + const ids = files.map((f) => f.id); + throw new Error(`nested file not found in ${files.length} files: ${JSON.stringify(ids)}`); + } + assert_equal(nested_file.contents, 'nested content', 'nested file contents'); + + // Clean up + await Deno.remove(`${INTEGRATION_ZZZ_DIR}/state`, {recursive: true}); + }, + }, + { + name: 'diskfile_update_in_zzz_dir_subdirectory', + fn: async (config, session_cookie) => { + // ScopedFs should allow writes to existing subdirectories under zzz_dir + await Deno.mkdir(`${INTEGRATION_ZZZ_DIR}/state/sub`, {recursive: true}); + const file_path = `${INTEGRATION_ZZZ_DIR}/state/sub/new_file.txt`; + const content = 'nested write'; + + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dfu-zzz-sub-1', + method: 'diskfile_update', + params: {path: file_path, content}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + assert_equal(rpc.result, null, 'result is null'); + + const actual = await Deno.readTextFile(file_path); + assert_equal(actual, content, 'file content'); + + // Clean up + await 
Deno.remove(`${INTEGRATION_ZZZ_DIR}/state`, {recursive: true}); + }, + }, + { + name: 'provider_load_status_empty', + fn: async (config, session_cookie) => { + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'pls-1', + method: 'provider_load_status', + params: {provider_name: 'ollama'}, + }), + {cookie: session_cookie}, + ); + const rpc = res.body as Record; + assert_equal(rpc.id, 'pls-1', 'id'); + // Both backends return {status: ProviderStatus} per the action spec + assert_equal(res.status, 200, 'status'); + const result = rpc.result as Record; + const status = result.status as Record; + assert_equal(status.name, 'ollama', 'status.name'); + assert_equal(typeof status.available, 'boolean', 'status.available is boolean'); + assert_equal(typeof status.checked_at, 'number', 'status.checked_at is number'); + if (status.available === false) { + assert_equal(typeof status.error, 'string', 'status.error is string when unavailable'); + } + }, + }, + + // -- WebSocket authenticated action test -------------------------------------- + { + name: 'ws_workspace_list', + fn: async (config, session_cookie) => { + // Authenticated action over WS — workspace_list returns {workspaces: [...]} + const conn = await open_ws(config, {cookie: session_cookie}); + try { + conn.send( + JSON.stringify({jsonrpc: '2.0', id: 'wsl-1', method: 'workspace_list'}), + ); + const r = (await conn.receive()) as Record; + assert_equal(r.id, 'wsl-1', 'id'); + const result = r.result as Record; + assert_equal(Array.isArray(result.workspaces), true, 'workspaces is array'); + } finally { + conn.close(); + } + }, + }, + + // -- workspace_changed notification tests ------------------------------------- + { + name: 'workspace_changed_on_open', + fn: async (config, session_cookie) => { + // Open a WS connection, then open a workspace via HTTP + // → WS client should receive a workspace_changed notification + const conn = await open_ws(config, {cookie: session_cookie}); + await 
ensure_ws_registered(conn); + const tmp_dir = await Deno.makeTempDir({prefix: 'zzz_test_wc_'}); + try { + // Open workspace via HTTP RPC + const open_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wco-1', + method: 'workspace_open', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + assert_equal(open_res.status, 200, 'open status'); + + // WS should receive workspace_changed notification + const notification = (await conn.receive()) as Record; + assert_equal(notification.jsonrpc, '2.0', 'jsonrpc version'); + assert_equal(notification.method, 'workspace_changed', 'method'); + assert_equal('id' in notification, false, 'no id (notification)'); + const params = notification.params as Record; + assert_equal(params.type, 'open', 'change type'); + const workspace = params.workspace as Record; + assert_equal(typeof workspace.path, 'string', 'workspace.path is string'); + assert_equal((workspace.path as string).endsWith('/'), true, 'path ends with /'); + assert_equal(typeof workspace.name, 'string', 'workspace.name is string'); + assert_equal(typeof workspace.opened_at, 'string', 'workspace.opened_at is string'); + } finally { + conn.close(); + // Clean up: close workspace + await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wco-cleanup', + method: 'workspace_close', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + await Deno.remove(tmp_dir, {recursive: true}); + } + }, + }, + { + name: 'workspace_changed_on_close', + fn: async (config, session_cookie) => { + // Open a workspace, then open WS, then close the workspace + // → WS client should receive a workspace_changed close notification + const tmp_dir = await Deno.makeTempDir({prefix: 'zzz_test_wc_'}); + try { + // Open workspace first + const open_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wcc-open', + method: 'workspace_open', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + 
assert_equal(open_res.status, 200, 'open status'); + const workspace = ( + (open_res.body as Record).result as Record + ).workspace as Record; + + // Now open WS connection + const conn = await open_ws(config, {cookie: session_cookie}); + await ensure_ws_registered(conn); + try { + // Close workspace via HTTP + const close_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wcc-close', + method: 'workspace_close', + params: {path: workspace.path}, + }), + {cookie: session_cookie}, + ); + assert_equal(close_res.status, 200, 'close status'); + + // WS should receive workspace_changed close notification + const notification = (await conn.receive()) as Record; + assert_equal(notification.jsonrpc, '2.0', 'jsonrpc version'); + assert_equal(notification.method, 'workspace_changed', 'method'); + assert_equal('id' in notification, false, 'no id (notification)'); + const params = notification.params as Record; + assert_equal(params.type, 'close', 'change type'); + const ws_info = params.workspace as Record; + assert_equal(ws_info.path, workspace.path, 'same workspace path'); + } finally { + conn.close(); + } + } finally { + await Deno.remove(tmp_dir, {recursive: true}); + } + }, + }, + { + name: 'workspace_changed_idempotent_no_notification', + fn: async (config, session_cookie) => { + // Opening an already-open workspace should NOT send a notification + const tmp_dir = await Deno.makeTempDir({prefix: 'zzz_test_wc_'}); + try { + // First open (creates workspace) + await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wci-1', + method: 'workspace_open', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + + // Open WS after first open + const conn = await open_ws(config, {cookie: session_cookie}); + await ensure_ws_registered(conn); + try { + // Second open (idempotent — should NOT broadcast) + await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wci-2', + method: 'workspace_open', + params: {path: 
tmp_dir}, + }), + {cookie: session_cookie}, + ); + + // Should NOT receive any notification + await conn.expect_silence(); + } finally { + conn.close(); + } + } finally { + // Cleanup + await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wci-cleanup', + method: 'workspace_close', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + await Deno.remove(tmp_dir, {recursive: true}); + } + }, + }, + + // -- Filesystem tests --------------------------------------------------------- + { + name: 'diskfile_update_and_read', + fn: async (config, session_cookie) => { + const file_path = `${INTEGRATION_SCOPED_DIR}/test_write.txt`; + const content = 'hello from integration test'; + + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dfu-1', + method: 'diskfile_update', + params: {path: file_path, content}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + assert_equal(rpc.result, null, 'result is null'); + + // Verify the file exists and has the right content + const actual = await Deno.readTextFile(file_path); + assert_equal(actual, content, 'file content'); + }, + }, + { + name: 'diskfile_delete', + fn: async (config, session_cookie) => { + const file_path = `${INTEGRATION_SCOPED_DIR}/test_delete.txt`; + // Create the file first + await Deno.writeTextFile(file_path, 'to be deleted'); + + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dfd-1', + method: 'diskfile_delete', + params: {path: file_path}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + assert_equal(rpc.result, null, 'result is null'); + + // Verify file is gone + try { + await Deno.stat(file_path); + throw new Error('file should not exist after delete'); + } catch (e) { + if (!(e instanceof Deno.errors.NotFound)) throw e; + } + }, + }, + { + name: 'directory_create', + fn: async 
(config, session_cookie) => { + const dir_path = `${INTEGRATION_SCOPED_DIR}/nested/deep/dir`; + + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dc-1', + method: 'directory_create', + params: {path: dir_path}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + assert_equal(rpc.result, null, 'result is null'); + + // Verify directory exists + const stat = await Deno.stat(dir_path); + assert_equal(stat.isDirectory, true, 'is directory'); + }, + }, + { + name: 'diskfile_update_outside_scope', + fn: async (config, session_cookie) => { + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dfo-1', + method: 'diskfile_update', + params: {path: '/tmp/zzz_outside_scope/evil.txt', content: 'nope'}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 500, 'status'); + const rpc = res.body as Record; + const error = rpc.error as Record; + assert_equal(error.code, -32603, 'error code'); + assert_equal( + (error.message as string).startsWith('failed to write file:'), + true, + 'error message format', + ); + }, + }, + { + name: 'diskfile_update_path_traversal', + fn: async (config, session_cookie) => { + // Path traversal via ../ — normalized path escapes scope + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dft-1', + method: 'diskfile_update', + params: {path: `${INTEGRATION_SCOPED_DIR}/../../../tmp/evil.txt`, content: 'nope'}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 500, 'status'); + const rpc = res.body as Record; + const error = rpc.error as Record; + assert_equal(error.code, -32603, 'error code'); + }, + }, + { + name: 'diskfile_update_relative_path', + fn: async (config, session_cookie) => { + // Relative path (not absolute) → rejected as invalid params + // Deno rejects at Zod validation, Rust rejects at handler validation. 
+ const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dfr-1', + method: 'diskfile_update', + params: {path: 'relative/path.txt', content: 'nope'}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 400, 'status'); + const rpc = res.body as Record; + const error = rpc.error as Record; + assert_equal(error.code, -32602, 'error code'); + }, + }, + { + name: 'diskfile_delete_nonexistent', + fn: async (config, session_cookie) => { + // Delete a file that doesn't exist → error + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dfdn-1', + method: 'diskfile_delete', + params: {path: `${INTEGRATION_SCOPED_DIR}/does_not_exist_${Date.now()}.txt`}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 500, 'status'); + const rpc = res.body as Record; + const error = rpc.error as Record; + assert_equal(error.code, -32603, 'error code'); + }, + }, + + { + name: 'directory_create_already_exists', + fn: async (config, session_cookie) => { + // Creating an already-existing directory should succeed (idempotent) + const dir_path = `${INTEGRATION_SCOPED_DIR}/idempotent_dir_${Date.now()}`; + try { + // Create it once + const r1 = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dcae-1', + method: 'directory_create', + params: {path: dir_path}, + }), + {cookie: session_cookie}, + ); + assert_equal(r1.status, 200, 'first create status'); + + // Create it again — should still succeed + const r2 = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'dcae-2', + method: 'directory_create', + params: {path: dir_path}, + }), + {cookie: session_cookie}, + ); + assert_equal(r2.status, 200, 'second create status'); + assert_equal((r2.body as Record).result, null, 'result is null'); + } finally { + try { + await Deno.remove(dir_path, {recursive: true}); + } catch { + // ignore cleanup errors + } + } + }, + }, + { + name: 'workspace_open_not_directory', + fn: async 
(config, session_cookie) => { + // Opening a file (not a directory) as a workspace → error + const file_path = `${INTEGRATION_SCOPED_DIR}/not_a_dir_${Date.now()}.txt`; + try { + await Deno.writeTextFile(file_path, 'content'); + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wond-1', + method: 'workspace_open', + params: {path: file_path}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 500, 'status'); + const rpc = res.body as Record; + const error = rpc.error as Record; + assert_equal(error.code, -32603, 'error code'); + } finally { + try { + await Deno.remove(file_path); + } catch { + // ignore cleanup errors + } + } + }, + }, + { + name: 'filer_change_on_file_create', + fn: async (config, session_cookie) => { + // Open a workspace, create a file in it, verify filer_change notification + const tmp_dir = await Deno.makeTempDir({prefix: 'zzz_test_filer_'}); + try { + // Open workspace + const open_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'fc-open', + method: 'workspace_open', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + assert_equal(open_res.status, 200, 'open status'); + + // Open WS and wait for connection to register + const conn = await open_ws(config, {cookie: session_cookie}); + try { + await ensure_ws_registered(conn); + + // Create a file in the workspace + const new_file = `${tmp_dir}/filer_test_${Date.now()}.txt`; + await Deno.writeTextFile(new_file, 'hello from filer test'); + + // Wait for filer_change notification (file watchers have latency) + let got_notification = false; + for (let i = 0; i < 5 && !got_notification; i++) { + try { + const msg = (await conn.receive(3_000)) as Record; + if (msg.method === 'filer_change') { + const params = msg.params as Record; + const change = params.change as Record; + assert_equal(typeof change.path, 'string', 'change has path'); + assert_equal(typeof change.type, 'string', 'change has type'); + 
got_notification = true; + } + } catch { + // timeout — retry + } + } + assert_equal(got_notification, true, 'received filer_change notification'); + } finally { + conn.close(); + } + + // Clean up workspace + await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'fc-close', + method: 'workspace_close', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + } finally { + try { + await Deno.remove(tmp_dir, {recursive: true}); + } catch { + // ignore + } + } + }, + }, + + // -- Workspace scoped_fs tests ------------------------------------------------ + + { + name: 'workspace_open_adds_to_scoped_fs', + fn: async (config, session_cookie) => { + // Opening a workspace should allow diskfile_update inside it, even + // though it's not in the initial scoped_dirs. Closing should revoke. + const tmp_dir = await Deno.makeTempDir({prefix: 'zzz_test_ws_scope_'}); + const file_path = `${tmp_dir}/scoped_test.txt`; + try { + // Before opening: write should fail (not in scoped_dirs) + const before_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wss-pre', + method: 'diskfile_update', + params: {path: file_path, content: 'before open'}, + }), + {cookie: session_cookie}, + ); + assert_equal(before_res.status, 500, 'pre-open status is error'); + const before_rpc = before_res.body as Record; + const before_error = before_rpc.error as Record; + assert_equal(before_error.code, -32603, 'pre-open error code'); + + // Open workspace + const open_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wss-open', + method: 'workspace_open', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + assert_equal(open_res.status, 200, 'open status'); + + // After opening: write should succeed + const during_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wss-during', + method: 'diskfile_update', + params: {path: file_path, content: 'after open'}, + }), + {cookie: session_cookie}, + 
); + assert_equal(during_res.status, 200, 'during-open status'); + const during_rpc = during_res.body as Record; + assert_equal(during_rpc.result, null, 'during-open result is null'); + + // Verify file was written + const content = await Deno.readTextFile(file_path); + assert_equal(content, 'after open', 'file content matches'); + + // Close workspace + const close_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wss-close', + method: 'workspace_close', + params: {path: tmp_dir}, + }), + {cookie: session_cookie}, + ); + assert_equal(close_res.status, 200, 'close status'); + + // After closing: write should fail again + const after_res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'wss-post', + method: 'diskfile_update', + params: {path: file_path, content: 'after close'}, + }), + {cookie: session_cookie}, + ); + assert_equal(after_res.status, 500, 'post-close status is error'); + const after_rpc = after_res.body as Record; + const after_error = after_rpc.error as Record; + assert_equal(after_error.code, -32603, 'post-close error code'); + } finally { + try { + await Deno.remove(tmp_dir, {recursive: true}); + } catch { + // ignore cleanup errors + } + } + }, + }, + + // -- Terminal tests ----------------------------------------------------------- + + { + name: 'terminal_create_echo', + fn: async (config, session_cookie) => { + // Spawn "echo hello" via WS, receive terminal_data notification with + // output containing "hello", then terminal_exited with exit_code 0. 
+ const conn = await open_ws(config, {cookie: session_cookie}); + try { + await ensure_ws_registered(conn); + + // Create terminal + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'tc-1', + method: 'terminal_create', + params: {command: 'echo', args: ['hello']}, + }), + ); + const create_res = (await conn.receive()) as Record; + assert_equal(create_res.id, 'tc-1', 'create id'); + const create_result = create_res.result as Record; + assert_equal(typeof create_result.terminal_id, 'string', 'terminal_id is string'); + assert_equal( + (create_result.terminal_id as string).length > 0, + true, + 'terminal_id not empty', + ); + + // Collect notifications — expect terminal_data with "hello" and + // terminal_exited with exit_code 0. Order may vary, collect up to 10. + let got_data = false; + let got_exited = false; + let exit_code: number | null = null; + for (let i = 0; i < 10 && !(got_data && got_exited); i++) { + const msg = (await conn.receive(5_000)) as Record; + if (msg.method === 'terminal_data') { + const params = msg.params as Record; + assert_equal( + params.terminal_id, + create_result.terminal_id, + 'data terminal_id matches', + ); + if ((params.data as string).includes('hello')) { + got_data = true; + } + } else if (msg.method === 'terminal_exited') { + const params = msg.params as Record; + assert_equal( + params.terminal_id, + create_result.terminal_id, + 'exited terminal_id matches', + ); + exit_code = params.exit_code as number | null; + got_exited = true; + } + } + assert_equal(got_data, true, 'received terminal_data with hello'); + assert_equal(got_exited, true, 'received terminal_exited'); + assert_equal(exit_code, 0, 'exit_code is 0'); + } finally { + conn.close(); + } + }, + }, + { + name: 'terminal_close', + fn: async (config, session_cookie) => { + // Spawn a long-running process, then close it explicitly. + // The close response and terminal_exited notification may arrive + // in either order — collect both. 
+ const conn = await open_ws(config, {cookie: session_cookie}); + try { + await ensure_ws_registered(conn); + + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'tcl-1', + method: 'terminal_create', + params: {command: 'sleep', args: ['60']}, + }), + ); + const create_res = (await conn.receive()) as Record; + assert_equal(create_res.id, 'tcl-1', 'create id'); + const terminal_id = (create_res.result as Record) + .terminal_id as string; + + // Close the terminal + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'tcl-2', + method: 'terminal_close', + params: {terminal_id}, + }), + ); + + // Collect up to 3 messages — expect the close response and + // possibly a terminal_exited notification (order varies by backend) + let got_close_response = false; + for (let i = 0; i < 3 && !got_close_response; i++) { + const msg = (await conn.receive(5_000)) as Record; + if (msg.id === 'tcl-2') { + got_close_response = true; + const close_result = msg.result as Record; + assert_equal( + close_result.exit_code === null || typeof close_result.exit_code === 'number', + true, + 'exit_code is number or null', + ); + } + // terminal_exited or terminal_data notifications are fine — skip them + } + assert_equal(got_close_response, true, 'received close response'); + } finally { + conn.close(); + } + }, + }, + { + name: 'terminal_write_and_read', + fn: async (config, session_cookie) => { + // Spawn cat, write data, verify it's echoed back via terminal_data + const conn = await open_ws(config, {cookie: session_cookie}); + try { + await ensure_ws_registered(conn); + + // Create terminal running cat (echoes stdin to stdout) + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'twr-1', + method: 'terminal_create', + params: {command: 'cat', args: []}, + }), + ); + const create_res = (await conn.receive()) as Record; + assert_equal(create_res.id, 'twr-1', 'create id'); + const terminal_id = (create_res.result as Record) + .terminal_id as string; + + // Write data to the terminal + 
conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'twr-2', + method: 'terminal_data_send', + params: {terminal_id, data: 'integration test\n'}, + }), + ); + const write_res = (await conn.receive()) as Record; + assert_equal(write_res.id, 'twr-2', 'write id'); + assert_equal(write_res.result, null, 'write result is null'); + + // Collect terminal_data notifications until we see our echoed text + let got_echo = false; + for (let i = 0; i < 20 && !got_echo; i++) { + const msg = (await conn.receive(5_000)) as Record; + if (msg.method === 'terminal_data') { + const params = msg.params as Record; + if ((params.data as string).includes('integration test')) { + got_echo = true; + } + } + } + assert_equal(got_echo, true, 'received echoed data'); + + // Clean up + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'twr-3', + method: 'terminal_close', + params: {terminal_id}, + }), + ); + // Drain close response (and any notifications) + for (let i = 0; i < 3; i++) { + const msg = (await conn.receive(5_000)) as Record; + if (msg.id === 'twr-3') break; + } + } finally { + conn.close(); + } + }, + }, + { + name: 'terminal_resize_live', + fn: async (config, session_cookie) => { + // Spawn a process, resize it, verify no error + const conn = await open_ws(config, {cookie: session_cookie}); + try { + await ensure_ws_registered(conn); + + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'trl-1', + method: 'terminal_create', + params: {command: 'sleep', args: ['60']}, + }), + ); + const create_res = (await conn.receive()) as Record; + assert_equal(create_res.id, 'trl-1', 'create id'); + const terminal_id = (create_res.result as Record) + .terminal_id as string; + + // Resize + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'trl-2', + method: 'terminal_resize', + params: {terminal_id, cols: 120, rows: 40}, + }), + ); + const resize_res = (await conn.receive()) as Record; + assert_equal(resize_res.id, 'trl-2', 'resize id'); + assert_equal(resize_res.result, null, 
'resize result is null'); + + // Clean up + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'trl-3', + method: 'terminal_close', + params: {terminal_id}, + }), + ); + for (let i = 0; i < 3; i++) { + const msg = (await conn.receive(5_000)) as Record; + if (msg.id === 'trl-3') break; + } + } finally { + conn.close(); + } + }, + }, + { + name: 'terminal_create_with_cwd', + fn: async (config, session_cookie) => { + // Spawn pwd with explicit cwd, verify output contains the cwd path + const conn = await open_ws(config, {cookie: session_cookie}); + try { + await ensure_ws_registered(conn); + + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'tcc-1', + method: 'terminal_create', + params: {command: 'pwd', args: [], cwd: '/tmp'}, + }), + ); + const create_res = (await conn.receive()) as Record; + assert_equal(create_res.id, 'tcc-1', 'create id'); + const terminal_id = (create_res.result as Record) + .terminal_id as string; + + let got_tmp = false; + for (let i = 0; i < 10 && !got_tmp; i++) { + const msg = (await conn.receive(5_000)) as Record; + if ( + msg.method === 'terminal_data' && + ((msg.params as Record).data as string).includes('/tmp') + ) { + got_tmp = true; + } + } + assert_equal(got_tmp, true, 'pwd output contains /tmp'); + } finally { + conn.close(); + } + }, + }, + { + name: 'terminal_create_nonexistent_command', + fn: async (config, session_cookie) => { + // Spawning a nonexistent binary. 
Two valid behaviors: + // - Rust (forkpty): spawn succeeds, child exits 127, terminal_exited notification + // - Deno fallback (Deno.Command): spawn fails, error response + const conn = await open_ws(config, {cookie: session_cookie}); + try { + await ensure_ws_registered(conn); + + conn.send( + JSON.stringify({ + jsonrpc: '2.0', + id: 'tcne-1', + method: 'terminal_create', + params: {command: '/nonexistent/binary_zzz_test', args: []}, + }), + ); + const create_res = (await conn.receive()) as Record; + assert_equal(create_res.id, 'tcne-1', 'create id'); + + if (create_res.error) { + // Deno fallback: spawn failed → error response + const error = create_res.error as Record; + assert_equal(error.code, -32603, 'error code'); + } else { + // Rust / Deno FFI: forkpty succeeded, child exits 127 + const create_result = create_res.result as Record; + assert_equal(typeof create_result.terminal_id, 'string', 'terminal_id is string'); + + let got_exited = false; + let exit_code: number | null = null; + for (let i = 0; i < 10 && !got_exited; i++) { + const msg = (await conn.receive(5_000)) as Record; + if (msg.method === 'terminal_exited') { + got_exited = true; + exit_code = (msg.params as Record).exit_code as + | number + | null; + } + } + assert_equal(got_exited, true, 'received terminal_exited'); + assert_equal(exit_code, 127, 'exit_code is 127 (command not found)'); + } + } finally { + conn.close(); + } + }, + }, + { + name: 'terminal_data_send_missing', + fn: async (config, session_cookie) => { + // terminal_data_send with a nonexistent terminal_id → silent null + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'tdsm-1', + method: 'terminal_data_send', + params: {terminal_id: '00000000-0000-0000-0000-000000000000', data: 'hello'}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + assert_equal(rpc.result, null, 'result is null'); + }, + }, + { + name: 
'terminal_close_missing', + fn: async (config, session_cookie) => { + // terminal_close with a nonexistent terminal_id → {exit_code: null} + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'tclm-1', + method: 'terminal_close', + params: {terminal_id: '00000000-0000-0000-0000-000000000000'}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + assert_deep_equal(rpc.result, {exit_code: null}, 'result'); + }, + }, + { + name: 'terminal_resize_missing', + fn: async (config, session_cookie) => { + // terminal_resize with a nonexistent terminal_id → silent null + const res = await post_rpc( + config, + JSON.stringify({ + jsonrpc: '2.0', + id: 'trm-1', + method: 'terminal_resize', + params: {terminal_id: '00000000-0000-0000-0000-000000000000', cols: 80, rows: 24}, + }), + {cookie: session_cookie}, + ); + assert_equal(res.status, 200, 'status'); + const rpc = res.body as Record; + assert_equal(rpc.result, null, 'result is null'); + }, + }, +]; + +// == Non-keeper tests ========================================================= +// +// Tests that require a non-keeper authenticated cookie (separate from the +// admin session cookie used by most tests). 

// NOTE(review): generic parameters (Promise<void>, Record<string, unknown>,
// the run_tests return type) were stripped by extraction and reconstructed
// from context — confirm against the original file.
type NonKeeperTestFn = (
	config: BackendConfig,
	session_cookie?: string,
	non_keeper_cookie?: string,
) => Promise<void>;

const non_keeper_tests: ReadonlyArray<{name: string; fn: NonKeeperTestFn}> = [
	{
		name: 'non_keeper_authenticated_action',
		fn: async (config, _session_cookie, non_keeper_cookie) => {
			// Non-keeper users CAN access authenticated (non-keeper) actions
			if (!non_keeper_cookie) throw new Error('non_keeper_cookie not available');
			const {status, body} = await post_rpc(
				config,
				JSON.stringify({
					jsonrpc: '2.0',
					id: 'nka-1',
					method: 'workspace_list',
				}),
				{cookie: non_keeper_cookie},
			);
			assert_equal(status, 200, 'status');
			const rpc_body = body as Record<string, unknown>;
			assert_equal(rpc_body.id, 'nka-1', 'id');
			const result = rpc_body.result as Record<string, unknown>;
			assert_equal(Array.isArray(result.workspaces), true, 'has workspaces array');
		},
	},
	{
		name: 'auth_keeper_forbidden',
		fn: async (config, _session_cookie, non_keeper_cookie) => {
			// Authenticated non-keeper user calling a keeper action → 403
			if (!non_keeper_cookie) throw new Error('non_keeper_cookie not available');
			const {status, body} = await post_rpc(
				config,
				JSON.stringify({
					jsonrpc: '2.0',
					id: 'akf-1',
					method: 'provider_update_api_key',
					params: {provider_name: 'claude', api_key: 'sk-test'},
				}),
				{cookie: non_keeper_cookie},
			);
			assert_equal(status, 403, 'status');
			const rpc_body = body as Record<string, unknown>;
			assert_equal(rpc_body.id, 'akf-1', 'id');
			const error = rpc_body.error as Record<string, unknown>;
			assert_equal(error.code, -32002, 'error code');
			assert_equal(error.message, 'forbidden', 'error message');
		},
	},
];

// == Test runner ===============================================================

/** Run an HTTP test case: POST the raw body and compare status and response. */
const run_http_case = async (
	config: BackendConfig,
	http_case: HttpCase,
	session_cookie?: string,
): Promise<void> => {
	const raw_body =
		typeof http_case.body === 'string' ? http_case.body : JSON.stringify(http_case.body);
	const cookie_opts = session_cookie ? {cookie: session_cookie} : undefined;
	const {status, body} = await post_rpc(config, raw_body, cookie_opts);
	assert_equal(status, http_case.status, 'status');
	if (http_case.expected === null) {
		assert_equal(body, null, 'body');
	} else {
		// Exact match. error.data is normalized only when actual includes it
		// but expected doesn't — handles Deno/Rust validation detail asymmetry.
		const normalized = normalize_error_data(body, http_case.expected);
		assert_deep_equal(normalized.actual, normalized.expected, 'body');
	}
};

/** Run a WebSocket test case: send one message, compare the first response. */
const run_ws_case = async (
	config: BackendConfig,
	ws_case: WsCase,
	session_cookie?: string,
): Promise<void> => {
	const conn = await open_ws(config, {cookie: session_cookie});
	try {
		conn.send(ws_case.message);
		const body = await conn.receive();
		assert_deep_equal(body, ws_case.expected, 'body');
	} finally {
		conn.close();
	}
};

/** Collect all test cases into a flat list for the runner. */
const build_test_list = (
	config: BackendConfig,
	session_cookie?: string,
	non_keeper_cookie?: string,
): Array<{name: string; fn: () => Promise<void>}> => {
	const tests: Array<{name: string; fn: () => Promise<void>}> = [];

	// HTTP and WS cases honor per-backend `skip` lists; special and non-keeper
	// tests always run.
	for (const http_case of http_cases) {
		if (http_case.skip?.includes(config.name)) continue;
		tests.push({name: http_case.name, fn: () => run_http_case(config, http_case, session_cookie)});
	}
	for (const ws_case of ws_cases) {
		if (ws_case.skip?.includes(config.name)) continue;
		tests.push({name: ws_case.name, fn: () => run_ws_case(config, ws_case, session_cookie)});
	}
	for (const special of special_tests) {
		tests.push({name: special.name, fn: () => special.fn(config, session_cookie)});
	}
	for (const non_keeper of non_keeper_tests) {
		tests.push({
			name: non_keeper.name,
			fn: () => non_keeper.fn(config, session_cookie, non_keeper_cookie),
		});
	}

	return tests;
};

/**
 * Run every collected test (optionally filtered by substring) and return
 * pass/fail results with timings. Failures are captured, never thrown.
 */
export const run_tests = async (
	config: BackendConfig,
	filter?: string,
	session_cookie?: string,
	non_keeper_cookie?: string,
): Promise<Array<TestResult>> => {
	const tests = build_test_list(config, session_cookie, non_keeper_cookie);
	const results: TestResult[] = [];

	for (const test of tests) {
		if (filter
&& !test.name.includes(filter)) { + continue; + } + const start = performance.now(); + try { + await test.fn(); + results.push({name: test.name, passed: true, duration_ms: performance.now() - start}); + } catch (e) { + const message = e instanceof Error ? e.message : String(e); + results.push({ + name: test.name, + passed: false, + duration_ms: performance.now() - start, + error: message, + }); + } + } + + return results; +}; diff --git a/tsconfig.json b/tsconfig.json index b7da32344..0a2a54ecb 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,7 +1,7 @@ { "extends": "./.svelte-kit/tsconfig.json", "compilerOptions": { - "types": ["@sveltejs/kit"], + "types": ["@sveltejs/kit", "deno"], "module": "nodenext", "moduleResolution": "nodenext", "strict": true, diff --git a/vite.config.ts b/vite.config.ts index 6bb66d6de..f9562b426 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -1,14 +1,41 @@ +import {availableParallelism} from 'node:os'; import {defineConfig} from 'vite'; import {sveltekit} from '@sveltejs/kit/vite'; import {vite_plugin_library_well_known} from '@fuzdev/fuz_ui/vite_plugin_library_well_known.js'; +const max_workers = Math.max(1, Math.ceil(availableParallelism() / 2)); + export default defineConfig(({mode}) => ({ plugins: [sveltekit(), vite_plugin_library_well_known()], + test: { + projects: [ + { + extends: true, + test: { + name: 'unit', + include: ['src/test/**/*.test.ts'], + exclude: ['src/test/**/*.db.test.ts'], + maxWorkers: max_workers, + sequence: {groupOrder: 1}, + }, + }, + { + extends: true, + test: { + name: 'db', + include: ['src/test/**/*.db.test.ts'], + isolate: false, + fileParallelism: false, + sequence: {groupOrder: 2}, + }, + }, + ], + }, // In test mode, use browser conditions so Svelte's mount() resolves to the client version resolve: mode === 'test' ? 
{conditions: ['browser']} : undefined, server: { proxy: { - '/api': 'http://localhost:8999', // equal to `PUBLIC_SERVER_HOST + ':' + PUBLIC_SERVER_PROXIED_PORT` + '/api': `http://localhost:${process.env.PUBLIC_SERVER_PROXIED_PORT || '8999'}`, }, }, }));